Mirror of https://github.com/photoprism/photoprism.git, synced 2025-12-12 00:34:13 +01:00
Indexer: Add optimize command & automatically fix location data #546
@@ -56,6 +56,7 @@ func main() {
         commands.IndexCommand,
         commands.ImportCommand,
         commands.MomentsCommand,
+        commands.OptimizeCommand,
         commands.PurgeCommand,
         commands.CopyCommand,
         commands.ConvertCommand,
@@ -68,7 +68,6 @@ func UpdatePhoto(router *gin.RouterGroup) {
            return
        }

-       conf := service.Config()
        uid := c.Param("uid")
        m, err := query.PhotoByUID(uid)
@@ -93,7 +92,7 @@ func UpdatePhoto(router *gin.RouterGroup) {
        }

        // 3) Save model with values from form
-       if err := entity.SavePhotoForm(m, f, conf.GeoCodingApi()); err != nil {
+       if err := entity.SavePhotoForm(m, f); err != nil {
            Abort(c, http.StatusInternalServerError, i18n.ErrSaveFailed)
            return
        }
@@ -97,8 +97,8 @@ func configAction(ctx *cli.Context) error {
    fmt.Printf("%-25s %t\n", "sidecar-yaml", conf.SidecarYaml())
    fmt.Printf("%-25s %s\n", "sidecar-path", conf.SidecarPath())

-   // Places / Geocoding API configuration.
-   fmt.Printf("%-25s %s\n", "geocoding-api", conf.GeoCodingApi())
+   // Geo data API.
+   fmt.Printf("%-25s %s\n", "geo-api", conf.GeoApi())

    // Thumbs, resampling and download security token.
    fmt.Printf("%-25s %s\n", "download-token", conf.DownloadToken())
internal/commands/optimize.go (new file, 54 lines)
@@ -0,0 +1,54 @@
+package commands
+
+import (
+   "context"
+   "time"
+
+   "github.com/photoprism/photoprism/internal/workers"
+
+   "github.com/photoprism/photoprism/internal/config"
+   "github.com/photoprism/photoprism/internal/service"
+   "github.com/urfave/cli"
+)
+
+// OptimizeCommand is used to register the index cli command.
+var OptimizeCommand = cli.Command{
+   Name:   "optimize",
+   Usage:  "Starts metadata check and optimization",
+   Action: optimizeAction,
+}
+
+// optimizeAction starts metadata check and optimization.
+func optimizeAction(ctx *cli.Context) error {
+   start := time.Now()
+
+   conf := config.NewConfig(ctx)
+   service.SetConfig(conf)
+
+   _, cancel := context.WithCancel(context.Background())
+   defer cancel()
+
+   if err := conf.Init(); err != nil {
+       return err
+   }
+
+   conf.InitDb()
+
+   if conf.ReadOnly() {
+       log.Infof("read-only mode enabled")
+   }
+
+   worker := workers.NewMeta(conf)
+
+   if err := worker.Start(); err != nil {
+       return err
+   } else {
+       elapsed := time.Since(start)
+
+       log.Infof("completed in %s", elapsed)
+   }
+
+   conf.Shutdown()
+
+   return nil
+}
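For context, the new subcommand simply wires the existing metadata worker into the CLI. Below is a minimal, hypothetical sketch of driving the same optimization from Go code; the helper name runOptimize is illustrative and not part of this commit, only the called functions appear in the diff above.

    package commands

    import (
        "github.com/photoprism/photoprism/internal/config"
        "github.com/photoprism/photoprism/internal/service"
        "github.com/photoprism/photoprism/internal/workers"
    )

    // runOptimize is a hypothetical helper mirroring optimizeAction above:
    // it runs the metadata worker once against an initialized configuration.
    func runOptimize(conf *config.Config) error {
        service.SetConfig(conf)

        if err := conf.Init(); err != nil {
            return err
        }

        conf.InitDb()
        defer conf.Shutdown()

        // The metadata worker performs the check and optimization, including
        // the automatic location fixes added in this commit.
        return workers.NewMeta(conf).Start()
    }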
@@ -7,6 +7,8 @@ import (
    "sync"
    "time"

+   "github.com/photoprism/photoprism/internal/entity"
+
    "github.com/jinzhu/gorm"
    _ "github.com/jinzhu/gorm/dialects/mysql"
    _ "github.com/jinzhu/gorm/dialects/sqlite"
@@ -81,6 +83,7 @@ func (c *Config) Propagate() {
    thumb.Filter = c.ThumbFilter()
    thumb.JpegQuality = c.JpegQuality()
    places.UserAgent = c.UserAgent()
+   entity.GeoApi = c.GeoApi()

    c.Settings().Propagate()
    c.Hub().Propagate()
@@ -257,9 +260,9 @@ func (c *Config) WakeupInterval() time.Duration {
    return time.Duration(c.params.WakeupInterval) * time.Second
}

-// GeoCodingApi returns the preferred geo coding api (none, osm or places).
-func (c *Config) GeoCodingApi() string {
-   switch c.params.GeoCodingApi {
+// GeoApi returns the preferred geo coding api (none, osm or places).
+func (c *Config) GeoApi() string {
+   switch c.params.GeoApi {
    case "places":
        return "places"
    case "osm":
@@ -284,14 +284,14 @@ func TestConfig_WakeupInterval(t *testing.T) {
    assert.Equal(t, time.Duration(900000000000), c.WakeupInterval())
}

-func TestConfig_GeoCodingApi(t *testing.T) {
+func TestConfig_GeoApi(t *testing.T) {
    c := NewConfig(CliTestContext())

-   assert.Equal(t, "", c.GeoCodingApi())
-   c.params.GeoCodingApi = "places"
-   assert.Equal(t, "places", c.GeoCodingApi())
-   c.params.GeoCodingApi = "osm"
-   assert.Equal(t, "osm", c.GeoCodingApi())
+   assert.Equal(t, "", c.GeoApi())
+   c.params.GeoApi = "places"
+   assert.Equal(t, "places", c.GeoApi())
+   c.params.GeoApi = "osm"
+   assert.Equal(t, "osm", c.GeoApi())
}

func TestConfig_OriginalsLimit(t *testing.T) {
@@ -256,10 +256,10 @@ var GlobalFlags = []cli.Flag{
        EnvVar: "PHOTOPRISM_UPLOAD_NSFW",
    },
    cli.StringFlag{
-       Name:   "geocoding-api, g",
-       Usage:  "geocoding api (none, osm or places)",
+       Name:   "geo-api, g",
+       Usage:  "geo data api (none, osm or places)",
        Value:  "places",
-       EnvVar: "PHOTOPRISM_GEOCODING_API",
+       EnvVar: "PHOTOPRISM_GEO_API",
    },
    cli.StringFlag{
        Name: "download-token",
@@ -81,7 +81,7 @@ type Params struct {
    DetachServer  bool   `yaml:"detach-server" flag:"detach-server"`
    DetectNSFW    bool   `yaml:"detect-nsfw" flag:"detect-nsfw"`
    UploadNSFW    bool   `yaml:"upload-nsfw" flag:"upload-nsfw"`
-   GeoCodingApi  string `yaml:"geocoding-api" flag:"geocoding-api"`
+   GeoApi        string `yaml:"geo-api" flag:"geo-api"`
    DownloadToken string `yaml:"download-token" flag:"download-token"`
    PreviewToken  string `yaml:"preview-token" flag:"preview-token"`
    ThumbFilter   string `yaml:"thumb-filter" flag:"thumb-filter"`
@@ -18,6 +18,7 @@ import (
)

var log = event.Log
+var GeoApi = "places"

func logError(result *gorm.DB) {
    if result.Error != nil {
@@ -114,7 +114,7 @@ func NewPhoto() Photo {
}

// SavePhotoForm saves a model in the database using form data.
-func SavePhotoForm(model Photo, form form.Photo, geoApi string) error {
+func SavePhotoForm(model Photo, form form.Photo) error {
    locChanged := model.PhotoLat != form.PhotoLat || model.PhotoLng != form.PhotoLng || model.PhotoCountry != form.PhotoCountry

    if err := deepcopier.Copy(&model).From(form); err != nil {
@@ -138,7 +138,7 @@ func SavePhotoForm(model Photo, form form.Photo, geoApi string) error {
    }

    if locChanged && model.PlaceSrc == SrcManual {
-       locKeywords, labels := model.UpdateLocation(geoApi)
+       locKeywords, labels := model.UpdateLocation()

        model.AddLabels(labels)
@@ -60,11 +60,11 @@ func (m *Photo) GetTakenAt() time.Time {
}

// UpdateLocation updates location and labels based on latitude and longitude.
-func (m *Photo) UpdateLocation(geoApi string) (keywords []string, labels classify.Labels) {
+func (m *Photo) UpdateLocation() (keywords []string, labels classify.Labels) {
    if m.HasLatLng() {
        var location = NewCell(m.PhotoLat, m.PhotoLng)

-       err := location.Find(geoApi)
+       err := location.Find(GeoApi)

        if location.Place == nil {
            log.Warnf("photo: failed fetching geo data (uid %s, cell %s)", m.PhotoUID, location.ID)
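The signature change above is the core of the refactor: instead of threading a geoApi string through SavePhotoForm and UpdateLocation, the setting now lives in the package-level entity.GeoApi variable that Config.Propagate() assigns. A hedged sketch of the resulting call flow; updatePhotoLocation is an illustrative helper, not part of the commit, while the called functions come from the hunks above.

    package example

    import (
        "github.com/photoprism/photoprism/internal/config"
        "github.com/photoprism/photoprism/internal/entity"
    )

    // updatePhotoLocation illustrates the new data flow: the geo data API is
    // no longer passed as an argument, it is read from entity.GeoApi.
    func updatePhotoLocation(conf *config.Config, photo *entity.Photo) {
        // Config.Propagate() now performs this assignment; shown inline here
        // so the flow stays visible.
        entity.GeoApi = conf.GeoApi()

        // UpdateLocation() resolves the place via the package-level setting.
        keywords, labels := photo.UpdateLocation()
        photo.AddLabels(labels)
        _ = keywords
    }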
@@ -70,7 +70,7 @@ func (m *Photo) EstimatePlace() {
        Where("place_id <> '' AND place_id <> 'zz' AND place_src <> '' AND place_src <> ?", SrcEstimate).
        Order(gorm.Expr(dateExpr, m.TakenAt)).
        Preload("Place").First(&recentPhoto).Error; err != nil {
-       log.Errorf("photo: %s (estimate place)", err.Error())
+       log.Debugf("photo: can't estimate place at %s", m.TakenAt)
        m.EstimateCountry()
    } else {
        if hours := recentPhoto.TakenAt.Sub(m.TakenAt) / time.Hour; hours < -36 || hours > 36 {
@@ -99,6 +99,10 @@ func (m *Photo) Optimize() (updated bool, err error) {

    current := *m

+   if m.HasLatLng() && !m.HasLocation() {
+       m.UpdateLocation()
+   }
+
    m.EstimatePlace()

    labels := m.ClassifyLabels()
@@ -48,7 +48,7 @@ func TestSavePhotoForm(t *testing.T) {

    m := PhotoFixtures["Photo08"]

-   err := SavePhotoForm(m, f, "places")
+   err := SavePhotoForm(m, f)

    if err != nil {
        t.Fatal(err)
@@ -615,7 +615,7 @@ func (ind *Index) MediaFile(m *MediaFile, o IndexOptions, originalName string) (
        }

        var locLabels classify.Labels
-       locKeywords, locLabels = photo.UpdateLocation(ind.conf.GeoCodingApi())
+       locKeywords, locLabels = photo.UpdateLocation()
        labels = append(labels, locLabels...)
    }
@@ -104,7 +104,7 @@ func PhotosCheck(limit int, offset int) (entities entity.Photos, err error) {
        Preload("Cell").
        Preload("Cell.Place").
        Where("checked_at IS NULL OR checked_at < ?", time.Now().Add(-1*time.Hour*24*3)).
-       Where("updated_at < ?", time.Now().Add(-1*time.Minute*10)).
+       Where("updated_at < ? OR (cell_id = 'zz' AND photo_lat <> 0)", time.Now().Add(-1*time.Minute*10)).
        Limit(limit).Offset(offset).Find(&entities).Error

    return entities, err
@@ -33,7 +33,7 @@ func (worker *Meta) originalsPath() string {
func (worker *Meta) Start() (err error) {
    defer func() {
        if r := recover(); r != nil {
-           err = fmt.Errorf("meta-worker: %s (panic)\nstack: %s", r, debug.Stack())
+           err = fmt.Errorf("metadata: %s (panic)\nstack: %s", r, debug.Stack())
            log.Error(err)
        }
    }()
@@ -44,7 +44,7 @@ func (worker *Meta) Start() (err error) {

    defer mutex.MetaWorker.Stop()

-   log.Debugf("meta-worker: starting routine check")
+   log.Debugf("metadata: starting routine check")

    done := make(map[string]bool)
@@ -67,7 +67,7 @@ func (worker *Meta) Start() (err error) {

    for _, photo := range photos {
        if mutex.MetaWorker.Canceled() {
-           return errors.New("meta-worker: check canceled")
+           return errors.New("metadata: check canceled")
        }

        if done[photo.PhotoUID] {
@@ -77,15 +77,15 @@ func (worker *Meta) Start() (err error) {
        done[photo.PhotoUID] = true

        if updated, err := photo.Optimize(); err != nil {
-           log.Errorf("meta-worker: %s (optimize photo)", err)
+           log.Errorf("metadata: %s (optimize photo)", err)
        } else if updated {
            optimized++
-           log.Debugf("meta-worker: optimized photo %s", photo.String())
+           log.Debugf("metadata: optimized photo %s", photo.String())
        }
    }

    if mutex.MetaWorker.Canceled() {
-       return errors.New("meta-worker: check canceled")
+       return errors.New("metadata: check canceled")
    }

    offset += limit
@@ -94,15 +94,15 @@ func (worker *Meta) Start() (err error) {
    }

    if optimized > 0 {
-       log.Infof("meta-worker: optimized %d photos", optimized)
+       log.Infof("metadata: optimized %d photos", optimized)
    }

    if err := query.ResetPhotoQuality(); err != nil {
-       log.Warnf("meta-worker: %s (reset photo quality)", err.Error())
+       log.Warnf("metadata: %s (reset photo quality)", err.Error())
    }

    if err := entity.UpdatePhotoCounts(); err != nil {
-       log.Warnf("meta-worker: %s (update photo counts)", err.Error())
+       log.Warnf("metadata: %s (update photo counts)", err.Error())
    }

    moments := photoprism.NewMoments(worker.conf)
@@ -29,7 +29,7 @@ func NewShare(conf *config.Config) *Share {
// logError logs an error message if err is not nil.
func (worker *Share) logError(err error) {
    if err != nil {
-       log.Errorf("share-worker: %s", err.Error())
+       log.Errorf("share: %s", err.Error())
    }
}
@@ -37,7 +37,7 @@ func (worker *Share) logError(err error) {
func (worker *Share) Start() (err error) {
    defer func() {
        if r := recover(); r != nil {
-           err = fmt.Errorf("share-worker: %s (panic)\nstack: %s", r, debug.Stack())
+           err = fmt.Errorf("share: %s (panic)\nstack: %s", r, debug.Stack())
            log.Error(err)
        }
    }()
@@ -89,7 +89,7 @@ func (worker *Share) Start() (err error) {

        if _, ok := existingDirs[dir]; !ok {
            if err := client.CreateDir(dir); err != nil {
-               log.Errorf("share-worker: failed creating folder %s", dir)
+               log.Errorf("share: failed creating folder %s", dir)
                continue
            }
        }
@@ -100,7 +100,7 @@ func (worker *Share) Start() (err error) {
        thumbType, ok := thumb.Types[a.ShareSize]

        if !ok {
-           log.Errorf("share-worker: invalid size %s", a.ShareSize)
+           log.Errorf("share: invalid size %s", a.ShareSize)
            continue
        }
@@ -117,7 +117,7 @@ func (worker *Share) Start() (err error) {
            file.Errors++
            file.Error = err.Error()
        } else {
-           log.Infof("share-worker: uploaded %s to %s", file.RemoteName, a.AccName)
+           log.Infof("share: uploaded %s to %s", file.RemoteName, a.AccName)
            file.Errors = 0
            file.Error = ""
            file.Status = entity.FileShareShared
@@ -168,7 +168,7 @@ func (worker *Share) Start() (err error) {
            file.Errors++
            file.Error = err.Error()
        } else {
-           log.Infof("share-worker: removed %s from %s", file.RemoteName, a.AccName)
+           log.Infof("share: removed %s from %s", file.RemoteName, a.AccName)
            file.Errors = 0
            file.Error = ""
            file.Status = entity.FileShareRemoved
@@ -29,14 +29,14 @@ func NewSync(conf *config.Config) *Sync {
// logError logs an error message if err is not nil.
func (worker *Sync) logError(err error) {
    if err != nil {
-       log.Errorf("sync-worker: %s", err.Error())
+       log.Errorf("sync: %s", err.Error())
    }
}

// logWarn logs a warning message if err is not nil.
func (worker *Sync) logWarn(err error) {
    if err != nil {
-       log.Warnf("sync-worker: %s", err.Error())
+       log.Warnf("sync: %s", err.Error())
    }
}
@@ -44,7 +44,7 @@ func (worker *Sync) logWarn(err error) {
func (worker *Sync) Start() (err error) {
    defer func() {
        if r := recover(); r != nil {
-           err = fmt.Errorf("sync-worker: %s (panic)\nstack: %s", r, debug.Stack())
+           err = fmt.Errorf("sync: %s (panic)\nstack: %s", r, debug.Stack())
            log.Error(err)
        }
    }()
@@ -73,7 +73,7 @@ func (worker *Sync) Start() (err error) {
            if err := entity.Db().Save(&a).Error; err != nil {
                worker.logError(err)
            } else {
-               log.Warnf("sync-worker: disabled sync, %s failed more than %d times", a.AccName, a.RetryLimit)
+               log.Warnf("sync: disabled sync, %s failed more than %d times", a.AccName, a.RetryLimit)
            }

            continue
@@ -69,12 +69,12 @@ func (worker *Sync) download(a entity.Account) (complete bool, err error) {
    }

    if len(relatedFiles) == 0 {
-       log.Infof("sync-worker: download complete for %s", a.AccName)
+       log.Infof("sync: download complete for %s", a.AccName)
        event.Publish("sync.downloaded", event.Data{"account": a})
        return true, nil
    }

-   log.Infof("sync-worker: downloading from %s", a.AccName)
+   log.Infof("sync: downloading from %s", a.AccName)

    client := webdav.New(a.AccURL, a.AccUser, a.AccPass)
@@ -95,14 +95,14 @@ func (worker *Sync) download(a entity.Account) (complete bool, err error) {
        }

        if file.Errors > a.RetryLimit {
-           log.Debugf("sync-worker: downloading %s failed more than %d times", file.RemoteName, a.RetryLimit)
+           log.Debugf("sync: downloading %s failed more than %d times", file.RemoteName, a.RetryLimit)
            continue
        }

        localName := baseDir + file.RemoteName

        if _, err := os.Stat(localName); err == nil {
-           log.Warnf("sync-worker: download skipped, %s already exists", localName)
+           log.Warnf("sync: download skipped, %s already exists", localName)
            file.Status = entity.FileSyncExists
        } else {
            if err := client.Download(file.RemoteName, localName, false); err != nil {
@@ -110,7 +110,7 @@ func (worker *Sync) download(a entity.Account) (complete bool, err error) {
                file.Errors++
                file.Error = err.Error()
            } else {
-               log.Infof("sync-worker: downloaded %s from %s", file.RemoteName, a.AccName)
+               log.Infof("sync: downloaded %s from %s", file.RemoteName, a.AccName)
                file.Status = entity.FileSyncDownloaded
            }
@@ -159,7 +159,7 @@ func (worker *Sync) download(a entity.Account) (complete bool, err error) {
        related.Files = rf

        if a.SyncFilenames {
-           log.Infof("sync-worker: indexing %s and related files", file.RemoteName)
+           log.Infof("sync: indexing %s and related files", file.RemoteName)
            indexJobs <- photoprism.IndexJob{
                FileName: mf.FileName(),
                Related:  related,
@@ -167,7 +167,7 @@ func (worker *Sync) download(a entity.Account) (complete bool, err error) {
                Ind:      service.Index(),
            }
        } else {
-           log.Infof("sync-worker: importing %s and related files", file.RemoteName)
+           log.Infof("sync: importing %s and related files", file.RemoteName)
            importJobs <- photoprism.ImportJob{
                FileName: mf.FileName(),
                Related:  related,
@@ -64,7 +64,7 @@ func (worker *Sync) refresh(a entity.Account) (complete bool, err error) {
        f = entity.FirstOrCreateFileSync(f)

        if f == nil {
-           log.Errorf("sync-worker: file sync entity should not be nil - bug?")
+           log.Errorf("sync: file sync entity should not be nil - bug?")
            continue
        }
@@ -25,7 +25,7 @@ func (worker *Sync) upload(a entity.Account) (complete bool, err error) {
    }

    if len(files) == 0 {
-       log.Infof("sync-worker: upload complete for %s", a.AccName)
+       log.Infof("sync: upload complete for %s", a.AccName)
        event.Publish("sync.uploaded", event.Data{"account": a})
        return true, nil
    }
@@ -44,7 +44,7 @@ func (worker *Sync) upload(a entity.Account) (complete bool, err error) {

        if _, ok := existingDirs[remoteDir]; !ok {
            if err := client.CreateDir(remoteDir); err != nil {
-               log.Errorf("sync-worker: failed creating remote folder %s", remoteDir)
+               log.Errorf("sync: failed creating remote folder %s", remoteDir)
                continue // try again next time
            }
        }
@@ -54,7 +54,7 @@ func (worker *Sync) upload(a entity.Account) (complete bool, err error) {
            continue // try again next time
        }

-       log.Infof("sync-worker: uploaded %s to %s on %s", fileName, remoteName, a.AccName)
+       log.Infof("sync: uploaded %s to %s on %s", fileName, remoteName, a.AccName)

        fileSync := entity.NewFileSync(a.ID, remoteName)
        fileSync.Status = entity.FileSyncUploaded
@@ -45,7 +45,7 @@ func StartMeta(conf *config.Config) {
    go func() {
        worker := NewMeta(conf)
        if err := worker.Start(); err != nil {
-           log.Warnf("meta-worker: %s", err)
+           log.Warnf("metadata: %s", err)
        }
    }()
}
@@ -57,7 +57,7 @@ func StartShare(conf *config.Config) {
    go func() {
        worker := NewShare(conf)
        if err := worker.Start(); err != nil {
-           log.Warnf("share-worker: %s", err)
+           log.Warnf("share: %s", err)
        }
    }()
}
@@ -69,7 +69,7 @@ func StartSync(conf *config.Config) {
    go func() {
        worker := NewSync(conf)
        if err := worker.Start(); err != nil {
-           log.Warnf("sync-worker: %s", err)
+           log.Warnf("sync: %s", err)
        }
    }()
}