Promise.resolve(response.data));
}
diff --git a/frontend/src/pages/settings/accounts.vue b/frontend/src/pages/settings/accounts.vue
index 1271b27d9..4afb33a7e 100644
--- a/frontend/src/pages/settings/accounts.vue
+++ b/frontend/src/pages/settings/accounts.vue
@@ -30,7 +30,7 @@
sync
sync_disabled
- {{ formatDate(props.item.SyncedAt) }} |
+ {{ formatDate(props.item.SyncDate) }} |
a.RetryLimit {
+ log.Warnf("sync: %s failed more than %d times", a.AccName, a.RetryLimit)
+ continue
+ }
+
+ switch a.SyncStatus {
+ case entity.AccountSyncStatusRefresh:
+ if complete, err := s.getRemoteFiles(a); err != nil {
+ a.AccErrors++
+ a.AccError = err.Error()
+ } else if complete {
+ a.AccErrors = 0
+ a.AccError = ""
+
+ if a.SyncDownload {
+ a.SyncStatus = entity.AccountSyncStatusDownload
+ } else if a.SyncUpload {
+ a.SyncStatus = entity.AccountSyncStatusUpload
+ } else {
+ a.SyncStatus = entity.AccountSyncStatusSynced
+ a.SyncDate.Time = time.Now()
+ a.SyncDate.Valid = true
+ }
+ }
+ case entity.AccountSyncStatusDownload:
+ if complete, err := s.download(a); err != nil {
+ a.AccErrors++
+ a.AccError = err.Error()
+ } else if complete && a.SyncUpload {
+ a.SyncStatus = entity.AccountSyncStatusUpload
+ } else if complete {
+ a.SyncStatus = entity.AccountSyncStatusSynced
+ a.SyncDate.Time = time.Now()
+ a.SyncDate.Valid = true
+ }
+ case entity.AccountSyncStatusUpload:
+ if complete, err := s.upload(a); err != nil {
+ a.AccErrors++
+ a.AccError = err.Error()
+ } else if complete {
+ a.SyncStatus = entity.AccountSyncStatusSynced
+ a.SyncDate.Time = time.Now()
+ a.SyncDate.Valid = true
+ }
+ case entity.AccountSyncStatusSynced:
+ if a.SyncDate.Valid && a.SyncDate.Time.Before(time.Now().Add(time.Duration(-1*a.SyncInterval)*time.Second)) {
+ a.SyncStatus = entity.AccountSyncStatusRefresh
+ }
+ default:
+ a.SyncStatus = entity.AccountSyncStatusRefresh
+ }
+
+ if mutex.Sync.Canceled() {
+ return nil
+ }
+
+ if err := db.Save(&a).Error; err != nil {
+ log.Errorf("sync: %s", err.Error())
+ }
+ }
return err
}
+
+// getRemoteFiles lists all remote directories and files of a WebDAV
+// account and stores one FileSync record per remote file so a later
+// download pass can fetch them. complete is true only after every
+// directory was fully traversed; cancellation via mutex.Sync returns
+// (false, nil) so the traversal is resumed on the next sync run.
+func (s *Sync) getRemoteFiles(a entity.Account) (complete bool, err error) {
+	// Only WebDAV accounts are supported; other types are reported as
+	// not complete, without an error.
+	if a.AccType != service.TypeWebDAV {
+		return false, nil
+	}
+
+	db := s.conf.Db()
+	client := webdav.New(a.AccURL, a.AccUser, a.AccPass)
+
+	// Recursively list all sub directories below the configured sync path.
+	subDirs, err := client.Directories(a.SyncPath, true)
+
+	if err != nil {
+		log.Error(err)
+		return false, err
+	}
+
+	// Traverse the sync root itself in addition to all sub directories.
+	dirs := append(subDirs.Abs(), a.SyncPath)
+
+	for _, dir := range dirs {
+		if mutex.Sync.Canceled() {
+			return false, nil
+		}
+
+		files, err := client.Files(dir)
+
+		if err != nil {
+			log.Error(err)
+			return false, err
+		}
+
+		for _, file := range files {
+			if mutex.Sync.Canceled() {
+				return false, nil
+			}
+
+			// Create the sync record if it doesn't exist yet; otherwise
+			// load the existing row (FirstOrCreate).
+			f := entity.NewFileSync(a.ID, file.Abs)
+			f.RemoteDate = file.Date
+			f.RemoteSize = file.Size
+			f.FirstOrCreate(db)
+
+			// If a previously downloaded file changed on the remote side,
+			// mark it as new again so it gets re-downloaded.
+			// NOTE(review): this assumes FirstOrCreate populates f with the
+			// stored RemoteDate for existing rows — confirm in entity impl.
+			if f.Status == entity.FileSyncDownloaded && !f.RemoteDate.Equal(file.Date) {
+				f.Status = entity.FileSyncNew
+				f.RemoteDate = file.Date
+				f.RemoteSize = file.Size
+				db.Save(&f)
+			}
+		}
+	}
+
+	return true, nil
+}
+
+// download fetches all remote files with status "new" for the given
+// account. It returns complete == true once no new files remain; until
+// then each call processes up to 100 files (the FileSyncs query limit)
+// and returns (false, nil) so it runs again on the next sync pass.
+func (s *Sync) download(a entity.Account) (complete bool, err error) {
+	db := s.conf.Db()
+	q := query.New(db)
+
+	// Up to 100 files per pass (see query.FileSyncs).
+	files, err := q.FileSyncs(a.ID, entity.FileSyncNew)
+
+	if err != nil {
+		log.Errorf("sync: %s", err.Error())
+		return false, err
+	}
+
+	// Nothing left to download: publish an event and report completion.
+	if len(files) == 0 {
+		// TODO: Subscribe event to start indexing / importing
+		event.Publish("sync.downloaded", event.Data{"account": a})
+		return true, nil
+	}
+
+	client := webdav.New(a.AccURL, a.AccUser, a.AccPass)
+
+	// Download either directly into the originals folder (keeping remote
+	// file names) or into a per-account import sub folder.
+	var baseDir string
+
+	if a.SyncFilenames {
+		baseDir = s.conf.OriginalsPath()
+	} else {
+		baseDir = fmt.Sprintf("%s/sync/%d", s.conf.ImportPath(), a.ID)
+	}
+
+	for _, file := range files {
+		if mutex.Sync.Canceled() {
+			return false, nil
+		}
+
+		// Skip files that already failed too often; they keep their
+		// "new" status but are no longer retried.
+		if file.Errors > a.RetryLimit {
+			log.Warnf("sync: downloading %s failed more than %d times", file.RemoteName, a.RetryLimit)
+			continue
+		}
+
+		// NOTE(review): plain concatenation assumes RemoteName begins with
+		// a path separator — confirm, or consider filepath.Join.
+		localName := baseDir + file.RemoteName
+
+		if err := client.Download(file.RemoteName, localName); err != nil {
+			file.Errors++
+			file.Error = err.Error()
+		} else {
+			file.Status = entity.FileSyncDownloaded
+		}
+
+		if mutex.Sync.Canceled() {
+			return false, nil
+		}
+
+		// Persist per-file status and error counters.
+		if err := db.Save(&file).Error; err != nil {
+			log.Errorf("sync: %s", err.Error())
+		}
+	}
+
+	// More files may remain; the next pass decides completion.
+	return false, nil
+}
+
+// upload is a placeholder for pushing local files to the remote account;
+// it currently does nothing and reports the upload step as not complete.
+func (s *Sync) upload(a entity.Account) (complete bool, err error) {
+	return false, nil
+}
diff --git a/internal/query/account.go b/internal/query/account.go
index 1d9ec4cc2..52f4f4794 100644
--- a/internal/query/account.go
+++ b/internal/query/account.go
@@ -17,6 +17,10 @@ func (q *Query) Accounts(f form.AccountSearch) (result []entity.Account, err err
s = s.Where("acc_sync = 1")
}
+ if f.Status != "" {
+ s = s.Where("sync_status = ?", f.Status)
+ }
+
s = s.Order("acc_name ASC")
if f.Count > 0 && f.Count <= 1000 {
diff --git a/internal/query/file_share.go b/internal/query/file_share.go
index 09e6f3c10..6a9b80ef2 100644
--- a/internal/query/file_share.go
+++ b/internal/query/file_share.go
@@ -38,7 +38,7 @@ func (q *Query) ExpiredFileShares(account entity.Account) (result []entity.FileS
s := q.db.Where(&entity.FileShare{})
- exp := time.Now().Add(time.Duration(account.ShareExpires)*time.Second)
+ exp := time.Now().Add(time.Duration(-1*account.ShareExpires) * time.Second)
s = s.Where("account_id = ?", account.ID)
s = s.Where("status = ?", entity.FileShareShared)
diff --git a/internal/query/file_sync.go b/internal/query/file_sync.go
new file mode 100644
index 000000000..610ffaf5b
--- /dev/null
+++ b/internal/query/file_sync.go
@@ -0,0 +1,29 @@
+package query
+
+import (
+	"github.com/photoprism/photoprism/internal/entity"
+)
+
+// FileSyncs returns up to 100 file sync records, oldest first, optionally
+// filtered by account id (when > 0) and/or status (when non-empty). The
+// related File entity is preloaded for each result.
+func (q *Query) FileSyncs(accountId uint, status string) (result []entity.FileSync, err error) {
+	s := q.db.Where(&entity.FileSync{})
+
+	if accountId > 0 {
+		s = s.Where("account_id = ?", accountId)
+	}
+
+	if status != "" {
+		s = s.Where("status = ?", status)
+	}
+
+	// Oldest records first so files are processed in creation order.
+	s = s.Order("created_at ASC")
+	s = s.Limit(100).Offset(0)
+
+	s = s.Preload("File")
+
+	if err := s.Find(&result).Error; err != nil {
+		return result, err
+	}
+
+	return result, nil
+}
diff --git a/pkg/fs/fileinfo.go b/pkg/fs/fileinfo.go
index a492cfe45..25be3123f 100644
--- a/pkg/fs/fileinfo.go
+++ b/pkg/fs/fileinfo.go
@@ -30,7 +30,7 @@ func NewFileInfo(info os.FileInfo, dir string) FileInfo {
result := FileInfo{
Name: info.Name(),
- Abs: fmt.Sprintf("%s/%s", dir, info.Name()),
+ Abs: fmt.Sprintf("%s/%s", dir, info.Name()),
Size: info.Size(),
Date: info.ModTime(),
Dir: info.IsDir(),
@@ -46,6 +46,13 @@ func (infos FileInfos) Swap(i, j int) { infos[i], infos[j] = infos[j], infos[i]
func (infos FileInfos) Less(i, j int) bool {
return strings.Compare(infos[i].Abs, infos[j].Abs) == -1
}
+// Abs returns the absolute path (Abs field) of every file info in the list.
+func (infos FileInfos) Abs() (result []string) {
+	for _, info := range infos {
+		result = append(result, info.Abs)
+	}
+
+	return result
+}
func NewFileInfos(infos []os.FileInfo, dir string) FileInfos {
var result FileInfos
diff --git a/pkg/txt/resources/stopwords.txt b/pkg/txt/resources/stopwords.txt
index b357b5e47..70877796e 100644
--- a/pkg/txt/resources/stopwords.txt
+++ b/pkg/txt/resources/stopwords.txt
@@ -1,3 +1,19 @@
+sync
+upload
+download
+temp
+user
+users
+var
+lib
+share
+thumb
+thumbs
+thumbnail
+thumbnails
+photos
+import
+export
abc
xyz
jpg
diff --git a/pkg/txt/stopwords.go b/pkg/txt/stopwords.go
index f4a64f280..b6832fe0a 100644
--- a/pkg/txt/stopwords.go
+++ b/pkg/txt/stopwords.go
@@ -3,6 +3,22 @@ package txt
// Stopwords contains a list of stopwords for full-text indexing.
var Stopwords = map[string]bool{
+ "sync": true,
+ "upload": true,
+ "download": true,
+ "temp": true,
+ "user": true,
+ "users": true,
+ "var": true,
+ "lib": true,
+ "share": true,
+ "thumb": true,
+ "thumbs": true,
+ "thumbnail": true,
+ "thumbnails": true,
+ "photos": true,
+ "import": true,
+ "export": true,
"abc": true,
"xyz": true,
"jpg": true,
|