From 45394598d9ecb03668a47d3ab22f40573344df95 Mon Sep 17 00:00:00 2001
From: ssongliu <73214554+ssongliu@users.noreply.github.com>
Date: Thu, 25 Jan 2024 11:20:42 +0800
Subject: [PATCH] fix: adjust cronjob backup account field names (#3691)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 backend/app/dto/cronjob.go | 76 ++++++++++----------
 backend/app/model/cronjob.go | 11 +--
 backend/app/repo/cronjob.go | 6 +-
 backend/app/service/backup.go | 2 +-
 backend/app/service/backup_website.go | 10 +--
 backend/app/service/cornjob.go | 28 ++------
 backend/app/service/cronjob_backup.go | 18 ++---
 backend/app/service/cronjob_helper.go | 28 ++++----
 backend/app/service/snapshot.go | 14 ++--
 backend/app/service/snapshot_create.go | 31 +-------
 backend/init/migration/migrations/v_1_9.go | 16 ++---
 cmd/server/docs/docs.go | 28 ++++----
 cmd/server/docs/swagger.json | 28 ++++----
 cmd/server/docs/swagger.yaml | 19 ++---
 frontend/src/api/interface/cronjob.ts | 17 +++--
 frontend/src/views/cronjob/index.vue | 20 +++---
 frontend/src/views/cronjob/operate/index.vue | 48 +++++--------
 17 files changed, 175 insertions(+), 225 deletions(-)

diff --git a/backend/app/dto/cronjob.go b/backend/app/dto/cronjob.go
index 7f2003bc9..030bb716a 100644
--- a/backend/app/dto/cronjob.go
+++ b/backend/app/dto/cronjob.go
@@ -7,18 +7,19 @@ type CronjobCreate struct {
 Type string `json:"type" validate:"required"`
 Spec string `json:"spec" validate:"required"`

- Script string `json:"script"`
- ContainerName string `json:"containerName"`
- AppID string `json:"appID"`
- Website string `json:"website"`
- ExclusionRules string `json:"exclusionRules"`
- DBType string `json:"dbType"`
- DBName string `json:"dbName"`
- URL string `json:"url"`
- SourceDir string `json:"sourceDir"`
- TargetDirID int `json:"targetDirID"`
- TargetAccountIDs string `json:"targetAccountIDs"`
- RetainCopies int `json:"retainCopies" validate:"number,min=1"`
+ Script string `json:"script"`
+ ContainerName string `json:"containerName"`
+ AppID string `json:"appID"`
+ Website string `json:"website"`
+ ExclusionRules string `json:"exclusionRules"`
+ DBType string `json:"dbType"`
+ DBName string `json:"dbName"`
+ URL string `json:"url"`
+ SourceDir string `json:"sourceDir"`
+
+ BackupAccounts string `json:"backupAccounts"`
+ DefaultDownload string `json:"defaultDownload"`
+ RetainCopies int `json:"retainCopies" validate:"number,min=1"`
 }

 type CronjobUpdate struct {
@@ -26,18 +27,19 @@
 Name string `json:"name" validate:"required"`
 Spec string `json:"spec" validate:"required"`

- Script string `json:"script"`
- ContainerName string `json:"containerName"`
- AppID string `json:"appID"`
- Website string `json:"website"`
- ExclusionRules string `json:"exclusionRules"`
- DBType string `json:"dbType"`
- DBName string `json:"dbName"`
- URL string `json:"url"`
- SourceDir string `json:"sourceDir"`
- TargetDirID int `json:"targetDirID"`
- TargetAccountIDs string `json:"targetAccountIDs"`
- RetainCopies int `json:"retainCopies" validate:"number,min=1"`
+ Script string `json:"script"`
+ ContainerName string `json:"containerName"`
+ AppID string `json:"appID"`
+ Website string `json:"website"`
+ ExclusionRules string `json:"exclusionRules"`
+ DBType string `json:"dbType"`
+ DBName string `json:"dbName"`
+ URL string `json:"url"`
+ SourceDir string `json:"sourceDir"`
+
+ BackupAccounts string `json:"backupAccounts"`
+ DefaultDownload string `json:"defaultDownload"`
+ RetainCopies int `json:"retainCopies" validate:"number,min=1"`
 }

 type CronjobUpdateStatus struct {
@@ -66,20 +68,18 @@ type CronjobInfo struct {
 Type string `json:"type"`
 Spec string `json:"spec"`

- Script string `json:"script"`
- ContainerName string `json:"containerName"`
- AppID string `json:"appID"`
- Website string `json:"website"`
- ExclusionRules string `json:"exclusionRules"`
- DBType string `json:"dbType"`
- DBName string `json:"dbName"`
- URL string `json:"url"`
- SourceDir string `json:"sourceDir"`
- TargetDir string `json:"targetDir"`
- TargetDirID int `json:"targetDirID"`
- TargetAccounts string `json:"targetAccounts"`
- TargetAccountIDs string `json:"targetAccountIDs"`
- RetainCopies int `json:"retainCopies"`
+ Script string `json:"script"`
+ ContainerName string `json:"containerName"`
+ AppID string `json:"appID"`
+ Website string `json:"website"`
+ ExclusionRules string `json:"exclusionRules"`
+ DBType string `json:"dbType"`
+ DBName string `json:"dbName"`
+ URL string `json:"url"`
+ SourceDir string `json:"sourceDir"`
+ BackupAccounts string `json:"backupAccounts"`
+ DefaultDownload string `json:"defaultDownload"`
+ RetainCopies int `json:"retainCopies"`
 LastRecordTime string `json:"lastRecordTime"`
 Status string `json:"status"`
diff --git a/backend/app/model/cronjob.go b/backend/app/model/cronjob.go
index 03b787090..137968990 100644
--- a/backend/app/model/cronjob.go
+++ b/backend/app/model/cronjob.go
@@ -19,10 +19,13 @@
 SourceDir string `gorm:"type:varchar(256)" json:"sourceDir"`
 ExclusionRules string `gorm:"longtext" json:"exclusionRules"`

- KeepLocal bool `gorm:"type:varchar(64)" json:"keepLocal"`
- TargetDirID uint64 `gorm:"type:decimal" json:"targetDirID"`
- TargetAccountIDs string `gorm:"type:varchar(64)" json:"targetAccountIDs"`
- RetainCopies uint64 `gorm:"type:decimal" json:"retainCopies"`
+ // Deprecated
+ KeepLocal bool `gorm:"type:varchar(64)" json:"keepLocal"`
+ TargetDirID uint64 `gorm:"type:decimal" json:"targetDirID"`
+
+ BackupAccounts string `gorm:"type:varchar(64)" json:"backupAccounts"`
+ DefaultDownload string `gorm:"type:varchar(64)" json:"defaultDownload"`
+ RetainCopies uint64 `gorm:"type:decimal" json:"retainCopies"`

 Status string `gorm:"type:varchar(64)" json:"status"`
 EntryIDs string `gorm:"type:varchar(64)" json:"entryIDs"`
diff --git a/backend/app/repo/cronjob.go b/backend/app/repo/cronjob.go
index 272c8984f..684164661 100644
--- a/backend/app/repo/cronjob.go
+++ b/backend/app/repo/cronjob.go
@@ -20,7 +20,7 @@ type ICronjobRepo interface {
 Page(limit, offset int, opts ...DBOption) (int64, []model.Cronjob, error)
 Create(cronjob *model.Cronjob) error
 WithByJobID(id int) DBOption
- WithByBackupID(id uint) DBOption
+ WithByDefaultDownload(account string) DBOption
 WithByRecordDropID(id int) DBOption
 WithByRecordFile(file string) DBOption
 Save(id uint, cronjob model.Cronjob) error
@@ -117,9 +117,9 @@ func (c *CronjobRepo) WithByJobID(id int) DBOption {
 }
 }

-func (c *CronjobRepo) WithByBackupID(id uint) DBOption {
+func (c *CronjobRepo) WithByDefaultDownload(account string) DBOption {
 return func(g *gorm.DB) *gorm.DB {
- return g.Where("target_dir_id = ?", id)
+ return g.Where("default_download = ?", account)
 }
 }
diff --git a/backend/app/service/backup.go b/backend/app/service/backup.go
index 301a62cf0..8a8b677a9 100644
--- a/backend/app/service/backup.go
+++ b/backend/app/service/backup.go
@@ -290,7 +290,7 @@ func (u *BackupService) Delete(id uint) error {
 if
backup.Type == constant.OneDrive { global.Cron.Remove(global.OneDriveCronID) } - cronjobs, _ := cronjobRepo.List(cronjobRepo.WithByBackupID(id)) + cronjobs, _ := cronjobRepo.List(cronjobRepo.WithByDefaultDownload(backup.Type)) if len(cronjobs) != 0 { return buserr.New(constant.ErrBackupInUsed) } diff --git a/backend/app/service/backup_website.go b/backend/app/service/backup_website.go index 7ec91e8a7..9c6f16d77 100644 --- a/backend/app/service/backup_website.go +++ b/backend/app/service/backup_website.go @@ -34,7 +34,7 @@ func (u *BackupService) WebsiteBackup(req dto.CommonBackup) error { itemDir := fmt.Sprintf("website/%s", req.Name) backupDir := path.Join(localDir, itemDir) fileName := fmt.Sprintf("%s_%s.tar.gz", website.PrimaryDomain, timeNow) - if err := handleWebsiteBackup(&website, backupDir, fileName); err != nil { + if err := handleWebsiteBackup(&website, backupDir, fileName, ""); err != nil { return err } @@ -106,7 +106,7 @@ func handleWebsiteRecover(website *model.Website, recoverFile string, isRollback isOk := false if !isRollback { rollbackFile := path.Join(global.CONF.System.TmpDir, fmt.Sprintf("website/%s_%s.tar.gz", website.Alias, time.Now().Format("20060102150405"))) - if err := handleWebsiteBackup(website, path.Dir(rollbackFile), path.Base(rollbackFile)); err != nil { + if err := handleWebsiteBackup(website, path.Dir(rollbackFile), path.Base(rollbackFile), ""); err != nil { return fmt.Errorf("backup website %s for rollback before recover failed, err: %v", website.Alias, err) } defer func() { @@ -181,7 +181,7 @@ func handleWebsiteRecover(website *model.Website, recoverFile string, isRollback return nil } -func handleWebsiteBackup(website *model.Website, backupDir, fileName string) error { +func handleWebsiteBackup(website *model.Website, backupDir, fileName, exclusionRules string) error { fileOp := files.NewFileOp() tmpDir := fmt.Sprintf("%s/%s", backupDir, strings.ReplaceAll(fileName, ".tar.gz", "")) if !fileOp.Stat(tmpDir) { @@ -233,11 +233,11 @@ func handleWebsiteBackup(website *model.Website, backupDir, fileName string) err } websiteDir := fmt.Sprintf("%s/openresty/%s/www/sites/%s", constant.AppInstallDir, nginxInfo.Name, website.Alias) - if err := handleTar(websiteDir, tmpDir, fmt.Sprintf("%s.web.tar.gz", website.Alias), ""); err != nil { + if err := handleTar(websiteDir, tmpDir, fmt.Sprintf("%s.web.tar.gz", website.Alias), exclusionRules); err != nil { return err } global.LOG.Info("put web.tar.gz into tmp dir successful, now start to tar tmp dir") - if err := handleTar(tmpDir, backupDir, fileName, ""); err != nil { + if err := handleTar(tmpDir, backupDir, fileName, exclusionRules); err != nil { return err } diff --git a/backend/app/service/cornjob.go b/backend/app/service/cornjob.go index 82684873e..909f1554a 100644 --- a/backend/app/service/cornjob.go +++ b/backend/app/service/cornjob.go @@ -43,32 +43,11 @@ func NewICronjobService() ICronjobService { func (u *CronjobService) SearchWithPage(search dto.SearchWithPage) (int64, interface{}, error) { total, cronjobs, err := cronjobRepo.Page(search.Page, search.PageSize, commonRepo.WithLikeName(search.Info), commonRepo.WithOrderRuleBy(search.OrderBy, search.Order)) var dtoCronjobs []dto.CronjobInfo - accounts, _ := backupRepo.List() for _, cronjob := range cronjobs { var item dto.CronjobInfo if err := copier.Copy(&item, &cronjob); err != nil { return 0, nil, errors.WithMessage(constant.ErrStructTransform, err.Error()) } - if hasBackup(item.Type) { - for _, account := range accounts { - if int(account.ID) == 
item.TargetDirID { - item.TargetDir = account.Type - } - } - itemAccounts := strings.Split(item.TargetAccountIDs, ",") - var targetAccounts []string - for _, itemAccount := range itemAccounts { - for _, account := range accounts { - if itemAccount == fmt.Sprintf("%d", account.ID) { - targetAccounts = append(targetAccounts, account.Type) - break - } - } - } - item.TargetAccounts = strings.Join(targetAccounts, ",") - } else { - item.TargetDir = "-" - } record, _ := cronjobRepo.RecordFirst(cronjob.ID) if record.ID != 0 { item.LastRecordTime = record.StartTime.Format("2006-01-02 15:04:05") @@ -120,7 +99,7 @@ func (u *CronjobService) CleanRecord(req dto.CronjobClean) error { } if req.CleanData { if hasBackup(cronjob.Type) { - accountMap, err := u.loadClientMap(cronjob.TargetAccountIDs) + accountMap, err := loadClientMap(cronjob.BackupAccounts) if err != nil { return err } @@ -294,8 +273,9 @@ func (u *CronjobService) Update(id uint, req dto.CronjobUpdate) error { upMap["db_name"] = req.DBName upMap["url"] = req.URL upMap["source_dir"] = req.SourceDir - upMap["target_dir_id"] = req.TargetDirID - upMap["target_account_ids"] = req.TargetAccountIDs + + upMap["backup_accounts"] = req.BackupAccounts + upMap["default_download"] = req.DefaultDownload upMap["retain_copies"] = req.RetainCopies return cronjobRepo.Update(id, upMap) } diff --git a/backend/app/service/cronjob_backup.go b/backend/app/service/cronjob_backup.go index d40f877cf..61fea19b6 100644 --- a/backend/app/service/cronjob_backup.go +++ b/backend/app/service/cronjob_backup.go @@ -26,7 +26,7 @@ func (u *CronjobService) handleApp(cronjob model.Cronjob, startTime time.Time) e } apps = append(apps, app) } - accountMap, err := u.loadClientMap(cronjob.TargetAccountIDs) + accountMap, err := loadClientMap(cronjob.BackupAccounts) if err != nil { return err } @@ -59,7 +59,7 @@ func (u *CronjobService) handleApp(cronjob model.Cronjob, startTime time.Time) e func (u *CronjobService) handleWebsite(cronjob model.Cronjob, startTime time.Time) error { webs := loadWebsForJob(cronjob) - accountMap, err := u.loadClientMap(cronjob.TargetAccountIDs) + accountMap, err := loadClientMap(cronjob.BackupAccounts) if err != nil { return err } @@ -73,7 +73,7 @@ func (u *CronjobService) handleWebsite(cronjob model.Cronjob, startTime time.Tim record.Source, record.BackupType = loadRecordPath(cronjob, accountMap) backupDir := path.Join(global.CONF.System.TmpDir, fmt.Sprintf("website/%s", web.PrimaryDomain)) record.FileName = fmt.Sprintf("website_%s_%s.tar.gz", web.PrimaryDomain, startTime.Format("20060102150405")) - if err := handleWebsiteBackup(&web, backupDir, record.FileName); err != nil { + if err := handleWebsiteBackup(&web, backupDir, record.FileName, cronjob.ExclusionRules); err != nil { return err } downloadPath, err := u.uploadCronjobBackFile(cronjob, accountMap, path.Join(backupDir, record.FileName)) @@ -92,7 +92,7 @@ func (u *CronjobService) handleWebsite(cronjob model.Cronjob, startTime time.Tim func (u *CronjobService) handleDatabase(cronjob model.Cronjob, startTime time.Time) error { dbs := loadDbsForJob(cronjob) - accountMap, err := u.loadClientMap(cronjob.TargetAccountIDs) + accountMap, err := loadClientMap(cronjob.BackupAccounts) if err != nil { return err } @@ -132,7 +132,7 @@ func (u *CronjobService) handleDatabase(cronjob model.Cronjob, startTime time.Ti } func (u *CronjobService) handleDirectory(cronjob model.Cronjob, startTime time.Time) error { - accountMap, err := u.loadClientMap(cronjob.TargetAccountIDs) + accountMap, err := 
loadClientMap(cronjob.BackupAccounts) if err != nil { return err } @@ -163,7 +163,7 @@ func (u *CronjobService) handleDirectory(cronjob model.Cronjob, startTime time.T } func (u *CronjobService) handleSystemLog(cronjob model.Cronjob, startTime time.Time) error { - accountMap, err := u.loadClientMap(cronjob.TargetAccountIDs) + accountMap, err := loadClientMap(cronjob.BackupAccounts) if err != nil { return err } @@ -194,7 +194,7 @@ func (u *CronjobService) handleSystemLog(cronjob model.Cronjob, startTime time.T } func (u *CronjobService) handleSnapshot(cronjob model.Cronjob, startTime time.Time, logPath string) error { - accountMap, err := u.loadClientMap(cronjob.TargetAccountIDs) + accountMap, err := loadClientMap(cronjob.BackupAccounts) if err != nil { return err } @@ -288,8 +288,8 @@ func loadWebsForJob(cronjob model.Cronjob) []model.Website { } func loadRecordPath(cronjob model.Cronjob, accountMap map[string]cronjobUploadHelper) (string, string) { - source := accountMap[fmt.Sprintf("%v", cronjob.TargetDirID)].backType - targets := strings.Split(cronjob.TargetAccountIDs, ",") + source := accountMap[fmt.Sprintf("%v", cronjob.DefaultDownload)].backType + targets := strings.Split(cronjob.BackupAccounts, ",") var itemAccounts []string for _, target := range targets { if len(target) == 0 { diff --git a/backend/app/service/cronjob_helper.go b/backend/app/service/cronjob_helper.go index e031c4de6..1f6976803 100644 --- a/backend/app/service/cronjob_helper.go +++ b/backend/app/service/cronjob_helper.go @@ -247,19 +247,19 @@ func (u *CronjobService) handleSystemClean() (string, error) { return NewIDeviceService().CleanForCronjob() } -func (u *CronjobService) loadClientMap(targetAccountIDs string) (map[string]cronjobUploadHelper, error) { +func loadClientMap(backupAccounts string) (map[string]cronjobUploadHelper, error) { clients := make(map[string]cronjobUploadHelper) accounts, err := backupRepo.List() if err != nil { return nil, err } - targets := strings.Split(targetAccountIDs, ",") + targets := strings.Split(backupAccounts, ",") for _, target := range targets { if len(target) == 0 { continue } for _, account := range accounts { - if target == fmt.Sprintf("%v", account.ID) { + if target == account.Type { client, err := NewIBackupService().NewClient(&account) if err != nil { return nil, err @@ -286,11 +286,11 @@ func (u *CronjobService) uploadCronjobBackFile(cronjob model.Cronjob, accountMap defer func() { _ = os.Remove(file) }() - targets := strings.Split(cronjob.TargetAccountIDs, ",") + accounts := strings.Split(cronjob.BackupAccounts, ",") cloudSrc := strings.TrimPrefix(file, global.CONF.System.TmpDir+"/") - for _, target := range targets { - if len(target) != 0 { - if _, err := accountMap[target].client.Upload(file, path.Join(accountMap[target].backupPath, cloudSrc)); err != nil { + for _, account := range accounts { + if len(account) != 0 { + if _, err := accountMap[account].client.Upload(file, path.Join(accountMap[account].backupPath, cloudSrc)); err != nil { return "", err } } @@ -314,18 +314,18 @@ func (u *CronjobService) removeExpiredBackup(cronjob model.Cronjob, accountMap m return } for i := int(cronjob.RetainCopies); i < len(records); i++ { - targets := strings.Split(cronjob.TargetAccountIDs, ",") + accounts := strings.Split(cronjob.BackupAccounts, ",") if cronjob.Type == "snapshot" { - for _, target := range targets { - if len(target) != 0 { - _, _ = accountMap[target].client.Delete(path.Join(accountMap[target].backupPath, "system_snapshot", records[i].FileName)) + for _, account := 
range accounts { + if len(account) != 0 { + _, _ = accountMap[account].client.Delete(path.Join(accountMap[account].backupPath, "system_snapshot", records[i].FileName)) } } _ = snapshotRepo.Delete(commonRepo.WithByName(strings.TrimSuffix(records[i].FileName, ".tar.gz"))) } else { - for _, target := range targets { - if len(target) != 0 { - _, _ = accountMap[target].client.Delete(path.Join(accountMap[target].backupPath, records[i].FileDir, records[i].FileName)) + for _, account := range accounts { + if len(account) != 0 { + _, _ = accountMap[account].client.Delete(path.Join(accountMap[account].backupPath, records[i].FileDir, records[i].FileName)) } } } diff --git a/backend/app/service/snapshot.go b/backend/app/service/snapshot.go index 227f1827e..1129d21cd 100644 --- a/backend/app/service/snapshot.go +++ b/backend/app/service/snapshot.go @@ -489,11 +489,12 @@ func (u *SnapshotService) HandleSnapshot(isCronjob bool, logPath string, req dto rootDir = path.Join(localDir, "system", name) snap = model.Snapshot{ - Name: name, - Description: req.Description, - From: req.From, - Version: versionItem.Value, - Status: constant.StatusWaiting, + Name: name, + Description: req.Description, + From: req.From, + DefaultDownload: req.DefaultDownload, + Version: versionItem.Value, + Status: constant.StatusWaiting, } _ = snapshotRepo.Create(&snap) snapStatus.SnapID = snap.ID @@ -577,18 +578,21 @@ func (u *SnapshotService) HandleSnapshot(isCronjob bool, logPath string, req dto loadLogByStatus(snapStatus, logPath) return snap.Name, fmt.Errorf("snapshot %s backup failed", snap.Name) } + loadLogByStatus(snapStatus, logPath) snapPanelData(itemHelper, localDir, backupPanelDir) if snapStatus.PanelData != constant.StatusDone { _ = snapshotRepo.Update(snap.ID, map[string]interface{}{"status": constant.StatusFailed}) loadLogByStatus(snapStatus, logPath) return snap.Name, fmt.Errorf("snapshot %s 1panel data failed", snap.Name) } + loadLogByStatus(snapStatus, logPath) snapCompress(itemHelper, rootDir) if snapStatus.Compress != constant.StatusDone { _ = snapshotRepo.Update(snap.ID, map[string]interface{}{"status": constant.StatusFailed}) loadLogByStatus(snapStatus, logPath) return snap.Name, fmt.Errorf("snapshot %s compress failed", snap.Name) } + loadLogByStatus(snapStatus, logPath) snapUpload(itemHelper, req.From, fmt.Sprintf("%s.tar.gz", rootDir)) if snapStatus.Upload != constant.StatusDone { _ = snapshotRepo.Update(snap.ID, map[string]interface{}{"status": constant.StatusFailed}) diff --git a/backend/app/service/snapshot_create.go b/backend/app/service/snapshot_create.go index a13317a67..0be1fd4fd 100644 --- a/backend/app/service/snapshot_create.go +++ b/backend/app/service/snapshot_create.go @@ -183,7 +183,7 @@ func snapUpload(snap snapHelper, accounts string, file string) { }() _ = snapshotRepo.UpdateStatus(snap.Status.ID, map[string]interface{}{"upload": constant.StatusUploading}) - accountMap, err := loadClientMapForSnapshot(accounts) + accountMap, err := loadClientMap(accounts) if err != nil { snap.Status.Upload = err.Error() _ = snapshotRepo.UpdateStatus(snap.Status.ID, map[string]interface{}{"upload": err.Error()}) @@ -237,32 +237,3 @@ func checkPointOfWal() { global.LOG.Errorf("handle check point failed, err: %v", err) } } - -func loadClientMapForSnapshot(from string) (map[string]cronjobUploadHelper, error) { - clients := make(map[string]cronjobUploadHelper) - accounts, err := backupRepo.List() - if err != nil { - return nil, err - } - targets := strings.Split(from, ",") - for _, target := range targets { - if 
len(target) == 0 { - continue - } - for _, account := range accounts { - if target == fmt.Sprintf("%v", account.ID) { - client, err := NewIBackupService().NewClient(&account) - if err != nil { - return nil, err - } - pathItem := account.BackupPath - clients[target] = cronjobUploadHelper{ - client: client, - backupPath: pathItem, - backType: account.Type, - } - } - } - } - return clients, nil -} diff --git a/backend/init/migration/migrations/v_1_9.go b/backend/init/migration/migrations/v_1_9.go index 36dd70811..cf2c69117 100644 --- a/backend/init/migration/migrations/v_1_9.go +++ b/backend/init/migration/migrations/v_1_9.go @@ -271,31 +271,22 @@ var UpdateCronjobSpec = &gormigrate.Migration{ var ( jobs []model.Cronjob backupAccounts []model.BackupAccount - localAccountID uint ) mapAccount := make(map[uint]string) - mapAccountName := make(map[string]model.BackupAccount) if err := tx.Find(&jobs).Error; err != nil { return err } _ = tx.Find(&backupAccounts).Error for _, item := range backupAccounts { mapAccount[item.ID] = item.Type - mapAccountName[item.Type] = item - if item.Type == constant.Local { - localAccountID = item.ID - } - } - if localAccountID == 0 { - return errors.New("local backup account is unset!") } for _, job := range jobs { if job.KeepLocal { if err := tx.Model(&model.Cronjob{}). Where("id = ?", job.ID). Updates(map[string]interface{}{ - "target_account_ids": fmt.Sprintf("%v,%v", job.TargetDirID, localAccountID), - "target_dir_id": localAccountID, + "backup_accounts": fmt.Sprintf("%v,%v", mapAccount[uint(job.TargetDirID)], constant.Local), + "default_download": constant.Local, }).Error; err != nil { return err } @@ -303,7 +294,8 @@ var UpdateCronjobSpec = &gormigrate.Migration{ if err := tx.Model(&model.Cronjob{}). Where("id = ?", job.ID). 
Updates(map[string]interface{}{ - "target_account_ids": job.TargetDirID, + "backup_accounts": mapAccount[uint(job.TargetDirID)], + "default_download": mapAccount[uint(job.TargetDirID)], }).Error; err != nil { return err } diff --git a/cmd/server/docs/docs.go b/cmd/server/docs/docs.go index fe2758d33..d1c78bba0 100644 --- a/cmd/server/docs/docs.go +++ b/cmd/server/docs/docs.go @@ -14958,6 +14958,9 @@ const docTemplate = `{ "appID": { "type": "string" }, + "backupAccounts": { + "type": "string" + }, "containerName": { "type": "string" }, @@ -14967,6 +14970,9 @@ const docTemplate = `{ "dbType": { "type": "string" }, + "defaultDownload": { + "type": "string" + }, "exclusionRules": { "type": "string" }, @@ -14986,12 +14992,6 @@ const docTemplate = `{ "spec": { "type": "string" }, - "targetAccountIDs": { - "type": "string" - }, - "targetDirID": { - "type": "integer" - }, "type": { "type": "string" }, @@ -15029,6 +15029,9 @@ const docTemplate = `{ "appID": { "type": "string" }, + "backupAccounts": { + "type": "string" + }, "containerName": { "type": "string" }, @@ -15038,6 +15041,9 @@ const docTemplate = `{ "dbType": { "type": "string" }, + "defaultDownload": { + "type": "string" + }, "exclusionRules": { "type": "string" }, @@ -15060,12 +15066,6 @@ const docTemplate = `{ "spec": { "type": "string" }, - "targetAccountIDs": { - "type": "string" - }, - "targetDirID": { - "type": "integer" - }, "url": { "type": "string" }, @@ -17892,9 +17892,13 @@ const docTemplate = `{ "dto.SnapshotCreate": { "type": "object", "required": [ + "defaultDownload", "from" ], "properties": { + "defaultDownload": { + "type": "string" + }, "description": { "type": "string", "maxLength": 256 diff --git a/cmd/server/docs/swagger.json b/cmd/server/docs/swagger.json index 50d1c909d..02c83b4bf 100644 --- a/cmd/server/docs/swagger.json +++ b/cmd/server/docs/swagger.json @@ -14951,6 +14951,9 @@ "appID": { "type": "string" }, + "backupAccounts": { + "type": "string" + }, "containerName": { "type": "string" }, @@ -14960,6 +14963,9 @@ "dbType": { "type": "string" }, + "defaultDownload": { + "type": "string" + }, "exclusionRules": { "type": "string" }, @@ -14979,12 +14985,6 @@ "spec": { "type": "string" }, - "targetAccountIDs": { - "type": "string" - }, - "targetDirID": { - "type": "integer" - }, "type": { "type": "string" }, @@ -15022,6 +15022,9 @@ "appID": { "type": "string" }, + "backupAccounts": { + "type": "string" + }, "containerName": { "type": "string" }, @@ -15031,6 +15034,9 @@ "dbType": { "type": "string" }, + "defaultDownload": { + "type": "string" + }, "exclusionRules": { "type": "string" }, @@ -15053,12 +15059,6 @@ "spec": { "type": "string" }, - "targetAccountIDs": { - "type": "string" - }, - "targetDirID": { - "type": "integer" - }, "url": { "type": "string" }, @@ -17885,9 +17885,13 @@ "dto.SnapshotCreate": { "type": "object", "required": [ + "defaultDownload", "from" ], "properties": { + "defaultDownload": { + "type": "string" + }, "description": { "type": "string", "maxLength": 256 diff --git a/cmd/server/docs/swagger.yaml b/cmd/server/docs/swagger.yaml index 2ad3cd1f1..fc0f062dc 100644 --- a/cmd/server/docs/swagger.yaml +++ b/cmd/server/docs/swagger.yaml @@ -580,12 +580,16 @@ definitions: properties: appID: type: string + backupAccounts: + type: string containerName: type: string dbName: type: string dbType: type: string + defaultDownload: + type: string exclusionRules: type: string name: @@ -599,10 +603,6 @@ definitions: type: string spec: type: string - targetAccountIDs: - type: string - targetDirID: - type: 
integer type: type: string url: @@ -628,12 +628,16 @@ definitions: properties: appID: type: string + backupAccounts: + type: string containerName: type: string dbName: type: string dbType: type: string + defaultDownload: + type: string exclusionRules: type: string id: @@ -649,10 +653,6 @@ definitions: type: string spec: type: string - targetAccountIDs: - type: string - targetDirID: - type: integer url: type: string website: @@ -2564,6 +2564,8 @@ definitions: type: object dto.SnapshotCreate: properties: + defaultDownload: + type: string description: maxLength: 256 type: string @@ -2572,6 +2574,7 @@ definitions: id: type: integer required: + - defaultDownload - from type: object dto.SnapshotImport: diff --git a/frontend/src/api/interface/cronjob.ts b/frontend/src/api/interface/cronjob.ts index 0e84ec9f7..9b8de3a22 100644 --- a/frontend/src/api/interface/cronjob.ts +++ b/frontend/src/api/interface/cronjob.ts @@ -18,9 +18,10 @@ export namespace Cronjob { dbName: string; url: string; sourceDir: string; - targetDirID: number; - targetAccountIDs: string; - targetAccountIDList: Array; + + backupAccounts: string; + defaultDownload: string; + backupAccountList: Array; retainCopies: number; status: string; } @@ -37,8 +38,9 @@ export namespace Cronjob { dbName: string; url: string; sourceDir: string; - targetDirID: number; - targetAccountIDs: string; + + backupAccounts: string; + defaultDownload: string; retainCopies: number; } export interface SpecObj { @@ -60,8 +62,9 @@ export namespace Cronjob { dbName: string; url: string; sourceDir: string; - targetDirID: number; - targetAccountIDs: string; + + backupAccounts: string; + defaultDownload: string; retainCopies: number; } export interface CronjobDelete { diff --git a/frontend/src/views/cronjob/index.vue b/frontend/src/views/cronjob/index.vue index 149e1578a..82c58875b 100644 --- a/frontend/src/views/cronjob/index.vue +++ b/frontend/src/views/cronjob/index.vue @@ -113,12 +113,12 @@ {{ row.lastRecordTime }} - +