1
0
mirror of https://github.com/1Panel-dev/1Panel.git synced 2025-01-31 14:08:06 +08:00

feat: 计划任务支持备份到多个备份账号 (#3689)

This commit is contained in:
ssongliu 2024-01-24 17:41:56 +08:00 committed by GitHub
parent 0f53de56a7
commit edd6b52f05
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
42 changed files with 1479 additions and 1027 deletions

View File

@ -162,6 +162,32 @@ func (b *BaseApi) SearchBackupRecords(c *gin.Context) {
}) })
} }
// @Tags Backup Account
// @Summary Page backup records by cronjob
// @Description 通过计划任务获取备份记录列表分页
// @Accept json
// @Param request body dto.RecordSearchByCronjob true "request"
// @Success 200
// @Security ApiKeyAuth
// @Router /settings/backup/record/search/bycronjob [post]
func (b *BaseApi) SearchBackupRecordsByCronjob(c *gin.Context) {
	// Bind and validate the paging request; helper writes the error response itself.
	var req dto.RecordSearchByCronjob
	if err := helper.CheckBindAndValidate(&req, c); err != nil {
		return
	}

	// Delegate the paged lookup of this cronjob's backup records to the service layer.
	total, items, err := backupService.SearchRecordsByCronjobWithPage(req)
	if err != nil {
		helper.ErrorWithDetail(c, constant.CodeErrInternalServer, constant.ErrTypeInternalServer, err)
		return
	}

	helper.SuccessWithData(c, dto.PageResult{Total: total, Items: items})
}
// @Tags Backup Account // @Tags Backup Account
// @Summary Download backup record // @Summary Download backup record
// @Description 下载备份记录 // @Description 下载备份记录
@ -345,14 +371,12 @@ func (b *BaseApi) Recover(c *gin.Context) {
return return
} }
if req.Source != "LOCAL" { downloadPath, err := backupService.DownloadRecord(dto.DownloadRecord{Source: req.Source, FileDir: path.Dir(req.File), FileName: path.Base(req.File)})
downloadPath, err := backupService.DownloadRecord(dto.DownloadRecord{Source: req.Source, FileDir: path.Dir(req.File), FileName: path.Base(req.File)}) if err != nil {
if err != nil { helper.ErrorWithDetail(c, constant.CodeErrBadRequest, constant.ErrTypeInvalidParams, fmt.Errorf("download file failed, err: %v", err))
helper.ErrorWithDetail(c, constant.CodeErrBadRequest, constant.ErrTypeInvalidParams, fmt.Errorf("download file failed, err: %v", err)) return
return
}
req.File = downloadPath
} }
req.File = downloadPath
switch req.Type { switch req.Type {
case "mysql", "mariadb": case "mysql", "mariadb":
if err := backupService.MysqlRecover(req); err != nil { if err := backupService.MysqlRecover(req); err != nil {

View File

@ -51,6 +51,11 @@ type RecordSearch struct {
DetailName string `json:"detailName"` DetailName string `json:"detailName"`
} }
// RecordSearchByCronjob is the request payload for paging the backup
// records that were produced by a single cronjob.
type RecordSearchByCronjob struct {
	PageInfo
	// CronjobID identifies the cronjob whose backup records are listed.
	CronjobID uint `json:"cronjobID" validate:"required"`
}
type BackupRecords struct { type BackupRecords struct {
ID uint `json:"id"` ID uint `json:"id"`
CreatedAt time.Time `json:"createdAt"` CreatedAt time.Time `json:"createdAt"`

View File

@ -7,18 +7,18 @@ type CronjobCreate struct {
Type string `json:"type" validate:"required"` Type string `json:"type" validate:"required"`
Spec string `json:"spec" validate:"required"` Spec string `json:"spec" validate:"required"`
Script string `json:"script"` Script string `json:"script"`
ContainerName string `json:"containerName"` ContainerName string `json:"containerName"`
AppID string `json:"appID"` AppID string `json:"appID"`
Website string `json:"website"` Website string `json:"website"`
ExclusionRules string `json:"exclusionRules"` ExclusionRules string `json:"exclusionRules"`
DBType string `json:"dbType"` DBType string `json:"dbType"`
DBName string `json:"dbName"` DBName string `json:"dbName"`
URL string `json:"url"` URL string `json:"url"`
SourceDir string `json:"sourceDir"` SourceDir string `json:"sourceDir"`
KeepLocal bool `json:"keepLocal"` TargetDirID int `json:"targetDirID"`
TargetDirID int `json:"targetDirID"` TargetAccountIDs string `json:"targetAccountIDs"`
RetainCopies int `json:"retainCopies" validate:"number,min=1"` RetainCopies int `json:"retainCopies" validate:"number,min=1"`
} }
type CronjobUpdate struct { type CronjobUpdate struct {
@ -26,18 +26,18 @@ type CronjobUpdate struct {
Name string `json:"name" validate:"required"` Name string `json:"name" validate:"required"`
Spec string `json:"spec" validate:"required"` Spec string `json:"spec" validate:"required"`
Script string `json:"script"` Script string `json:"script"`
ContainerName string `json:"containerName"` ContainerName string `json:"containerName"`
AppID string `json:"appID"` AppID string `json:"appID"`
Website string `json:"website"` Website string `json:"website"`
ExclusionRules string `json:"exclusionRules"` ExclusionRules string `json:"exclusionRules"`
DBType string `json:"dbType"` DBType string `json:"dbType"`
DBName string `json:"dbName"` DBName string `json:"dbName"`
URL string `json:"url"` URL string `json:"url"`
SourceDir string `json:"sourceDir"` SourceDir string `json:"sourceDir"`
KeepLocal bool `json:"keepLocal"` TargetDirID int `json:"targetDirID"`
TargetDirID int `json:"targetDirID"` TargetAccountIDs string `json:"targetAccountIDs"`
RetainCopies int `json:"retainCopies" validate:"number,min=1"` RetainCopies int `json:"retainCopies" validate:"number,min=1"`
} }
type CronjobUpdateStatus struct { type CronjobUpdateStatus struct {
@ -66,19 +66,20 @@ type CronjobInfo struct {
Type string `json:"type"` Type string `json:"type"`
Spec string `json:"spec"` Spec string `json:"spec"`
Script string `json:"script"` Script string `json:"script"`
ContainerName string `json:"containerName"` ContainerName string `json:"containerName"`
AppID string `json:"appID"` AppID string `json:"appID"`
Website string `json:"website"` Website string `json:"website"`
ExclusionRules string `json:"exclusionRules"` ExclusionRules string `json:"exclusionRules"`
DBType string `json:"dbType"` DBType string `json:"dbType"`
DBName string `json:"dbName"` DBName string `json:"dbName"`
URL string `json:"url"` URL string `json:"url"`
SourceDir string `json:"sourceDir"` SourceDir string `json:"sourceDir"`
KeepLocal bool `json:"keepLocal"` TargetDir string `json:"targetDir"`
TargetDir string `json:"targetDir"` TargetDirID int `json:"targetDirID"`
TargetDirID int `json:"targetDirID"` TargetAccounts string `json:"targetAccounts"`
RetainCopies int `json:"retainCopies"` TargetAccountIDs string `json:"targetAccountIDs"`
RetainCopies int `json:"retainCopies"`
LastRecordTime string `json:"lastRecordTime"` LastRecordTime string `json:"lastRecordTime"`
Status string `json:"status"` Status string `json:"status"`

View File

@ -100,7 +100,7 @@ type SnapshotStatus struct {
type SnapshotCreate struct { type SnapshotCreate struct {
ID uint `json:"id"` ID uint `json:"id"`
From string `json:"from" validate:"required,oneof=OSS S3 SFTP MINIO COS KODO OneDrive WebDAV"` From string `json:"from" validate:"required"`
Description string `json:"description" validate:"max=256"` Description string `json:"description" validate:"max=256"`
} }
type SnapshotRecover struct { type SnapshotRecover struct {

View File

@ -12,6 +12,8 @@ type BackupAccount struct {
type BackupRecord struct { type BackupRecord struct {
BaseModel BaseModel
From string `gorm:"type:varchar(64)" json:"from"`
CronjobID uint `gorm:"type:decimal" json:"cronjobID"`
Type string `gorm:"type:varchar(64);not null" json:"type"` Type string `gorm:"type:varchar(64);not null" json:"type"`
Name string `gorm:"type:varchar(64);not null" json:"name"` Name string `gorm:"type:varchar(64);not null" json:"name"`
DetailName string `gorm:"type:varchar(256)" json:"detailName"` DetailName string `gorm:"type:varchar(256)" json:"detailName"`

View File

@ -19,9 +19,10 @@ type Cronjob struct {
SourceDir string `gorm:"type:varchar(256)" json:"sourceDir"` SourceDir string `gorm:"type:varchar(256)" json:"sourceDir"`
ExclusionRules string `gorm:"longtext" json:"exclusionRules"` ExclusionRules string `gorm:"longtext" json:"exclusionRules"`
KeepLocal bool `gorm:"type:varchar(64)" json:"keepLocal"` KeepLocal bool `gorm:"type:varchar(64)" json:"keepLocal"`
TargetDirID uint64 `gorm:"type:decimal" json:"targetDirID"` TargetDirID uint64 `gorm:"type:decimal" json:"targetDirID"`
RetainCopies uint64 `gorm:"type:decimal" json:"retainCopies"` TargetAccountIDs string `gorm:"type:varchar(64)" json:"targetAccountIDs"`
RetainCopies uint64 `gorm:"type:decimal" json:"retainCopies"`
Status string `gorm:"type:varchar(64)" json:"status"` Status string `gorm:"type:varchar(64)" json:"status"`
EntryIDs string `gorm:"type:varchar(64)" json:"entryIDs"` EntryIDs string `gorm:"type:varchar(64)" json:"entryIDs"`

View File

@ -2,6 +2,7 @@ package repo
import ( import (
"context" "context"
"github.com/1Panel-dev/1Panel/backend/app/model" "github.com/1Panel-dev/1Panel/backend/app/model"
"github.com/1Panel-dev/1Panel/backend/global" "github.com/1Panel-dev/1Panel/backend/global"
"gorm.io/gorm" "gorm.io/gorm"
@ -23,6 +24,7 @@ type IBackupRepo interface {
WithByDetailName(detailName string) DBOption WithByDetailName(detailName string) DBOption
WithByFileName(fileName string) DBOption WithByFileName(fileName string) DBOption
WithByType(backupType string) DBOption WithByType(backupType string) DBOption
WithByCronID(cronjobID uint) DBOption
} }
func NewIBackupRepo() IBackupRepo { func NewIBackupRepo() IBackupRepo {
@ -125,3 +127,9 @@ func (u *BackupRepo) Delete(opts ...DBOption) error {
func (u *BackupRepo) DeleteRecord(ctx context.Context, opts ...DBOption) error { func (u *BackupRepo) DeleteRecord(ctx context.Context, opts ...DBOption) error {
return getTx(ctx, opts...).Delete(&model.BackupRecord{}).Error return getTx(ctx, opts...).Delete(&model.BackupRecord{}).Error
} }
// WithByCronID returns a DBOption that scopes a query to backup records
// created by the cronjob with the given ID (matches the cronjob_id column).
func (u *BackupRepo) WithByCronID(cronjobID uint) DBOption {
	return func(g *gorm.DB) *gorm.DB {
		return g.Where("cronjob_id = ?", cronjobID)
	}
}

View File

@ -28,6 +28,7 @@ type ICronjobRepo interface {
Delete(opts ...DBOption) error Delete(opts ...DBOption) error
DeleteRecord(opts ...DBOption) error DeleteRecord(opts ...DBOption) error
StartRecords(cronjobID uint, fromLocal bool, targetPath string) model.JobRecords StartRecords(cronjobID uint, fromLocal bool, targetPath string) model.JobRecords
UpdateRecords(id uint, vars map[string]interface{}) error
EndRecords(record model.JobRecords, status, message, records string) EndRecords(record model.JobRecords, status, message, records string)
PageRecords(page, size int, opts ...DBOption) (int64, []model.JobRecords, error) PageRecords(page, size int, opts ...DBOption) (int64, []model.JobRecords, error)
} }
@ -164,6 +165,10 @@ func (u *CronjobRepo) Update(id uint, vars map[string]interface{}) error {
return global.DB.Model(&model.Cronjob{}).Where("id = ?", id).Updates(vars).Error return global.DB.Model(&model.Cronjob{}).Where("id = ?", id).Updates(vars).Error
} }
// UpdateRecords applies the given column/value updates to the job record
// with the given ID.
func (u *CronjobRepo) UpdateRecords(id uint, vars map[string]interface{}) error {
	return global.DB.Model(&model.JobRecords{}).Where("id = ?", id).Updates(vars).Error
}
func (u *CronjobRepo) Delete(opts ...DBOption) error { func (u *CronjobRepo) Delete(opts ...DBOption) error {
db := global.DB db := global.DB
for _, opt := range opts { for _, opt := range opts {

View File

@ -28,6 +28,7 @@ type BackupService struct{}
type IBackupService interface { type IBackupService interface {
List() ([]dto.BackupInfo, error) List() ([]dto.BackupInfo, error)
SearchRecordsWithPage(search dto.RecordSearch) (int64, []dto.BackupRecords, error) SearchRecordsWithPage(search dto.RecordSearch) (int64, []dto.BackupRecords, error)
SearchRecordsByCronjobWithPage(search dto.RecordSearchByCronjob) (int64, []dto.BackupRecords, error)
LoadOneDriveInfo() (dto.OneDriveInfo, error) LoadOneDriveInfo() (dto.OneDriveInfo, error)
DownloadRecord(info dto.DownloadRecord) (string, error) DownloadRecord(info dto.DownloadRecord) (string, error)
Create(backupDto dto.BackupOperate) error Create(backupDto dto.BackupOperate) error
@ -94,14 +95,43 @@ func (u *BackupService) SearchRecordsWithPage(search dto.RecordSearch) (int64, [
return 0, nil, errors.WithMessage(constant.ErrStructTransform, err.Error()) return 0, nil, errors.WithMessage(constant.ErrStructTransform, err.Error())
} }
itemPath := path.Join(records[i].FileDir, records[i].FileName) itemPath := path.Join(records[i].FileDir, records[i].FileName)
if records[i].Source == "LOCAL" { if _, ok := clientMap[records[i].Source]; !ok {
fileInfo, err := os.Stat(itemPath) backup, err := backupRepo.Get(commonRepo.WithByType(records[i].Source))
if err == nil { if err != nil {
item.Size = fileInfo.Size() global.LOG.Errorf("load backup model %s from db failed, err: %v", records[i].Source, err)
return total, datas, err
} }
client, err := u.NewClient(&backup)
if err != nil {
global.LOG.Errorf("load backup client %s from db failed, err: %v", records[i].Source, err)
return total, datas, err
}
item.Size, _ = client.Size(path.Join(strings.TrimLeft(backup.BackupPath, "/"), itemPath))
datas = append(datas, item) datas = append(datas, item)
clientMap[records[i].Source] = loadSizeHelper{backupPath: strings.TrimLeft(backup.BackupPath, "/"), client: client}
continue continue
} }
item.Size, _ = clientMap[records[i].Source].client.Size(path.Join(clientMap[records[i].Source].backupPath, itemPath))
datas = append(datas, item)
}
return total, datas, err
}
func (u *BackupService) SearchRecordsByCronjobWithPage(search dto.RecordSearchByCronjob) (int64, []dto.BackupRecords, error) {
total, records, err := backupRepo.PageRecord(
search.Page, search.PageSize,
commonRepo.WithOrderBy("created_at desc"),
backupRepo.WithByCronID(search.CronjobID),
)
var datas []dto.BackupRecords
clientMap := make(map[string]loadSizeHelper)
for i := 0; i < len(records); i++ {
var item dto.BackupRecords
if err := copier.Copy(&item, &records[i]); err != nil {
return 0, nil, errors.WithMessage(constant.ErrStructTransform, err.Error())
}
itemPath := path.Join(records[i].FileDir, records[i].FileName)
if _, ok := clientMap[records[i].Source]; !ok { if _, ok := clientMap[records[i].Source]; !ok {
backup, err := backupRepo.Get(commonRepo.WithByType(records[i].Source)) backup, err := backupRepo.Get(commonRepo.WithByType(records[i].Source))
if err != nil { if err != nil {
@ -156,7 +186,11 @@ func (u *BackupService) LoadOneDriveInfo() (dto.OneDriveInfo, error) {
func (u *BackupService) DownloadRecord(info dto.DownloadRecord) (string, error) { func (u *BackupService) DownloadRecord(info dto.DownloadRecord) (string, error) {
if info.Source == "LOCAL" { if info.Source == "LOCAL" {
return info.FileDir + "/" + info.FileName, nil localDir, err := loadLocalDir()
if err != nil {
return "", err
}
return path.Join(localDir, info.FileDir, info.FileName), nil
} }
backup, _ := backupRepo.Get(commonRepo.WithByType(info.Source)) backup, _ := backupRepo.Get(commonRepo.WithByType(info.Source))
if backup.ID == 0 { if backup.ID == 0 {
@ -381,9 +415,6 @@ func (u *BackupService) NewClient(backup *model.BackupAccount) (cloud_storage.Cl
if err := json.Unmarshal([]byte(backup.Vars), &varMap); err != nil { if err := json.Unmarshal([]byte(backup.Vars), &varMap); err != nil {
return nil, err return nil, err
} }
if backup.Type == "LOCAL" {
return nil, errors.New("not support")
}
varMap["bucket"] = backup.Bucket varMap["bucket"] = backup.Bucket
switch backup.Type { switch backup.Type {
case constant.Sftp, constant.WebDAV: case constant.Sftp, constant.WebDAV:

View File

@ -35,8 +35,8 @@ func (u *BackupService) AppBackup(req dto.CommonBackup) error {
return err return err
} }
timeNow := time.Now().Format("20060102150405") timeNow := time.Now().Format("20060102150405")
itemDir := fmt.Sprintf("app/%s/%s", req.Name, req.DetailName)
backupDir := path.Join(localDir, fmt.Sprintf("app/%s/%s", req.Name, req.DetailName)) backupDir := path.Join(localDir, itemDir)
fileName := fmt.Sprintf("%s_%s.tar.gz", req.DetailName, timeNow) fileName := fmt.Sprintf("%s_%s.tar.gz", req.DetailName, timeNow)
if err := handleAppBackup(&install, backupDir, fileName); err != nil { if err := handleAppBackup(&install, backupDir, fileName); err != nil {
@ -49,7 +49,7 @@ func (u *BackupService) AppBackup(req dto.CommonBackup) error {
DetailName: req.DetailName, DetailName: req.DetailName,
Source: "LOCAL", Source: "LOCAL",
BackupType: "LOCAL", BackupType: "LOCAL",
FileDir: backupDir, FileDir: itemDir,
FileName: fileName, FileName: fileName,
} }

View File

@ -2,13 +2,14 @@ package service
import ( import (
"fmt" "fmt"
"github.com/1Panel-dev/1Panel/backend/buserr"
"os" "os"
"path" "path"
"path/filepath" "path/filepath"
"strings" "strings"
"time" "time"
"github.com/1Panel-dev/1Panel/backend/buserr"
"github.com/1Panel-dev/1Panel/backend/app/dto" "github.com/1Panel-dev/1Panel/backend/app/dto"
"github.com/1Panel-dev/1Panel/backend/app/model" "github.com/1Panel-dev/1Panel/backend/app/model"
"github.com/1Panel-dev/1Panel/backend/global" "github.com/1Panel-dev/1Panel/backend/global"
@ -23,7 +24,8 @@ func (u *BackupService) MysqlBackup(req dto.CommonBackup) error {
} }
timeNow := time.Now().Format("20060102150405") timeNow := time.Now().Format("20060102150405")
targetDir := path.Join(localDir, fmt.Sprintf("database/%s/%s/%s", req.Type, req.Name, req.DetailName)) itemDir := fmt.Sprintf("database/%s/%s/%s", req.Type, req.Name, req.DetailName)
targetDir := path.Join(localDir, itemDir)
fileName := fmt.Sprintf("%s_%s.sql.gz", req.DetailName, timeNow) fileName := fmt.Sprintf("%s_%s.sql.gz", req.DetailName, timeNow)
if err := handleMysqlBackup(req.Name, req.DetailName, targetDir, fileName); err != nil { if err := handleMysqlBackup(req.Name, req.DetailName, targetDir, fileName); err != nil {
@ -36,7 +38,7 @@ func (u *BackupService) MysqlBackup(req dto.CommonBackup) error {
DetailName: req.DetailName, DetailName: req.DetailName,
Source: "LOCAL", Source: "LOCAL",
BackupType: "LOCAL", BackupType: "LOCAL",
FileDir: targetDir, FileDir: itemDir,
FileName: fileName, FileName: fileName,
} }
if err := backupRepo.CreateRecord(record); err != nil { if err := backupRepo.CreateRecord(record); err != nil {

View File

@ -25,7 +25,8 @@ func (u *BackupService) PostgresqlBackup(req dto.CommonBackup) error {
} }
timeNow := time.Now().Format("20060102150405") timeNow := time.Now().Format("20060102150405")
targetDir := path.Join(localDir, fmt.Sprintf("database/%s/%s/%s", req.Type, req.Name, req.DetailName)) itemDir := fmt.Sprintf("database/%s/%s/%s", req.Type, req.Name, req.DetailName)
targetDir := path.Join(localDir, itemDir)
fileName := fmt.Sprintf("%s_%s.sql.gz", req.DetailName, timeNow) fileName := fmt.Sprintf("%s_%s.sql.gz", req.DetailName, timeNow)
if err := handlePostgresqlBackup(req.Name, req.DetailName, targetDir, fileName); err != nil { if err := handlePostgresqlBackup(req.Name, req.DetailName, targetDir, fileName); err != nil {
@ -38,7 +39,7 @@ func (u *BackupService) PostgresqlBackup(req dto.CommonBackup) error {
DetailName: req.DetailName, DetailName: req.DetailName,
Source: "LOCAL", Source: "LOCAL",
BackupType: "LOCAL", BackupType: "LOCAL",
FileDir: targetDir, FileDir: itemDir,
FileName: fileName, FileName: fileName,
} }
if err := backupRepo.CreateRecord(record); err != nil { if err := backupRepo.CreateRecord(record); err != nil {

View File

@ -43,7 +43,8 @@ func (u *BackupService) RedisBackup() error {
fileName = fmt.Sprintf("%s.tar.gz", timeNow) fileName = fmt.Sprintf("%s.tar.gz", timeNow)
} }
} }
backupDir := path.Join(localDir, fmt.Sprintf("database/redis/%s", redisInfo.Name)) itemDir := fmt.Sprintf("database/redis/%s", redisInfo.Name)
backupDir := path.Join(localDir, itemDir)
if err := handleRedisBackup(redisInfo, backupDir, fileName); err != nil { if err := handleRedisBackup(redisInfo, backupDir, fileName); err != nil {
return err return err
} }
@ -51,7 +52,7 @@ func (u *BackupService) RedisBackup() error {
Type: "redis", Type: "redis",
Source: "LOCAL", Source: "LOCAL",
BackupType: "LOCAL", BackupType: "LOCAL",
FileDir: backupDir, FileDir: itemDir,
FileName: fileName, FileName: fileName,
} }
if err := backupRepo.CreateRecord(record); err != nil { if err := backupRepo.CreateRecord(record); err != nil {

View File

@ -31,7 +31,8 @@ func (u *BackupService) WebsiteBackup(req dto.CommonBackup) error {
} }
timeNow := time.Now().Format("20060102150405") timeNow := time.Now().Format("20060102150405")
backupDir := path.Join(localDir, fmt.Sprintf("website/%s", req.Name)) itemDir := fmt.Sprintf("website/%s", req.Name)
backupDir := path.Join(localDir, itemDir)
fileName := fmt.Sprintf("%s_%s.tar.gz", website.PrimaryDomain, timeNow) fileName := fmt.Sprintf("%s_%s.tar.gz", website.PrimaryDomain, timeNow)
if err := handleWebsiteBackup(&website, backupDir, fileName); err != nil { if err := handleWebsiteBackup(&website, backupDir, fileName); err != nil {
return err return err
@ -43,7 +44,7 @@ func (u *BackupService) WebsiteBackup(req dto.CommonBackup) error {
DetailName: req.DetailName, DetailName: req.DetailName,
Source: "LOCAL", Source: "LOCAL",
BackupType: "LOCAL", BackupType: "LOCAL",
FileDir: backupDir, FileDir: itemDir,
FileName: fileName, FileName: fileName,
} }
if err := backupRepo.CreateRecord(record); err != nil { if err := backupRepo.CreateRecord(record); err != nil {

View File

@ -43,16 +43,29 @@ func NewICronjobService() ICronjobService {
func (u *CronjobService) SearchWithPage(search dto.SearchWithPage) (int64, interface{}, error) { func (u *CronjobService) SearchWithPage(search dto.SearchWithPage) (int64, interface{}, error) {
total, cronjobs, err := cronjobRepo.Page(search.Page, search.PageSize, commonRepo.WithLikeName(search.Info), commonRepo.WithOrderRuleBy(search.OrderBy, search.Order)) total, cronjobs, err := cronjobRepo.Page(search.Page, search.PageSize, commonRepo.WithLikeName(search.Info), commonRepo.WithOrderRuleBy(search.OrderBy, search.Order))
var dtoCronjobs []dto.CronjobInfo var dtoCronjobs []dto.CronjobInfo
accounts, _ := backupRepo.List()
for _, cronjob := range cronjobs { for _, cronjob := range cronjobs {
var item dto.CronjobInfo var item dto.CronjobInfo
if err := copier.Copy(&item, &cronjob); err != nil { if err := copier.Copy(&item, &cronjob); err != nil {
return 0, nil, errors.WithMessage(constant.ErrStructTransform, err.Error()) return 0, nil, errors.WithMessage(constant.ErrStructTransform, err.Error())
} }
if hasBackup(item.Type) { if hasBackup(item.Type) {
backup, _ := backupRepo.Get(commonRepo.WithByID(uint(item.TargetDirID))) for _, account := range accounts {
if len(backup.Type) != 0 { if int(account.ID) == item.TargetDirID {
item.TargetDir = backup.Type item.TargetDir = account.Type
}
} }
itemAccounts := strings.Split(item.TargetAccountIDs, ",")
var targetAccounts []string
for _, itemAccount := range itemAccounts {
for _, account := range accounts {
if itemAccount == fmt.Sprintf("%d", account.ID) {
targetAccounts = append(targetAccounts, account.Type)
break
}
}
}
item.TargetAccounts = strings.Join(targetAccounts, ",")
} else { } else {
item.TargetDir = "-" item.TargetDir = "-"
} }
@ -105,24 +118,22 @@ func (u *CronjobService) CleanRecord(req dto.CronjobClean) error {
if err != nil { if err != nil {
return err return err
} }
if req.CleanData && hasBackup(cronjob.Type) { if req.CleanData {
cronjob.RetainCopies = 0 if hasBackup(cronjob.Type) {
backup, err := backupRepo.Get(commonRepo.WithByID(uint(cronjob.TargetDirID))) accountMap, err := u.loadClientMap(cronjob.TargetAccountIDs)
if err != nil {
return err
}
if backup.Type != "LOCAL" {
localDir, err := loadLocalDir()
if err != nil { if err != nil {
return err return err
} }
client, err := NewIBackupService().NewClient(&backup) cronjob.RetainCopies = 0
if err != nil { u.removeExpiredBackup(cronjob, accountMap, model.BackupRecord{})
return err
}
u.HandleRmExpired(backup.Type, backup.BackupPath, localDir, &cronjob, client)
} else { } else {
u.HandleRmExpired(backup.Type, backup.BackupPath, "", &cronjob, nil) u.removeExpiredLog(cronjob)
}
} else {
records, _ := backupRepo.ListRecord(backupRepo.WithByCronID(cronjob.ID))
for _, records := range records {
records.CronjobID = 0
_ = backupRepo.UpdateRecord(&records)
} }
} }
delRecords, err := cronjobRepo.ListRecord(cronjobRepo.WithByJobID(int(req.CronjobID))) delRecords, err := cronjobRepo.ListRecord(cronjobRepo.WithByJobID(int(req.CronjobID)))
@ -283,8 +294,8 @@ func (u *CronjobService) Update(id uint, req dto.CronjobUpdate) error {
upMap["db_name"] = req.DBName upMap["db_name"] = req.DBName
upMap["url"] = req.URL upMap["url"] = req.URL
upMap["source_dir"] = req.SourceDir upMap["source_dir"] = req.SourceDir
upMap["keep_local"] = req.KeepLocal
upMap["target_dir_id"] = req.TargetDirID upMap["target_dir_id"] = req.TargetDirID
upMap["target_account_ids"] = req.TargetAccountIDs
upMap["retain_copies"] = req.RetainCopies upMap["retain_copies"] = req.RetainCopies
return cronjobRepo.Update(id, upMap) return cronjobRepo.Update(id, upMap)
} }

View File

@ -0,0 +1,386 @@
package service
import (
"fmt"
"os"
"path"
"strconv"
"strings"
"time"
"github.com/1Panel-dev/1Panel/backend/app/dto"
"github.com/1Panel-dev/1Panel/backend/app/model"
"github.com/1Panel-dev/1Panel/backend/constant"
"github.com/1Panel-dev/1Panel/backend/global"
)
// handleApp backs up one app install (or every install when cronjob.AppID is
// "all"), uploads each archive to the cronjob's backup accounts, and persists
// one BackupRecord per archive. Returns on the first failure.
func (u *CronjobService) handleApp(cronjob model.Cronjob, startTime time.Time) error {
	var apps []model.AppInstall
	if cronjob.AppID == "all" {
		apps, _ = appInstallRepo.ListBy()
	} else {
		itemID, _ := (strconv.Atoi(cronjob.AppID))
		app, err := appInstallRepo.GetFirst(commonRepo.WithByID(uint(itemID)))
		if err != nil {
			return err
		}
		apps = append(apps, app)
	}
	accountMap, err := u.loadClientMap(cronjob.TargetAccountIDs)
	if err != nil {
		return err
	}
	for _, app := range apps {
		var record model.BackupRecord
		record.From = "cronjob"
		record.Type = "app"
		record.CronjobID = cronjob.ID
		record.Name = app.App.Key
		record.DetailName = app.Name
		record.Source, record.BackupType = loadRecordPath(cronjob, accountMap)
		backupDir := path.Join(global.CONF.System.TmpDir, fmt.Sprintf("app/%s/%s", app.App.Key, app.Name))
		record.FileName = fmt.Sprintf("app_%s_%s.tar.gz", app.Name, startTime.Format("20060102150405"))
		if err := handleAppBackup(&app, backupDir, record.FileName); err != nil {
			return err
		}
		downloadPath, err := u.uploadCronjobBackFile(cronjob, accountMap, path.Join(backupDir, record.FileName))
		if err != nil {
			return err
		}
		// Set FileDir before persisting so the stored record points at the real
		// upload location (same order as handleWebsite/handleDatabase). The
		// previous order created the record first, leaving file_dir empty in DB.
		record.FileDir = path.Dir(downloadPath)
		if err := backupRepo.CreateRecord(&record); err != nil {
			global.LOG.Errorf("save backup record failed, err: %v", err)
			return err
		}
		u.removeExpiredBackup(cronjob, accountMap, record)
	}
	return nil
}
// handleWebsite backs up every website selected by the cronjob, uploads each
// archive to the configured backup accounts, and records one BackupRecord per
// site. Returns on the first failure.
func (u *CronjobService) handleWebsite(cronjob model.Cronjob, startTime time.Time) error {
	accountMap, err := u.loadClientMap(cronjob.TargetAccountIDs)
	if err != nil {
		return err
	}
	stamp := startTime.Format("20060102150405")
	for _, website := range loadWebsForJob(cronjob) {
		record := model.BackupRecord{
			From:       "cronjob",
			Type:       "website",
			CronjobID:  cronjob.ID,
			Name:       website.PrimaryDomain,
			DetailName: website.Alias,
			FileName:   fmt.Sprintf("website_%s_%s.tar.gz", website.PrimaryDomain, stamp),
		}
		record.Source, record.BackupType = loadRecordPath(cronjob, accountMap)
		// Stage the archive under the tmp dir before shipping it out.
		stagingDir := path.Join(global.CONF.System.TmpDir, fmt.Sprintf("website/%s", website.PrimaryDomain))
		if err := handleWebsiteBackup(&website, stagingDir, record.FileName); err != nil {
			return err
		}
		downloadPath, err := u.uploadCronjobBackFile(cronjob, accountMap, path.Join(stagingDir, record.FileName))
		if err != nil {
			return err
		}
		record.FileDir = path.Dir(downloadPath)
		if err := backupRepo.CreateRecord(&record); err != nil {
			global.LOG.Errorf("save backup record failed, err: %v", err)
			return err
		}
		u.removeExpiredBackup(cronjob, accountMap, record)
	}
	return nil
}
// handleDatabase dumps every database selected by the cronjob (MySQL/MariaDB
// or PostgreSQL), uploads each dump to the configured backup accounts, and
// records one BackupRecord per database. Returns on the first failure.
func (u *CronjobService) handleDatabase(cronjob model.Cronjob, startTime time.Time) error {
	accountMap, err := u.loadClientMap(cronjob.TargetAccountIDs)
	if err != nil {
		return err
	}
	stamp := startTime.Format("20060102150405")
	for _, db := range loadDbsForJob(cronjob) {
		var record model.BackupRecord
		record.From = "cronjob"
		record.Type = db.DBType
		record.CronjobID = cronjob.ID
		record.Name = db.Database
		record.DetailName = db.Name
		record.Source, record.BackupType = loadRecordPath(cronjob, accountMap)
		record.FileName = fmt.Sprintf("db_%s_%s.sql.gz", db.Name, stamp)
		stagingDir := path.Join(global.CONF.System.TmpDir, fmt.Sprintf("database/%s/%s/%s", db.DBType, record.Name, db.Name))
		// Dispatch to the engine-specific dump helper.
		switch cronjob.DBType {
		case "mysql", "mariadb":
			if err := handleMysqlBackup(db.Database, db.Name, stagingDir, record.FileName); err != nil {
				return err
			}
		default:
			if err := handlePostgresqlBackup(db.Database, db.Name, stagingDir, record.FileName); err != nil {
				return err
			}
		}
		downloadPath, err := u.uploadCronjobBackFile(cronjob, accountMap, path.Join(stagingDir, record.FileName))
		if err != nil {
			return err
		}
		record.FileDir = path.Dir(downloadPath)
		if err := backupRepo.CreateRecord(&record); err != nil {
			global.LOG.Errorf("save backup record failed, err: %v", err)
			return err
		}
		u.removeExpiredBackup(cronjob, accountMap, record)
	}
	return nil
}
// handleDirectory tars the cronjob's source directory (honoring its exclusion
// rules), uploads the archive to the configured backup accounts, and persists
// a BackupRecord for it.
func (u *CronjobService) handleDirectory(cronjob model.Cronjob, startTime time.Time) error {
	accountMap, err := u.loadClientMap(cronjob.TargetAccountIDs)
	if err != nil {
		return err
	}
	archiveName := fmt.Sprintf("directory%s_%s.tar.gz", strings.ReplaceAll(cronjob.SourceDir, "/", "_"), startTime.Format("20060102150405"))
	stagingDir := path.Join(global.CONF.System.TmpDir, fmt.Sprintf("%s/%s", cronjob.Type, cronjob.Name))
	if err := handleTar(cronjob.SourceDir, stagingDir, archiveName, cronjob.ExclusionRules); err != nil {
		return err
	}
	record := model.BackupRecord{
		From:      "cronjob",
		Type:      "directory",
		CronjobID: cronjob.ID,
		Name:      cronjob.Name,
		FileName:  archiveName,
	}
	record.Source, record.BackupType = loadRecordPath(cronjob, accountMap)
	downloadPath, err := u.uploadCronjobBackFile(cronjob, accountMap, path.Join(stagingDir, archiveName))
	if err != nil {
		return err
	}
	record.FileDir = path.Dir(downloadPath)
	if err := backupRepo.CreateRecord(&record); err != nil {
		global.LOG.Errorf("save backup record failed, err: %v", err)
		return err
	}
	u.removeExpiredBackup(cronjob, accountMap, record)
	return nil
}
// handleSystemLog archives the panel/system logs, uploads the archive to the
// configured backup accounts, and persists a BackupRecord for it.
func (u *CronjobService) handleSystemLog(cronjob model.Cronjob, startTime time.Time) error {
	accountMap, err := u.loadClientMap(cronjob.TargetAccountIDs)
	if err != nil {
		return err
	}
	stamp := startTime.Format("20060102150405")
	archiveName := fmt.Sprintf("system_log_%s.tar.gz", stamp)
	stagingDir := path.Join(global.CONF.System.TmpDir, "log", stamp)
	if err := handleBackupLogs(stagingDir, archiveName); err != nil {
		return err
	}
	record := model.BackupRecord{
		From:      "cronjob",
		Type:      "log",
		CronjobID: cronjob.ID,
		Name:      cronjob.Name,
		FileName:  archiveName,
	}
	record.Source, record.BackupType = loadRecordPath(cronjob, accountMap)
	// The archive is written next to the staging dir, hence path.Dir here.
	downloadPath, err := u.uploadCronjobBackFile(cronjob, accountMap, path.Join(path.Dir(stagingDir), archiveName))
	if err != nil {
		return err
	}
	record.FileDir = path.Dir(downloadPath)
	if err := backupRepo.CreateRecord(&record); err != nil {
		global.LOG.Errorf("save backup record failed, err: %v", err)
		return err
	}
	u.removeExpiredBackup(cronjob, accountMap, record)
	return nil
}
// handleSnapshot creates a system snapshot via the snapshot service, targeting
// the cronjob's backup accounts, and persists a BackupRecord for the result.
func (u *CronjobService) handleSnapshot(cronjob model.Cronjob, startTime time.Time, logPath string) error {
	accountMap, err := u.loadClientMap(cronjob.TargetAccountIDs)
	if err != nil {
		return err
	}
	record := model.BackupRecord{
		From:      "cronjob",
		Type:      "directory",
		CronjobID: cronjob.ID,
		Name:      cronjob.Name,
		FileDir:   "system_snapshot",
	}
	record.Source, record.BackupType = loadRecordPath(cronjob, accountMap)
	req := dto.SnapshotCreate{From: record.BackupType}
	name, err := NewISnapshotService().HandleSnapshot(true, logPath, req, startTime.Format("20060102150405"))
	if err != nil {
		return err
	}
	record.FileName = name + ".tar.gz"
	if err := backupRepo.CreateRecord(&record); err != nil {
		global.LOG.Errorf("save backup record failed, err: %v", err)
		return err
	}
	u.removeExpiredBackup(cronjob, accountMap, record)
	return nil
}
// databaseHelper describes one database targeted by a backup cronjob.
type databaseHelper struct {
	// DBType is the engine type (e.g. "mysql", "mariadb", "postgresql").
	DBType string
	// Database is the database-service name the entry belongs to.
	Database string
	// Name is the individual database name to dump.
	Name string
}
// loadDbsForJob resolves the cronjob's database selection into a concrete
// list: every database of the configured engine when DBName is "all",
// otherwise the single database whose ID is stored in DBName.
func loadDbsForJob(cronjob model.Cronjob) []databaseHelper {
	isMysqlLike := cronjob.DBType == "mysql" || cronjob.DBType == "mariadb"
	var dbs []databaseHelper
	if cronjob.DBName == "all" {
		if isMysqlLike {
			items, _ := mysqlRepo.List()
			for _, item := range items {
				dbs = append(dbs, databaseHelper{DBType: cronjob.DBType, Database: item.MysqlName, Name: item.Name})
			}
		} else {
			items, _ := postgresqlRepo.List()
			for _, item := range items {
				dbs = append(dbs, databaseHelper{DBType: cronjob.DBType, Database: item.PostgresqlName, Name: item.Name})
			}
		}
		return dbs
	}
	id, _ := strconv.Atoi(cronjob.DBName)
	if isMysqlLike {
		item, _ := mysqlRepo.Get(commonRepo.WithByID(uint(id)))
		dbs = append(dbs, databaseHelper{DBType: cronjob.DBType, Database: item.MysqlName, Name: item.Name})
	} else {
		item, _ := postgresqlRepo.Get(commonRepo.WithByID(uint(id)))
		dbs = append(dbs, databaseHelper{DBType: cronjob.DBType, Database: item.PostgresqlName, Name: item.Name})
	}
	return dbs
}
// loadWebsForJob resolves the website targets of a cronjob: all websites when
// cronjob.Website is "all", otherwise the single website matching the stored
// ID (an empty slice when no such website exists).
func loadWebsForJob(cronjob model.Cronjob) []model.Website {
	if cronjob.Website == "all" {
		all, _ := websiteRepo.List()
		return all
	}
	id, _ := strconv.Atoi(cronjob.Website)
	site, _ := websiteRepo.GetFirst(commonRepo.WithByID(uint(id)))
	if site.ID == 0 {
		return nil
	}
	return []model.Website{site}
}
// loadRecordPath derives a backup record's source — the backup type of the
// account referenced by TargetDirID — and the comma-joined backup types of
// every configured target account. Unknown or empty account IDs are skipped.
func loadRecordPath(cronjob model.Cronjob, accountMap map[string]cronjobUploadHelper) (string, string) {
	source := accountMap[fmt.Sprintf("%v", cronjob.TargetDirID)].backType

	var types []string
	for _, id := range strings.Split(cronjob.TargetAccountIDs, ",") {
		if id == "" {
			continue
		}
		if t := accountMap[id].backType; t != "" {
			types = append(types, t)
		}
	}
	return source, strings.Join(types, ",")
}
// handleBackupLogs gathers website access logs, panel system logs and SSH
// login logs into targetDir, then tars the directory into
// path.Dir(targetDir)/fileName. On success the staging directory is removed;
// on failure it is left in place for inspection (unchanged behavior).
//
// Improvements over the previous version: the four repeated
// mkdir-if-missing / read-dir-and-copy loops are factored into the
// ensureDirForBackup and copyRegularFiles helpers, and the pointless `defer`
// that fired immediately before `return nil` is replaced by a direct call.
func handleBackupLogs(targetDir, fileName string) error {
	websites, err := websiteRepo.List()
	if err != nil {
		return err
	}
	if len(websites) != 0 {
		nginxInstall, err := getAppInstallByKey(constant.AppOpenresty)
		if err != nil {
			return err
		}
		webItem := path.Join(nginxInstall.GetPath(), "www/sites")
		for _, website := range websites {
			dirItem := path.Join(targetDir, "website", website.Alias)
			if err := ensureDirForBackup(dirItem); err != nil {
				return err
			}
			// Live logs kept next to the site's web root.
			copyRegularFiles(path.Join(webItem, website.Alias, "log"), dirItem, nil)
			// Rotated logs already moved into the backup area.
			copyRegularFiles(path.Join(global.CONF.System.Backup, "log/website", website.Alias), dirItem, nil)
		}
		global.LOG.Debug("backup website log successful!")
	}

	systemDir := path.Join(targetDir, "system")
	if err := ensureDirForBackup(systemDir); err != nil {
		return err
	}
	copyRegularFiles(path.Join(global.CONF.System.BaseDir, "1panel/log"), systemDir, nil)
	global.LOG.Debug("backup system log successful!")

	loginDir := path.Join(targetDir, "login")
	if err := ensureDirForBackup(loginDir); err != nil {
		return err
	}
	// Only the auth-related logs are collected from /var/log.
	copyRegularFiles("/var/log", loginDir, func(name string) bool {
		return strings.HasPrefix(name, "secure") || strings.HasPrefix(name, "auth.log")
	})
	global.LOG.Debug("backup ssh log successful!")

	if err := handleTar(targetDir, path.Dir(targetDir), fileName, ""); err != nil {
		return err
	}
	_ = os.RemoveAll(targetDir)
	return nil
}

// ensureDirForBackup creates dir (and any missing parents) when it does not
// exist yet.
func ensureDirForBackup(dir string) error {
	if _, err := os.Stat(dir); err != nil && os.IsNotExist(err) {
		if err = os.MkdirAll(dir, os.ModePerm); err != nil {
			return err
		}
	}
	return nil
}

// copyRegularFiles copies every regular (non-directory) file from srcDir into
// dstDir. When keep is non-nil, only files whose name it accepts are copied.
// Read and copy failures are ignored — collection is best-effort, matching
// the original inline loops.
func copyRegularFiles(srcDir, dstDir string, keep func(name string) bool) {
	entries, _ := os.ReadDir(srcDir)
	for _, entry := range entries {
		if entry.IsDir() {
			continue
		}
		if keep != nil && !keep(entry.Name()) {
			continue
		}
		_ = cpBinary([]string{path.Join(srcDir, entry.Name())}, dstDir)
	}
}

View File

@ -5,15 +5,14 @@ import (
"fmt" "fmt"
"os" "os"
"path" "path"
"strconv"
"strings" "strings"
"time" "time"
"github.com/1Panel-dev/1Panel/backend/buserr" "github.com/1Panel-dev/1Panel/backend/buserr"
"github.com/1Panel-dev/1Panel/backend/i18n" "github.com/1Panel-dev/1Panel/backend/i18n"
"github.com/1Panel-dev/1Panel/backend/app/dto"
"github.com/1Panel-dev/1Panel/backend/app/model" "github.com/1Panel-dev/1Panel/backend/app/model"
"github.com/1Panel-dev/1Panel/backend/app/repo"
"github.com/1Panel-dev/1Panel/backend/constant" "github.com/1Panel-dev/1Panel/backend/constant"
"github.com/1Panel-dev/1Panel/backend/global" "github.com/1Panel-dev/1Panel/backend/global"
"github.com/1Panel-dev/1Panel/backend/utils/cloud_storage" "github.com/1Panel-dev/1Panel/backend/utils/cloud_storage"
@ -35,32 +34,25 @@ func (u *CronjobService) HandleJob(cronjob *model.Cronjob) {
if len(cronjob.Script) == 0 { if len(cronjob.Script) == 0 {
return return
} }
record.Records = u.generateLogsPath(*cronjob, record.StartTime)
_ = cronjobRepo.UpdateRecords(record.ID, map[string]interface{}{"records": record.Records})
script := cronjob.Script
if len(cronjob.ContainerName) != 0 { if len(cronjob.ContainerName) != 0 {
message, err = u.handleShell(cronjob.Type, cronjob.Name, fmt.Sprintf("docker exec %s %s", cronjob.ContainerName, cronjob.Script)) script = fmt.Sprintf("docker exec %s %s", cronjob.ContainerName, cronjob.Script)
} else {
message, err = u.handleShell(cronjob.Type, cronjob.Name, cronjob.Script)
} }
u.HandleRmExpired("LOCAL", "", "", cronjob, nil) err = u.handleShell(cronjob.Type, cronjob.Name, script, record.Records)
case "snapshot": u.removeExpiredLog(*cronjob)
messageItem := ""
messageItem, record.File, err = u.handleSnapshot(cronjob, record.StartTime)
message = []byte(messageItem)
case "curl": case "curl":
if len(cronjob.URL) == 0 { if len(cronjob.URL) == 0 {
return return
} }
message, err = u.handleShell(cronjob.Type, cronjob.Name, fmt.Sprintf("curl '%s'", cronjob.URL)) record.Records = u.generateLogsPath(*cronjob, record.StartTime)
u.HandleRmExpired("LOCAL", "", "", cronjob, nil) _ = cronjobRepo.UpdateRecords(record.ID, map[string]interface{}{"records": record.Records})
err = u.handleShell(cronjob.Type, cronjob.Name, fmt.Sprintf("curl '%s'", cronjob.URL), record.Records)
u.removeExpiredLog(*cronjob)
case "ntp": case "ntp":
err = u.handleNtpSync() err = u.handleNtpSync()
u.HandleRmExpired("LOCAL", "", "", cronjob, nil) u.removeExpiredLog(*cronjob)
case "website", "database", "app":
record.File, err = u.handleBackup(cronjob, record.StartTime)
case "directory":
if len(cronjob.SourceDir) == 0 {
return
}
record.File, err = u.handleBackup(cronjob, record.StartTime)
case "cutWebsiteLog": case "cutWebsiteLog":
var messageItem []string var messageItem []string
messageItem, record.File, err = u.handleCutWebsiteLog(cronjob, record.StartTime) messageItem, record.File, err = u.handleCutWebsiteLog(cronjob, record.StartTime)
@ -69,9 +61,24 @@ func (u *CronjobService) HandleJob(cronjob *model.Cronjob) {
messageItem := "" messageItem := ""
messageItem, err = u.handleSystemClean() messageItem, err = u.handleSystemClean()
message = []byte(messageItem) message = []byte(messageItem)
u.HandleRmExpired("LOCAL", "", "", cronjob, nil) u.removeExpiredLog(*cronjob)
case "website":
err = u.handleWebsite(*cronjob, record.StartTime)
case "app":
err = u.handleApp(*cronjob, record.StartTime)
case "database":
err = u.handleDatabase(*cronjob, record.StartTime)
case "directory":
if len(cronjob.SourceDir) == 0 {
return
}
err = u.handleDirectory(*cronjob, record.StartTime)
case "log": case "log":
record.File, err = u.handleSystemLog(*cronjob, record.StartTime) err = u.handleSystemLog(*cronjob, record.StartTime)
case "snapshot":
record.Records = u.generateLogsPath(*cronjob, record.StartTime)
_ = cronjobRepo.UpdateRecords(record.ID, map[string]interface{}{"records": record.Records})
err = u.handleSnapshot(*cronjob, record.StartTime, record.Records)
} }
if err != nil { if err != nil {
@ -88,18 +95,17 @@ func (u *CronjobService) HandleJob(cronjob *model.Cronjob) {
}() }()
} }
func (u *CronjobService) handleShell(cronType, cornName, script string) ([]byte, error) { func (u *CronjobService) handleShell(cronType, cornName, script, logPath string) error {
handleDir := fmt.Sprintf("%s/task/%s/%s", constant.DataDir, cronType, cornName) handleDir := fmt.Sprintf("%s/task/%s/%s", constant.DataDir, cronType, cornName)
if _, err := os.Stat(handleDir); err != nil && os.IsNotExist(err) { if _, err := os.Stat(handleDir); err != nil && os.IsNotExist(err) {
if err = os.MkdirAll(handleDir, os.ModePerm); err != nil { if err = os.MkdirAll(handleDir, os.ModePerm); err != nil {
return nil, err return err
} }
} }
stdout, err := cmd.ExecCronjobWithTimeOut(script, handleDir, 24*time.Hour) if err := cmd.ExecCronjobWithTimeOut(script, handleDir, logPath, 24*time.Hour); err != nil {
if err != nil { return err
return []byte(stdout), err
} }
return []byte(stdout), nil return nil
} }
func (u *CronjobService) handleNtpSync() error { func (u *CronjobService) handleNtpSync() error {
@ -117,98 +123,6 @@ func (u *CronjobService) handleNtpSync() error {
return nil return nil
} }
func (u *CronjobService) handleBackup(cronjob *model.Cronjob, startTime time.Time) (string, error) {
backup, err := backupRepo.Get(commonRepo.WithByID(uint(cronjob.TargetDirID)))
if err != nil {
return "", err
}
localDir, err := loadLocalDir()
if err != nil {
return "", err
}
global.LOG.Infof("start to backup %s %s to %s", cronjob.Type, cronjob.Name, backup.Type)
switch cronjob.Type {
case "database":
paths, err := u.handleDatabase(*cronjob, backup, startTime)
return strings.Join(paths, ","), err
case "app":
paths, err := u.handleApp(*cronjob, backup, startTime)
return strings.Join(paths, ","), err
case "website":
paths, err := u.handleWebsite(*cronjob, backup, startTime)
return strings.Join(paths, ","), err
default:
fileName := fmt.Sprintf("directory%s_%s.tar.gz", strings.ReplaceAll(cronjob.SourceDir, "/", "_"), startTime.Format("20060102150405"))
backupDir := path.Join(localDir, fmt.Sprintf("%s/%s", cronjob.Type, cronjob.Name))
itemFileDir := fmt.Sprintf("%s/%s", cronjob.Type, cronjob.Name)
global.LOG.Infof("handle tar %s to %s", backupDir, fileName)
if err := handleTar(cronjob.SourceDir, backupDir, fileName, cronjob.ExclusionRules); err != nil {
return "", err
}
var client cloud_storage.CloudStorageClient
if backup.Type != "LOCAL" {
if !cronjob.KeepLocal {
defer func() {
_ = os.RemoveAll(fmt.Sprintf("%s/%s", backupDir, fileName))
}()
}
client, err = NewIBackupService().NewClient(&backup)
if err != nil {
return "", err
}
if len(backup.BackupPath) != 0 {
itemFileDir = path.Join(strings.TrimPrefix(backup.BackupPath, "/"), itemFileDir)
}
if _, err = client.Upload(backupDir+"/"+fileName, itemFileDir+"/"+fileName); err != nil {
return "", err
}
}
u.HandleRmExpired(backup.Type, backup.BackupPath, localDir, cronjob, client)
if backup.Type == "LOCAL" || cronjob.KeepLocal {
return fmt.Sprintf("%s/%s", backupDir, fileName), nil
} else {
return fmt.Sprintf("%s/%s", itemFileDir, fileName), nil
}
}
}
func (u *CronjobService) HandleRmExpired(backType, backupPath, localDir string, cronjob *model.Cronjob, backClient cloud_storage.CloudStorageClient) {
global.LOG.Infof("start to handle remove expired, retain copies: %d", cronjob.RetainCopies)
records, _ := cronjobRepo.ListRecord(cronjobRepo.WithByJobID(int(cronjob.ID)), commonRepo.WithOrderBy("created_at desc"))
if len(records) <= int(cronjob.RetainCopies) {
return
}
for i := int(cronjob.RetainCopies); i < len(records); i++ {
if len(records[i].File) != 0 {
files := strings.Split(records[i].File, ",")
for _, file := range files {
_ = os.Remove(file)
_ = backupRepo.DeleteRecord(context.TODO(), backupRepo.WithByFileName(path.Base(file)))
if backType == "LOCAL" {
continue
}
fileItem := file
if cronjob.KeepLocal {
if len(backupPath) != 0 {
fileItem = path.Join(strings.TrimPrefix(backupPath, "/") + strings.TrimPrefix(file, localDir+"/"))
} else {
fileItem = strings.TrimPrefix(file, localDir+"/")
}
}
if cronjob.Type == "snapshot" {
_ = snapshotRepo.Delete(commonRepo.WithByName(strings.TrimSuffix(path.Base(fileItem), ".tar.gz")))
}
_, _ = backClient.Delete(fileItem)
}
}
_ = cronjobRepo.DeleteRecord(commonRepo.WithByID(uint(records[i].ID)))
_ = os.Remove(records[i].Records)
}
}
func handleTar(sourceDir, targetDir, name, exclusionRules string) error { func handleTar(sourceDir, targetDir, name, exclusionRules string) error {
if _, err := os.Stat(targetDir); err != nil && os.IsNotExist(err) { if _, err := os.Stat(targetDir); err != nil && os.IsNotExist(err) {
if err = os.MkdirAll(targetDir, os.ModePerm); err != nil { if err = os.MkdirAll(targetDir, os.ModePerm); err != nil {
@ -266,77 +180,6 @@ func handleUnTar(sourceFile, targetDir string) error {
return nil return nil
} }
func (u *CronjobService) handleDatabase(cronjob model.Cronjob, backup model.BackupAccount, startTime time.Time) ([]string, error) {
var paths []string
localDir, err := loadLocalDir()
if err != nil {
return paths, err
}
dbs := loadDbsForJob(cronjob)
var client cloud_storage.CloudStorageClient
if backup.Type != "LOCAL" {
client, err = NewIBackupService().NewClient(&backup)
if err != nil {
return paths, err
}
}
for _, dbInfo := range dbs {
var record model.BackupRecord
record.Type = dbInfo.DBType
record.Source = "LOCAL"
record.BackupType = backup.Type
record.Name = dbInfo.Database
backupDir := path.Join(localDir, fmt.Sprintf("database/%s/%s/%s", dbInfo.DBType, record.Name, dbInfo.Name))
record.FileName = fmt.Sprintf("db_%s_%s.sql.gz", dbInfo.Name, startTime.Format("20060102150405"))
if cronjob.DBType == "mysql" || cronjob.DBType == "mariadb" {
if err = handleMysqlBackup(dbInfo.Database, dbInfo.Name, backupDir, record.FileName); err != nil {
return paths, err
}
} else {
if err = handlePostgresqlBackup(dbInfo.Database, dbInfo.Name, backupDir, record.FileName); err != nil {
return paths, err
}
}
record.DetailName = dbInfo.Name
record.FileDir = backupDir
itemFileDir := strings.TrimPrefix(backupDir, localDir+"/")
if !cronjob.KeepLocal && backup.Type != "LOCAL" {
record.Source = backup.Type
record.FileDir = itemFileDir
}
if err := backupRepo.CreateRecord(&record); err != nil {
global.LOG.Errorf("save backup record failed, err: %v", err)
return paths, err
}
if backup.Type != "LOCAL" {
if !cronjob.KeepLocal {
defer func() {
_ = os.RemoveAll(fmt.Sprintf("%s/%s", backupDir, record.FileName))
}()
}
if len(backup.BackupPath) != 0 {
itemFileDir = path.Join(strings.TrimPrefix(backup.BackupPath, "/"), itemFileDir)
}
if _, err = client.Upload(backupDir+"/"+record.FileName, itemFileDir+"/"+record.FileName); err != nil {
return paths, err
}
}
if backup.Type == "LOCAL" || cronjob.KeepLocal {
paths = append(paths, fmt.Sprintf("%s/%s", record.FileDir, record.FileName))
} else {
paths = append(paths, fmt.Sprintf("%s/%s", itemFileDir, record.FileName))
}
}
u.HandleRmExpired(backup.Type, backup.BackupPath, localDir, &cronjob, client)
return paths, nil
}
func (u *CronjobService) handleCutWebsiteLog(cronjob *model.Cronjob, startTime time.Time) ([]string, string, error) { func (u *CronjobService) handleCutWebsiteLog(cronjob *model.Cronjob, startTime time.Time) ([]string, string, error) {
var ( var (
err error err error
@ -377,7 +220,7 @@ func (u *CronjobService) handleCutWebsiteLog(cronjob *model.Cronjob, startTime t
global.LOG.Infof(msg) global.LOG.Infof(msg)
msgs = append(msgs, msg) msgs = append(msgs, msg)
} }
u.HandleRmExpired("LOCAL", "", "", cronjob, nil) u.removeExpiredLog(*cronjob)
return msgs, strings.Join(filePaths, ","), err return msgs, strings.Join(filePaths, ","), err
} }
@ -400,340 +243,124 @@ func backupLogFile(dstFilePath, websiteLogDir string, fileOp files.FileOp) error
return nil return nil
} }
func (u *CronjobService) handleApp(cronjob model.Cronjob, backup model.BackupAccount, startTime time.Time) ([]string, error) {
var paths []string
localDir, err := loadLocalDir()
if err != nil {
return paths, err
}
var applist []model.AppInstall
if cronjob.AppID == "all" {
applist, err = appInstallRepo.ListBy()
if err != nil {
return paths, err
}
} else {
itemID, _ := (strconv.Atoi(cronjob.AppID))
app, err := appInstallRepo.GetFirst(commonRepo.WithByID(uint(itemID)))
if err != nil {
return paths, err
}
applist = append(applist, app)
}
var client cloud_storage.CloudStorageClient
if backup.Type != "LOCAL" {
client, err = NewIBackupService().NewClient(&backup)
if err != nil {
return paths, err
}
}
for _, app := range applist {
var record model.BackupRecord
record.Type = "app"
record.Name = app.App.Key
record.DetailName = app.Name
record.Source = "LOCAL"
record.BackupType = backup.Type
backupDir := path.Join(localDir, fmt.Sprintf("app/%s/%s", app.App.Key, app.Name))
record.FileDir = backupDir
itemFileDir := strings.TrimPrefix(backupDir, localDir+"/")
if !cronjob.KeepLocal && backup.Type != "LOCAL" {
record.Source = backup.Type
record.FileDir = strings.TrimPrefix(backupDir, localDir+"/")
}
record.FileName = fmt.Sprintf("app_%s_%s.tar.gz", app.Name, startTime.Format("20060102150405"))
if err := handleAppBackup(&app, backupDir, record.FileName); err != nil {
return paths, err
}
if err := backupRepo.CreateRecord(&record); err != nil {
global.LOG.Errorf("save backup record failed, err: %v", err)
return paths, err
}
if backup.Type != "LOCAL" {
if !cronjob.KeepLocal {
defer func() {
_ = os.RemoveAll(fmt.Sprintf("%s/%s", backupDir, record.FileName))
}()
}
if len(backup.BackupPath) != 0 {
itemFileDir = path.Join(strings.TrimPrefix(backup.BackupPath, "/"), itemFileDir)
}
if _, err = client.Upload(backupDir+"/"+record.FileName, itemFileDir+"/"+record.FileName); err != nil {
return paths, err
}
}
if backup.Type == "LOCAL" || cronjob.KeepLocal {
paths = append(paths, fmt.Sprintf("%s/%s", record.FileDir, record.FileName))
} else {
paths = append(paths, fmt.Sprintf("%s/%s", itemFileDir, record.FileName))
}
}
u.HandleRmExpired(backup.Type, backup.BackupPath, localDir, &cronjob, client)
return paths, nil
}
func (u *CronjobService) handleWebsite(cronjob model.Cronjob, backup model.BackupAccount, startTime time.Time) ([]string, error) {
var paths []string
localDir, err := loadLocalDir()
if err != nil {
return paths, err
}
weblist := loadWebsForJob(cronjob)
var client cloud_storage.CloudStorageClient
if backup.Type != "LOCAL" {
client, err = NewIBackupService().NewClient(&backup)
if err != nil {
return paths, err
}
}
for _, websiteItem := range weblist {
var record model.BackupRecord
record.Type = "website"
record.Name = websiteItem.PrimaryDomain
record.DetailName = websiteItem.Alias
record.Source = "LOCAL"
record.BackupType = backup.Type
backupDir := path.Join(localDir, fmt.Sprintf("website/%s", websiteItem.PrimaryDomain))
record.FileDir = backupDir
itemFileDir := strings.TrimPrefix(backupDir, localDir+"/")
if !cronjob.KeepLocal && backup.Type != "LOCAL" {
record.Source = backup.Type
record.FileDir = strings.TrimPrefix(backupDir, localDir+"/")
}
record.FileName = fmt.Sprintf("website_%s_%s.tar.gz", websiteItem.PrimaryDomain, startTime.Format("20060102150405"))
if err := handleWebsiteBackup(&websiteItem, backupDir, record.FileName); err != nil {
return paths, err
}
if err := backupRepo.CreateRecord(&record); err != nil {
global.LOG.Errorf("save backup record failed, err: %v", err)
return paths, err
}
if backup.Type != "LOCAL" {
if !cronjob.KeepLocal {
defer func() {
_ = os.RemoveAll(fmt.Sprintf("%s/%s", backupDir, record.FileName))
}()
}
if len(backup.BackupPath) != 0 {
itemFileDir = path.Join(strings.TrimPrefix(backup.BackupPath, "/"), itemFileDir)
}
if _, err = client.Upload(backupDir+"/"+record.FileName, itemFileDir+"/"+record.FileName); err != nil {
return paths, err
}
}
if backup.Type == "LOCAL" || cronjob.KeepLocal {
paths = append(paths, fmt.Sprintf("%s/%s", record.FileDir, record.FileName))
} else {
paths = append(paths, fmt.Sprintf("%s/%s", itemFileDir, record.FileName))
}
}
u.HandleRmExpired(backup.Type, backup.BackupPath, localDir, &cronjob, client)
return paths, nil
}
func (u *CronjobService) handleSnapshot(cronjob *model.Cronjob, startTime time.Time) (string, string, error) {
backup, err := backupRepo.Get(commonRepo.WithByID(uint(cronjob.TargetDirID)))
if err != nil {
return "", "", err
}
client, err := NewIBackupService().NewClient(&backup)
if err != nil {
return "", "", err
}
req := dto.SnapshotCreate{
From: backup.Type,
}
message, name, err := NewISnapshotService().HandleSnapshot(true, req, startTime.Format("20060102150405"))
if err != nil {
return message, "", err
}
path := path.Join(strings.TrimPrefix(backup.BackupPath, "/"), "system_snapshot", name+".tar.gz")
u.HandleRmExpired(backup.Type, backup.BackupPath, "", cronjob, client)
return message, path, nil
}
func (u *CronjobService) handleSystemClean() (string, error) { func (u *CronjobService) handleSystemClean() (string, error) {
return NewIDeviceService().CleanForCronjob() return NewIDeviceService().CleanForCronjob()
} }
func (u *CronjobService) handleSystemLog(cronjob model.Cronjob, startTime time.Time) (string, error) { func (u *CronjobService) loadClientMap(targetAccountIDs string) (map[string]cronjobUploadHelper, error) {
backup, err := backupRepo.Get(commonRepo.WithByID(uint(cronjob.TargetDirID))) clients := make(map[string]cronjobUploadHelper)
accounts, err := backupRepo.List()
if err != nil { if err != nil {
return "", err return nil, err
} }
targets := strings.Split(targetAccountIDs, ",")
pathItem := path.Join(global.CONF.System.BaseDir, "1panel/tmp/log", startTime.Format("20060102150405")) for _, target := range targets {
websites, err := websiteRepo.List() if len(target) == 0 {
if err != nil { continue
return "", err
}
if len(websites) != 0 {
nginxInstall, err := getAppInstallByKey(constant.AppOpenresty)
if err != nil {
return "", err
} }
webItem := path.Join(nginxInstall.GetPath(), "www/sites") for _, account := range accounts {
for _, website := range websites { if target == fmt.Sprintf("%v", account.ID) {
dirItem := path.Join(pathItem, "website", website.Alias) client, err := NewIBackupService().NewClient(&account)
if _, err := os.Stat(dirItem); err != nil && os.IsNotExist(err) { if err != nil {
if err = os.MkdirAll(dirItem, os.ModePerm); err != nil { return nil, err
return "", err
} }
} pathItem := account.BackupPath
itemDir := path.Join(webItem, website.Alias, "log") clients[target] = cronjobUploadHelper{
logFiles, _ := os.ReadDir(itemDir) client: client,
if len(logFiles) != 0 { backupPath: pathItem,
for i := 0; i < len(logFiles); i++ { backType: account.Type,
if !logFiles[i].IsDir() {
_ = cpBinary([]string{path.Join(itemDir, logFiles[i].Name())}, dirItem)
}
}
}
itemDir2 := path.Join(global.CONF.System.Backup, "log/website", website.Alias)
logFiles2, _ := os.ReadDir(itemDir2)
if len(logFiles2) != 0 {
for i := 0; i < len(logFiles2); i++ {
if !logFiles2[i].IsDir() {
_ = cpBinary([]string{path.Join(itemDir2, logFiles2[i].Name())}, dirItem)
}
} }
} }
} }
global.LOG.Debug("backup website log successful!")
} }
return clients, nil
}
systemLogDir := path.Join(global.CONF.System.BaseDir, "1panel/log") type cronjobUploadHelper struct {
systemDir := path.Join(pathItem, "system") backupPath string
if _, err := os.Stat(systemDir); err != nil && os.IsNotExist(err) { backType string
if err = os.MkdirAll(systemDir, os.ModePerm); err != nil { client cloud_storage.CloudStorageClient
return "", err }
}
}
systemLogFiles, _ := os.ReadDir(systemLogDir)
if len(systemLogFiles) != 0 {
for i := 0; i < len(systemLogFiles); i++ {
if !systemLogFiles[i].IsDir() {
_ = cpBinary([]string{path.Join(systemLogDir, systemLogFiles[i].Name())}, systemDir)
}
}
}
global.LOG.Debug("backup system log successful!")
loginLogFiles, _ := os.ReadDir("/var/log") func (u *CronjobService) uploadCronjobBackFile(cronjob model.Cronjob, accountMap map[string]cronjobUploadHelper, file string) (string, error) {
loginDir := path.Join(pathItem, "login")
if _, err := os.Stat(loginDir); err != nil && os.IsNotExist(err) {
if err = os.MkdirAll(loginDir, os.ModePerm); err != nil {
return "", err
}
}
if len(loginLogFiles) != 0 {
for i := 0; i < len(loginLogFiles); i++ {
if !loginLogFiles[i].IsDir() && (strings.HasPrefix(loginLogFiles[i].Name(), "secure") || strings.HasPrefix(loginLogFiles[i].Name(), "auth.log")) {
_ = cpBinary([]string{path.Join("/var/log", loginLogFiles[i].Name())}, loginDir)
}
}
}
global.LOG.Debug("backup ssh log successful!")
fileName := fmt.Sprintf("system_log_%s.tar.gz", startTime.Format("20060102150405"))
targetDir := path.Dir(pathItem)
if err := handleTar(pathItem, targetDir, fileName, ""); err != nil {
return "", err
}
defer func() { defer func() {
os.RemoveAll(pathItem) _ = os.Remove(file)
os.RemoveAll(path.Join(targetDir, fileName))
}() }()
targets := strings.Split(cronjob.TargetAccountIDs, ",")
cloudSrc := strings.TrimPrefix(file, global.CONF.System.TmpDir+"/")
for _, target := range targets {
if len(target) != 0 {
if _, err := accountMap[target].client.Upload(file, path.Join(accountMap[target].backupPath, cloudSrc)); err != nil {
return "", err
}
}
}
return cloudSrc, nil
}
client, err := NewIBackupService().NewClient(&backup) func (u *CronjobService) removeExpiredBackup(cronjob model.Cronjob, accountMap map[string]cronjobUploadHelper, record model.BackupRecord) {
if err != nil { global.LOG.Infof("start to handle remove expired, retain copies: %d", cronjob.RetainCopies)
return "", err var opts []repo.DBOption
opts = append(opts, commonRepo.WithByFrom("cronjob"))
opts = append(opts, backupRepo.WithByCronID(cronjob.ID))
opts = append(opts, commonRepo.WithOrderBy("created_at desc"))
if record.ID != 0 {
opts = append(opts, backupRepo.WithByType(record.Type))
opts = append(opts, commonRepo.WithByName(record.Name))
opts = append(opts, backupRepo.WithByDetailName(record.DetailName))
}
records, _ := backupRepo.ListRecord(opts...)
if len(records) <= int(cronjob.RetainCopies) {
return
}
for i := int(cronjob.RetainCopies); i < len(records); i++ {
targets := strings.Split(cronjob.TargetAccountIDs, ",")
if cronjob.Type == "snapshot" {
for _, target := range targets {
if len(target) != 0 {
_, _ = accountMap[target].client.Delete(path.Join(accountMap[target].backupPath, "system_snapshot", records[i].FileName))
}
}
_ = snapshotRepo.Delete(commonRepo.WithByName(strings.TrimSuffix(records[i].FileName, ".tar.gz")))
} else {
for _, target := range targets {
if len(target) != 0 {
_, _ = accountMap[target].client.Delete(path.Join(accountMap[target].backupPath, records[i].FileDir, records[i].FileName))
}
}
}
_ = backupRepo.DeleteRecord(context.Background(), commonRepo.WithByID(records[i].ID))
}
}
func (u *CronjobService) removeExpiredLog(cronjob model.Cronjob) {
global.LOG.Infof("start to handle remove expired, retain copies: %d", cronjob.RetainCopies)
records, _ := cronjobRepo.ListRecord(cronjobRepo.WithByJobID(int(cronjob.ID)), commonRepo.WithOrderBy("created_at desc"))
if len(records) <= int(cronjob.RetainCopies) {
return
}
for i := int(cronjob.RetainCopies); i < len(records); i++ {
if len(records[i].File) != 0 {
files := strings.Split(records[i].File, ",")
for _, file := range files {
_ = os.Remove(file)
}
}
_ = cronjobRepo.DeleteRecord(commonRepo.WithByID(uint(records[i].ID)))
_ = os.Remove(records[i].Records)
}
}
func (u *CronjobService) generateLogsPath(cronjob model.Cronjob, startTime time.Time) string {
dir := fmt.Sprintf("%s/task/%s/%s", constant.DataDir, cronjob.Type, cronjob.Name)
if _, err := os.Stat(dir); err != nil && os.IsNotExist(err) {
_ = os.MkdirAll(dir, os.ModePerm)
} }
targetPath := "log/" + fileName path := fmt.Sprintf("%s/%s.log", dir, startTime.Format("20060102150405"))
if len(backup.BackupPath) != 0 { return path
targetPath = strings.TrimPrefix(backup.BackupPath, "/") + "/log/" + fileName
}
if _, err = client.Upload(path.Join(targetDir, fileName), targetPath); err != nil {
return "", err
}
u.HandleRmExpired(backup.Type, backup.BackupPath, "", &cronjob, client)
return targetPath, nil
} }
func hasBackup(cronjobType string) bool { func hasBackup(cronjobType string) bool {
return cronjobType == "app" || cronjobType == "database" || cronjobType == "website" || cronjobType == "directory" || cronjobType == "snapshot" || cronjobType == "log" return cronjobType == "app" || cronjobType == "database" || cronjobType == "website" || cronjobType == "directory" || cronjobType == "snapshot" || cronjobType == "log"
} }
type databaseHelper struct {
DBType string
Database string
Name string
}
func loadDbsForJob(cronjob model.Cronjob) []databaseHelper {
var dbs []databaseHelper
if cronjob.DBName == "all" {
if cronjob.DBType == "mysql" || cronjob.DBType == "mariadb" {
mysqlItems, _ := mysqlRepo.List()
for _, mysql := range mysqlItems {
dbs = append(dbs, databaseHelper{
DBType: cronjob.DBType,
Database: mysql.MysqlName,
Name: mysql.Name,
})
}
} else {
pgItems, _ := postgresqlRepo.List()
for _, pg := range pgItems {
dbs = append(dbs, databaseHelper{
DBType: cronjob.DBType,
Database: pg.PostgresqlName,
Name: pg.Name,
})
}
}
return dbs
}
itemID, _ := (strconv.Atoi(cronjob.DBName))
if cronjob.DBType == "mysql" || cronjob.DBType == "mariadb" {
mysqlItem, _ := mysqlRepo.Get(commonRepo.WithByID(uint(itemID)))
dbs = append(dbs, databaseHelper{
DBType: cronjob.DBType,
Database: mysqlItem.MysqlName,
Name: mysqlItem.Name,
})
} else {
pgItem, _ := postgresqlRepo.Get(commonRepo.WithByID(uint(itemID)))
dbs = append(dbs, databaseHelper{
DBType: cronjob.DBType,
Database: pgItem.PostgresqlName,
Name: pgItem.Name,
})
}
return dbs
}
func loadWebsForJob(cronjob model.Cronjob) []model.Website {
var weblist []model.Website
if cronjob.Website == "all" {
weblist, _ = websiteRepo.List()
return weblist
}
itemID, _ := (strconv.Atoi(cronjob.Website))
webItem, _ := websiteRepo.GetFirst(commonRepo.WithByID(uint(itemID)))
if webItem.ID != 0 {
weblist = append(weblist, webItem)
}
return weblist
}

View File

@ -41,7 +41,7 @@ type ISnapshotService interface {
UpdateDescription(req dto.UpdateDescription) error UpdateDescription(req dto.UpdateDescription) error
readFromJson(path string) (SnapshotJson, error) readFromJson(path string) (SnapshotJson, error)
HandleSnapshot(isCronjob bool, req dto.SnapshotCreate, timeNow string) (string, string, error) HandleSnapshot(isCronjob bool, logPath string, req dto.SnapshotCreate, timeNow string) (string, error)
} }
func NewISnapshotService() ISnapshotService { func NewISnapshotService() ISnapshotService {
@ -132,7 +132,7 @@ type SnapshotJson struct {
} }
func (u *SnapshotService) SnapshotCreate(req dto.SnapshotCreate) error { func (u *SnapshotService) SnapshotCreate(req dto.SnapshotCreate) error {
if _, _, err := u.HandleSnapshot(false, req, time.Now().Format("20060102150405")); err != nil { if _, err := u.HandleSnapshot(false, "", req, time.Now().Format("20060102150405")); err != nil {
return err return err
} }
return nil return nil
@ -469,10 +469,10 @@ func (u *SnapshotService) readFromJson(path string) (SnapshotJson, error) {
return snap, nil return snap, nil
} }
func (u *SnapshotService) HandleSnapshot(isCronjob bool, req dto.SnapshotCreate, timeNow string) (string, string, error) { func (u *SnapshotService) HandleSnapshot(isCronjob bool, logPath string, req dto.SnapshotCreate, timeNow string) (string, error) {
localDir, err := loadLocalDir() localDir, err := loadLocalDir()
if err != nil { if err != nil {
return "", "", err return "", err
} }
var ( var (
rootDir string rootDir string
@ -501,7 +501,7 @@ func (u *SnapshotService) HandleSnapshot(isCronjob bool, req dto.SnapshotCreate,
} else { } else {
snap, err = snapshotRepo.Get(commonRepo.WithByID(req.ID)) snap, err = snapshotRepo.Get(commonRepo.WithByID(req.ID))
if err != nil { if err != nil {
return "", "", err return "", err
} }
snapStatus, _ = snapshotRepo.GetStatus(snap.ID) snapStatus, _ = snapshotRepo.GetStatus(snap.ID)
if snapStatus.ID == 0 { if snapStatus.ID == 0 {
@ -523,7 +523,7 @@ func (u *SnapshotService) HandleSnapshot(isCronjob bool, req dto.SnapshotCreate,
BackupDataDir: localDir, BackupDataDir: localDir,
PanelDataDir: path.Join(global.CONF.System.BaseDir, "1panel"), PanelDataDir: path.Join(global.CONF.System.BaseDir, "1panel"),
} }
loadLogByStatus(snapStatus, logPath)
if snapStatus.PanelInfo != constant.StatusDone { if snapStatus.PanelInfo != constant.StatusDone {
wg.Add(1) wg.Add(1)
go snapJson(itemHelper, jsonItem, rootDir) go snapJson(itemHelper, jsonItem, rootDir)
@ -569,30 +569,35 @@ func (u *SnapshotService) HandleSnapshot(isCronjob bool, req dto.SnapshotCreate,
} }
_ = snapshotRepo.Update(snap.ID, map[string]interface{}{"status": constant.StatusSuccess}) _ = snapshotRepo.Update(snap.ID, map[string]interface{}{"status": constant.StatusSuccess})
}() }()
return "", "", nil return "", nil
} }
wg.Wait() wg.Wait()
if !checkIsAllDone(snap.ID) { if !checkIsAllDone(snap.ID) {
_ = snapshotRepo.Update(snap.ID, map[string]interface{}{"status": constant.StatusFailed}) _ = snapshotRepo.Update(snap.ID, map[string]interface{}{"status": constant.StatusFailed})
return loadLogByStatus(snapStatus), snap.Name, fmt.Errorf("snapshot %s backup failed", snap.Name) loadLogByStatus(snapStatus, logPath)
return snap.Name, fmt.Errorf("snapshot %s backup failed", snap.Name)
} }
snapPanelData(itemHelper, localDir, backupPanelDir) snapPanelData(itemHelper, localDir, backupPanelDir)
if snapStatus.PanelData != constant.StatusDone { if snapStatus.PanelData != constant.StatusDone {
_ = snapshotRepo.Update(snap.ID, map[string]interface{}{"status": constant.StatusFailed}) _ = snapshotRepo.Update(snap.ID, map[string]interface{}{"status": constant.StatusFailed})
return loadLogByStatus(snapStatus), snap.Name, fmt.Errorf("snapshot %s 1panel data failed", snap.Name) loadLogByStatus(snapStatus, logPath)
return snap.Name, fmt.Errorf("snapshot %s 1panel data failed", snap.Name)
} }
snapCompress(itemHelper, rootDir) snapCompress(itemHelper, rootDir)
if snapStatus.Compress != constant.StatusDone { if snapStatus.Compress != constant.StatusDone {
_ = snapshotRepo.Update(snap.ID, map[string]interface{}{"status": constant.StatusFailed}) _ = snapshotRepo.Update(snap.ID, map[string]interface{}{"status": constant.StatusFailed})
return loadLogByStatus(snapStatus), snap.Name, fmt.Errorf("snapshot %s compress failed", snap.Name) loadLogByStatus(snapStatus, logPath)
return snap.Name, fmt.Errorf("snapshot %s compress failed", snap.Name)
} }
snapUpload(itemHelper, req.From, fmt.Sprintf("%s.tar.gz", rootDir)) snapUpload(itemHelper, req.From, fmt.Sprintf("%s.tar.gz", rootDir))
if snapStatus.Upload != constant.StatusDone { if snapStatus.Upload != constant.StatusDone {
_ = snapshotRepo.Update(snap.ID, map[string]interface{}{"status": constant.StatusFailed}) _ = snapshotRepo.Update(snap.ID, map[string]interface{}{"status": constant.StatusFailed})
return loadLogByStatus(snapStatus), snap.Name, fmt.Errorf("snapshot %s upload failed", snap.Name) loadLogByStatus(snapStatus, logPath)
return snap.Name, fmt.Errorf("snapshot %s upload failed", snap.Name)
} }
_ = snapshotRepo.Update(snap.ID, map[string]interface{}{"status": constant.StatusSuccess}) _ = snapshotRepo.Update(snap.ID, map[string]interface{}{"status": constant.StatusSuccess})
return loadLogByStatus(snapStatus), snap.Name, nil loadLogByStatus(snapStatus, logPath)
return snap.Name, nil
} }
func (u *SnapshotService) handleDockerDatas(fileOp files.FileOp, operation string, source, target string) error { func (u *SnapshotService) handleDockerDatas(fileOp files.FileOp, operation string, source, target string) error {
@ -1060,7 +1065,7 @@ func checkIsAllDone(snapID uint) bool {
return true return true
} }
func loadLogByStatus(status model.SnapshotStatus) string { func loadLogByStatus(status model.SnapshotStatus, logPath string) {
logs := "" logs := ""
logs += fmt.Sprintf("Write 1Panel basic information: %s \n", status.PanelInfo) logs += fmt.Sprintf("Write 1Panel basic information: %s \n", status.PanelInfo)
logs += fmt.Sprintf("Backup 1Panel system files: %s \n", status.Panel) logs += fmt.Sprintf("Backup 1Panel system files: %s \n", status.Panel)
@ -1072,5 +1077,11 @@ func loadLogByStatus(status model.SnapshotStatus) string {
logs += fmt.Sprintf("Snapshot size: %s \n", status.Size) logs += fmt.Sprintf("Snapshot size: %s \n", status.Size)
logs += fmt.Sprintf("Upload snapshot file: %s \n", status.Upload) logs += fmt.Sprintf("Upload snapshot file: %s \n", status.Upload)
return logs file, err := os.OpenFile(logPath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0666)
if err != nil {
global.LOG.Errorf("write snapshot logs failed, err: %v", err)
return
}
defer file.Close()
_, _ = file.Write([]byte(logs))
} }

View File

@ -175,7 +175,7 @@ func snapCompress(snap snapHelper, rootDir string) {
_ = snapshotRepo.UpdateStatus(snap.Status.ID, map[string]interface{}{"compress": constant.StatusDone, "size": size}) _ = snapshotRepo.UpdateStatus(snap.Status.ID, map[string]interface{}{"compress": constant.StatusDone, "size": size})
} }
func snapUpload(snap snapHelper, account string, file string) { func snapUpload(snap snapHelper, accounts string, file string) {
source := path.Join(global.CONF.System.TmpDir, "system", path.Base(file)) source := path.Join(global.CONF.System.TmpDir, "system", path.Base(file))
defer func() { defer func() {
global.LOG.Debugf("remove snapshot file %s", source) global.LOG.Debugf("remove snapshot file %s", source)
@ -183,24 +183,20 @@ func snapUpload(snap snapHelper, account string, file string) {
}() }()
_ = snapshotRepo.UpdateStatus(snap.Status.ID, map[string]interface{}{"upload": constant.StatusUploading}) _ = snapshotRepo.UpdateStatus(snap.Status.ID, map[string]interface{}{"upload": constant.StatusUploading})
backup, err := backupRepo.Get(commonRepo.WithByType(account)) accountMap, err := loadClientMapForSnapshot(accounts)
if err != nil { if err != nil {
snap.Status.Upload = err.Error() snap.Status.Upload = err.Error()
_ = snapshotRepo.UpdateStatus(snap.Status.ID, map[string]interface{}{"upload": err.Error()}) _ = snapshotRepo.UpdateStatus(snap.Status.ID, map[string]interface{}{"upload": err.Error()})
return return
} }
client, err := NewIBackupService().NewClient(&backup) targetAccounts := strings.Split(accounts, ",")
if err != nil { for _, item := range targetAccounts {
snap.Status.Upload = err.Error() global.LOG.Debugf("start upload snapshot to %s, dir: %s", item, path.Join(accountMap[item].backupPath, "system_snapshot", path.Base(file)))
_ = snapshotRepo.UpdateStatus(snap.Status.ID, map[string]interface{}{"upload": err.Error()}) if _, err := accountMap[item].client.Upload(source, path.Join(accountMap[item].backupPath, "system_snapshot", path.Base(file))); err != nil {
return snap.Status.Upload = err.Error()
} _ = snapshotRepo.UpdateStatus(snap.Status.ID, map[string]interface{}{"upload": err.Error()})
target := path.Join(strings.TrimPrefix(backup.BackupPath, "/"), "system_snapshot", path.Base(file)) return
global.LOG.Debugf("start upload snapshot to %s, dir: %s", backup.Type, target) }
if _, err := client.Upload(source, target); err != nil {
snap.Status.Upload = err.Error()
_ = snapshotRepo.UpdateStatus(snap.Status.ID, map[string]interface{}{"upload": err.Error()})
return
} }
snap.Status.Upload = constant.StatusDone snap.Status.Upload = constant.StatusDone
_ = snapshotRepo.UpdateStatus(snap.Status.ID, map[string]interface{}{"upload": constant.StatusDone}) _ = snapshotRepo.UpdateStatus(snap.Status.ID, map[string]interface{}{"upload": constant.StatusDone})
@ -241,3 +237,32 @@ func checkPointOfWal() {
global.LOG.Errorf("handle check point failed, err: %v", err) global.LOG.Errorf("handle check point failed, err: %v", err)
} }
} }
// loadClientMapForSnapshot resolves a comma-separated list of backup-account IDs
// into initialized upload helpers, keyed by the ID string.
//
// Unlike a silent skip, an unknown ID is reported as an error: callers such as
// snapUpload index the returned map directly (accountMap[item].client), and a
// missing entry would yield a zero-value helper with a nil client and panic.
func loadClientMapForSnapshot(from string) (map[string]cronjobUploadHelper, error) {
	clients := make(map[string]cronjobUploadHelper)
	accounts, err := backupRepo.List()
	if err != nil {
		return nil, err
	}
	for _, target := range strings.Split(from, ",") {
		if len(target) == 0 {
			continue
		}
		for _, account := range accounts {
			if target != fmt.Sprintf("%v", account.ID) {
				continue
			}
			client, err := NewIBackupService().NewClient(&account)
			if err != nil {
				return nil, err
			}
			clients[target] = cronjobUploadHelper{
				client:     client,
				backupPath: account.BackupPath,
				backType:   account.Type,
			}
			// Account IDs are unique; stop scanning once matched.
			break
		}
		if _, ok := clients[target]; !ok {
			return nil, fmt.Errorf("backup account %s not found for snapshot upload", target)
		}
	}
	return clients, nil
}

View File

@ -12,6 +12,7 @@ const (
Cos = "COS" Cos = "COS"
Kodo = "KODO" Kodo = "KODO"
WebDAV = "WebDAV" WebDAV = "WebDAV"
Local = "LOCAL"
OneDriveRedirectURI = "http://localhost/login/authorized" OneDriveRedirectURI = "http://localhost/login/authorized"
) )

View File

@ -68,6 +68,7 @@ func Init() {
migrations.UpdateCronjobWithWebsite, migrations.UpdateCronjobWithWebsite,
migrations.UpdateOneDriveToken, migrations.UpdateOneDriveToken,
migrations.UpdateCronjobSpec, migrations.UpdateCronjobSpec,
migrations.UpdateBackupRecordPath,
}) })
if err := m.Migrate(); err != nil { if err := m.Migrate(); err != nil {
global.LOG.Error(err) global.LOG.Error(err)

View File

@ -3,6 +3,10 @@ package migrations
import ( import (
"encoding/base64" "encoding/base64"
"encoding/json" "encoding/json"
"errors"
"fmt"
"path"
"strings"
"time" "time"
"github.com/1Panel-dev/1Panel/backend/app/dto/request" "github.com/1Panel-dev/1Panel/backend/app/dto/request"
@ -261,13 +265,161 @@ var UpdateCronjobSpec = &gormigrate.Migration{
if err := tx.AutoMigrate(&model.Cronjob{}); err != nil { if err := tx.AutoMigrate(&model.Cronjob{}); err != nil {
return err return err
} }
if err := tx.AutoMigrate(&model.BackupRecord{}); err != nil {
return err
}
var (
jobs []model.Cronjob
backupAccounts []model.BackupAccount
localAccountID uint
)
mapAccount := make(map[uint]string)
mapAccountName := make(map[string]model.BackupAccount)
if err := tx.Find(&jobs).Error; err != nil {
return err
}
_ = tx.Find(&backupAccounts).Error
for _, item := range backupAccounts {
mapAccount[item.ID] = item.Type
mapAccountName[item.Type] = item
if item.Type == constant.Local {
localAccountID = item.ID
}
}
if localAccountID == 0 {
return errors.New("local backup account is unset!")
}
for _, job := range jobs {
if job.KeepLocal {
if err := tx.Model(&model.Cronjob{}).
Where("id = ?", job.ID).
Updates(map[string]interface{}{
"target_account_ids": fmt.Sprintf("%v,%v", job.TargetDirID, localAccountID),
"target_dir_id": localAccountID,
}).Error; err != nil {
return err
}
} else {
if err := tx.Model(&model.Cronjob{}).
Where("id = ?", job.ID).
Updates(map[string]interface{}{
"target_account_ids": job.TargetDirID,
}).Error; err != nil {
return err
}
}
if job.Type != "directory" && job.Type != "database" && job.Type != "website" && job.Type != "app" && job.Type != "snapshot" && job.Type != "log" {
continue
}
var records []model.JobRecords
_ = tx.Where("cronjob_id = ?", job.ID).Find(&records).Error
for _, record := range records {
if job.Type == "snapshot" {
var snaps []model.Snapshot
_ = tx.Where("name like ?", "snapshot_"+"%").Find(&snaps).Error
for _, snap := range snaps {
item := model.BackupRecord{
From: "cronjob",
CronjobID: job.ID,
Type: "snapshot",
Name: job.Name,
FileName: snap.Name + ".tar.gz",
Source: snap.From,
BackupType: snap.From,
}
_ = tx.Create(&item).Error
}
continue
}
if job.Type == "log" {
item := model.BackupRecord{
From: "cronjob",
CronjobID: job.ID,
Type: "log",
Name: job.Name,
FileDir: path.Dir(record.File),
FileName: path.Base(record.File),
Source: mapAccount[uint(job.TargetDirID)],
BackupType: mapAccount[uint(job.TargetDirID)],
}
_ = tx.Create(&item).Error
continue
}
if job.Type == "directory" {
item := model.BackupRecord{
From: "cronjob",
CronjobID: job.ID,
Type: "directory",
Name: job.Name,
FileDir: path.Dir(record.File),
FileName: path.Base(record.File),
BackupType: mapAccount[uint(job.TargetDirID)],
}
if record.FromLocal {
item.Source = constant.Local
} else {
item.Source = mapAccount[uint(job.TargetDirID)]
}
_ = tx.Create(&item).Error
continue
}
if strings.Contains(record.File, ",") {
files := strings.Split(record.File, ",")
for _, file := range files {
_ = tx.Model(&model.BackupRecord{}).
Where("file_dir = ? AND file_name = ?", path.Dir(file), path.Base(file)).
Updates(map[string]interface{}{"cronjob_id": job.ID, "from": "cronjob"}).Error
}
} else {
_ = tx.Model(&model.BackupRecord{}).
Where("file_dir = ? AND file_name = ?", path.Dir(record.File), path.Base(record.File)).
Updates(map[string]interface{}{"cronjob_id": job.ID, "from": "cronjob"}).Error
}
}
}
_ = tx.Exec("ALTER TABLE cronjobs DROP COLUMN spec_type;").Error _ = tx.Exec("ALTER TABLE cronjobs DROP COLUMN spec_type;").Error
_ = tx.Exec("ALTER TABLE cronjobs DROP COLUMN week;").Error _ = tx.Exec("ALTER TABLE cronjobs DROP COLUMN week;").Error
_ = tx.Exec("ALTER TABLE cronjobs DROP COLUMN day;").Error _ = tx.Exec("ALTER TABLE cronjobs DROP COLUMN day;").Error
_ = tx.Exec("ALTER TABLE cronjobs DROP COLUMN hour;").Error _ = tx.Exec("ALTER TABLE cronjobs DROP COLUMN hour;").Error
_ = tx.Exec("ALTER TABLE cronjobs DROP COLUMN minute;").Error _ = tx.Exec("ALTER TABLE cronjobs DROP COLUMN minute;").Error
_ = tx.Exec("ALTER TABLE cronjobs DROP COLUMN second;").Error _ = tx.Exec("ALTER TABLE cronjobs DROP COLUMN second;").Error
_ = tx.Exec("ALTER TABLE cronjobs DROP COLUMN entry_id;").Error
return nil return nil
}, },
} }
// UpdateBackupRecordPath strips the local backup root directory prefix from the
// file_dir of every backup record whose source is LOCAL, so records store paths
// relative to the configured local backup dir.
// NOTE(review): the migration ID mentions "cronjob-spec" but this migration
// rewrites backup record paths; the ID cannot be changed once shipped, since
// gormigrate tracks applied migrations by ID.
var UpdateBackupRecordPath = &gormigrate.Migration{
	ID: "20240124-update-cronjob-spec",
	Migrate: func(tx *gorm.DB) error {
		var (
			backupRecords []model.BackupRecord
			localAccount model.BackupAccount
		)
		// No local account means nothing to normalize; treat as a no-op.
		_ = tx.Where("type = ?", "LOCAL").First(&localAccount).Error
		if localAccount.ID == 0 {
			return nil
		}
		// The local account stores its settings as a JSON map in Vars;
		// "dir" is the backup root directory on disk.
		varMap := make(map[string]string)
		if err := json.Unmarshal([]byte(localAccount.Vars), &varMap); err != nil {
			return err
		}
		dir, ok := varMap["dir"]
		if !ok {
			return errors.New("load local backup dir failed")
		}
		// Ensure the prefix ends with "/" so TrimPrefix removes the separator
		// too; when dir is "/" itself the bare slash already is the separator.
		if dir != "/" {
			dir += "/"
		}
		// Best-effort per-record update; individual failures are ignored so one
		// bad row does not abort the whole migration.
		_ = tx.Where("source = ?", "LOCAL").Find(&backupRecords).Error
		for _, record := range backupRecords {
			_ = tx.Model(&model.BackupRecord{}).
				Where("id = ?", record.ID).
				Updates(map[string]interface{}{"file_dir": strings.TrimPrefix(record.FileDir, dir)}).Error
		}
		return nil
	},
}

View File

@ -54,6 +54,7 @@ func (s *SettingRouter) InitRouter(Router *gin.RouterGroup) {
settingRouter.POST("/backup/del", baseApi.DeleteBackup) settingRouter.POST("/backup/del", baseApi.DeleteBackup)
settingRouter.POST("/backup/update", baseApi.UpdateBackup) settingRouter.POST("/backup/update", baseApi.UpdateBackup)
settingRouter.POST("/backup/record/search", baseApi.SearchBackupRecords) settingRouter.POST("/backup/record/search", baseApi.SearchBackupRecords)
settingRouter.POST("/backup/record/search/bycronjob", baseApi.SearchBackupRecordsByCronjob)
settingRouter.POST("/backup/record/download", baseApi.DownloadRecord) settingRouter.POST("/backup/record/download", baseApi.DownloadRecord)
settingRouter.POST("/backup/record/del", baseApi.DeleteBackupRecord) settingRouter.POST("/backup/record/del", baseApi.DeleteBackupRecord)

View File

@ -0,0 +1,69 @@
package client
import (
"fmt"
"os"
"path"
"github.com/1Panel-dev/1Panel/backend/utils/cmd"
)
// localClient implements the cloud-storage client interface for the LOCAL
// backup account: all operations act on files under a base directory on the
// local filesystem.
type localClient struct {
	dir string // base directory all file arguments are resolved against
}

// NewLocalClient builds a client from the account's vars map; "dir" is the
// required local backup root directory.
func NewLocalClient(vars map[string]interface{}) (*localClient, error) {
	dir := loadParamFromVars("dir", true, vars)
	return &localClient{dir: dir}, nil
}

// ListBuckets is meaningless for local storage; it is a no-op kept to satisfy
// the client interface.
func (c localClient) ListBuckets() ([]interface{}, error) {
	return nil, nil
}

// Exist reports whether file exists under the base directory.
// A missing file is not an error: (false, nil). Other stat failures
// (e.g. permission denied) are returned to the caller.
func (c localClient) Exist(file string) (bool, error) {
	_, err := os.Stat(path.Join(c.dir, file))
	if err != nil {
		if os.IsNotExist(err) {
			return false, nil
		}
		return false, err
	}
	return true, nil
}

// Size returns the size in bytes of file under the base directory.
func (c localClient) Size(file string) (int64, error) {
	fileInfo, err := os.Stat(path.Join(c.dir, file))
	if err != nil {
		return 0, err
	}
	return fileInfo.Size(), nil
}

// Delete removes file (or directory tree) under the base directory.
func (c localClient) Delete(file string) (bool, error) {
	if err := os.RemoveAll(path.Join(c.dir, file)); err != nil {
		return false, err
	}
	return true, nil
}

// Upload copies src into the base directory at the relative path target,
// creating intermediate directories as needed. "\cp" bypasses any shell alias
// (e.g. cp -i) so the copy never prompts.
func (c localClient) Upload(src, target string) (bool, error) {
	targetFilePath := path.Join(c.dir, target)
	if _, err := os.Stat(path.Dir(targetFilePath)); err != nil {
		if os.IsNotExist(err) {
			if err = os.MkdirAll(path.Dir(targetFilePath), os.ModePerm); err != nil {
				return false, err
			}
		} else {
			return false, err
		}
	}
	stdout, err := cmd.Execf("\\cp -f %s %s", src, targetFilePath)
	if err != nil {
		return false, fmt.Errorf("cp file failed, stdout: %v, err: %v", stdout, err)
	}
	return true, nil
}

// Download is a no-op for local storage: the backup file already lives on this
// machine, so there is nothing to fetch.
func (c localClient) Download(src, target string) (bool, error) {
	return true, nil
}

// ListObjects is not supported for local storage; it is a no-op kept to
// satisfy the client interface.
func (c localClient) ListObjects(prefix string) ([]string, error) {
	return nil, nil
}

View File

@ -56,6 +56,17 @@ func (s sftpClient) Upload(src, target string) (bool, error) {
} }
defer srcFile.Close() defer srcFile.Close()
targetFilePath := path.Join(s.bucket, target)
targetDir, _ := path.Split(targetFilePath)
if _, err = client.Stat(targetDir); err != nil {
if os.IsNotExist(err) {
if err = client.MkdirAll(targetDir); err != nil {
return false, err
}
} else {
return false, err
}
}
dstFile, err := client.Create(path.Join(s.bucket, target)) dstFile, err := client.Create(path.Join(s.bucket, target))
if err != nil { if err != nil {
return false, err return false, err

View File

@ -18,6 +18,8 @@ type CloudStorageClient interface {
func NewCloudStorageClient(backupType string, vars map[string]interface{}) (CloudStorageClient, error) { func NewCloudStorageClient(backupType string, vars map[string]interface{}) (CloudStorageClient, error) {
switch backupType { switch backupType {
case constant.Local:
return client.NewLocalClient(vars)
case constant.S3: case constant.S3:
return client.NewS3Client(vars) return client.NewS3Client(vars)
case constant.OSS: case constant.OSS:

View File

@ -4,7 +4,7 @@ import (
"bytes" "bytes"
"context" "context"
"fmt" "fmt"
"io" "os"
"os/exec" "os/exec"
"strings" "strings"
"time" "time"
@ -74,24 +74,27 @@ func ExecContainerScript(containerName, cmdStr string, timeout time.Duration) er
return nil return nil
} }
func ExecCronjobWithTimeOut(cmdStr string, workdir string, timeout time.Duration) (string, error) { func ExecCronjobWithTimeOut(cmdStr, workdir, outPath string, timeout time.Duration) error {
ctx, cancel := context.WithTimeout(context.Background(), timeout) ctx, cancel := context.WithTimeout(context.Background(), timeout)
defer cancel() defer cancel()
file, err := os.OpenFile(outPath, os.O_WRONLY|os.O_CREATE, 0666)
if err != nil {
return err
}
defer file.Close()
cmd := exec.Command("bash", "-c", cmdStr) cmd := exec.Command("bash", "-c", cmdStr)
cmd.Dir = workdir cmd.Dir = workdir
var stdout, stderr bytes.Buffer cmd.Stdout = file
cmd.Stdout = &stdout cmd.Stderr = file
cmd.Stderr = &stderr
output := new(bytes.Buffer)
cmd.Stdout = io.MultiWriter(output, cmd.Stdout)
cmd.Stderr = io.MultiWriter(output, cmd.Stderr)
err := cmd.Run() err = cmd.Run()
if ctx.Err() == context.DeadlineExceeded { if ctx.Err() == context.DeadlineExceeded {
return "", buserr.New(constant.ErrCmdTimeout) return buserr.New(constant.ErrCmdTimeout)
} }
return output.String(), err return err
} }
func Execf(cmdStr string, a ...interface{}) (string, error) { func Execf(cmdStr string, a ...interface{}) (string, error) {

View File

@ -9523,6 +9523,39 @@ const docTemplate = `{
} }
} }
}, },
"/settings/backup/record/search/bycronjob": {
"post": {
"security": [
{
"ApiKeyAuth": []
}
],
"description": "通过计划任务获取备份记录列表分页",
"consumes": [
"application/json"
],
"tags": [
"Backup Account"
],
"summary": "Page backup records by cronjob",
"parameters": [
{
"description": "request",
"name": "request",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/dto.RecordSearchByCronjob"
}
}
],
"responses": {
"200": {
"description": "OK"
}
}
}
},
"/settings/backup/recover": { "/settings/backup/recover": {
"post": { "post": {
"security": [ "security": [
@ -14937,9 +14970,6 @@ const docTemplate = `{
"exclusionRules": { "exclusionRules": {
"type": "string" "type": "string"
}, },
"keepLocal": {
"type": "boolean"
},
"name": { "name": {
"type": "string" "type": "string"
}, },
@ -14956,6 +14986,9 @@ const docTemplate = `{
"spec": { "spec": {
"type": "string" "type": "string"
}, },
"targetAccountIDs": {
"type": "string"
},
"targetDirID": { "targetDirID": {
"type": "integer" "type": "integer"
}, },
@ -15011,9 +15044,6 @@ const docTemplate = `{
"id": { "id": {
"type": "integer" "type": "integer"
}, },
"keepLocal": {
"type": "boolean"
},
"name": { "name": {
"type": "string" "type": "string"
}, },
@ -15030,6 +15060,9 @@ const docTemplate = `{
"spec": { "spec": {
"type": "string" "type": "string"
}, },
"targetAccountIDs": {
"type": "string"
},
"targetDirID": { "targetDirID": {
"type": "integer" "type": "integer"
}, },
@ -17232,6 +17265,25 @@ const docTemplate = `{
} }
} }
}, },
"dto.RecordSearchByCronjob": {
"type": "object",
"required": [
"cronjobID",
"page",
"pageSize"
],
"properties": {
"cronjobID": {
"type": "integer"
},
"page": {
"type": "integer"
},
"pageSize": {
"type": "integer"
}
}
},
"dto.RedisConf": { "dto.RedisConf": {
"type": "object", "type": "object",
"properties": { "properties": {
@ -17848,17 +17900,7 @@ const docTemplate = `{
"maxLength": 256 "maxLength": 256
}, },
"from": { "from": {
"type": "string", "type": "string"
"enum": [
"OSS",
"S3",
"SFTP",
"MINIO",
"COS",
"KODO",
"OneDrive",
"WebDAV"
]
}, },
"id": { "id": {
"type": "integer" "type": "integer"

View File

@ -9516,6 +9516,39 @@
} }
} }
}, },
"/settings/backup/record/search/bycronjob": {
"post": {
"security": [
{
"ApiKeyAuth": []
}
],
"description": "通过计划任务获取备份记录列表分页",
"consumes": [
"application/json"
],
"tags": [
"Backup Account"
],
"summary": "Page backup records by cronjob",
"parameters": [
{
"description": "request",
"name": "request",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/dto.RecordSearchByCronjob"
}
}
],
"responses": {
"200": {
"description": "OK"
}
}
}
},
"/settings/backup/recover": { "/settings/backup/recover": {
"post": { "post": {
"security": [ "security": [
@ -14930,9 +14963,6 @@
"exclusionRules": { "exclusionRules": {
"type": "string" "type": "string"
}, },
"keepLocal": {
"type": "boolean"
},
"name": { "name": {
"type": "string" "type": "string"
}, },
@ -14949,6 +14979,9 @@
"spec": { "spec": {
"type": "string" "type": "string"
}, },
"targetAccountIDs": {
"type": "string"
},
"targetDirID": { "targetDirID": {
"type": "integer" "type": "integer"
}, },
@ -15004,9 +15037,6 @@
"id": { "id": {
"type": "integer" "type": "integer"
}, },
"keepLocal": {
"type": "boolean"
},
"name": { "name": {
"type": "string" "type": "string"
}, },
@ -15023,6 +15053,9 @@
"spec": { "spec": {
"type": "string" "type": "string"
}, },
"targetAccountIDs": {
"type": "string"
},
"targetDirID": { "targetDirID": {
"type": "integer" "type": "integer"
}, },
@ -17225,6 +17258,25 @@
} }
} }
}, },
"dto.RecordSearchByCronjob": {
"type": "object",
"required": [
"cronjobID",
"page",
"pageSize"
],
"properties": {
"cronjobID": {
"type": "integer"
},
"page": {
"type": "integer"
},
"pageSize": {
"type": "integer"
}
}
},
"dto.RedisConf": { "dto.RedisConf": {
"type": "object", "type": "object",
"properties": { "properties": {
@ -17841,17 +17893,7 @@
"maxLength": 256 "maxLength": 256
}, },
"from": { "from": {
"type": "string", "type": "string"
"enum": [
"OSS",
"S3",
"SFTP",
"MINIO",
"COS",
"KODO",
"OneDrive",
"WebDAV"
]
}, },
"id": { "id": {
"type": "integer" "type": "integer"

View File

@ -588,8 +588,6 @@ definitions:
type: string type: string
exclusionRules: exclusionRules:
type: string type: string
keepLocal:
type: boolean
name: name:
type: string type: string
retainCopies: retainCopies:
@ -601,6 +599,8 @@ definitions:
type: string type: string
spec: spec:
type: string type: string
targetAccountIDs:
type: string
targetDirID: targetDirID:
type: integer type: integer
type: type:
@ -638,8 +638,6 @@ definitions:
type: string type: string
id: id:
type: integer type: integer
keepLocal:
type: boolean
name: name:
type: string type: string
retainCopies: retainCopies:
@ -651,6 +649,8 @@ definitions:
type: string type: string
spec: spec:
type: string type: string
targetAccountIDs:
type: string
targetDirID: targetDirID:
type: integer type: integer
url: url:
@ -2147,6 +2147,19 @@ definitions:
- pageSize - pageSize
- type - type
type: object type: object
dto.RecordSearchByCronjob:
properties:
cronjobID:
type: integer
page:
type: integer
pageSize:
type: integer
required:
- cronjobID
- page
- pageSize
type: object
dto.RedisConf: dto.RedisConf:
properties: properties:
containerName: containerName:
@ -2555,15 +2568,6 @@ definitions:
maxLength: 256 maxLength: 256
type: string type: string
from: from:
enum:
- OSS
- S3
- SFTP
- MINIO
- COS
- KODO
- OneDrive
- WebDAV
type: string type: string
id: id:
type: integer type: integer
@ -11008,6 +11012,26 @@ paths:
summary: Page backup records summary: Page backup records
tags: tags:
- Backup Account - Backup Account
/settings/backup/record/search/bycronjob:
post:
consumes:
- application/json
description: 通过计划任务获取备份记录列表分页
parameters:
- description: request
in: body
name: request
required: true
schema:
$ref: '#/definitions/dto.RecordSearchByCronjob'
responses:
"200":
description: OK
security:
- ApiKeyAuth: []
summary: Page backup records by cronjob
tags:
- Backup Account
/settings/backup/recover: /settings/backup/recover:
post: post:
consumes: consumes:

View File

@ -50,6 +50,9 @@ export namespace Backup {
name: string; name: string;
detailName: string; detailName: string;
} }
export interface SearchBackupRecordByCronjob extends ReqPage {
cronjobID: number;
}
export interface Backup { export interface Backup {
type: string; type: string;
name: string; name: string;

View File

@ -18,9 +18,9 @@ export namespace Cronjob {
dbName: string; dbName: string;
url: string; url: string;
sourceDir: string; sourceDir: string;
keepLocal: boolean;
targetDirID: number; targetDirID: number;
targetDir: string; targetAccountIDs: string;
targetAccountIDList: Array<number>;
retainCopies: number; retainCopies: number;
status: string; status: string;
} }
@ -37,8 +37,8 @@ export namespace Cronjob {
dbName: string; dbName: string;
url: string; url: string;
sourceDir: string; sourceDir: string;
keepLocal: boolean;
targetDirID: number; targetDirID: number;
targetAccountIDs: string;
retainCopies: number; retainCopies: number;
} }
export interface SpecObj { export interface SpecObj {
@ -60,8 +60,8 @@ export namespace Cronjob {
dbName: string; dbName: string;
url: string; url: string;
sourceDir: string; sourceDir: string;
keepLocal: boolean;
targetDirID: number; targetDirID: number;
targetAccountIDs: string;
retainCopies: number; retainCopies: number;
} }
export interface CronjobDelete { export interface CronjobDelete {

View File

@ -96,6 +96,9 @@ export const deleteBackupRecord = (params: { ids: number[] }) => {
export const searchBackupRecords = (params: Backup.SearchBackupRecord) => { export const searchBackupRecords = (params: Backup.SearchBackupRecord) => {
return http.post<ResPage<Backup.RecordInfo>>(`/settings/backup/record/search`, params); return http.post<ResPage<Backup.RecordInfo>>(`/settings/backup/record/search`, params);
}; };
/** Page the backup records produced by a specific cronjob (by cronjobID). */
export const searchBackupRecordsByCronjob = (params: Backup.SearchBackupRecordByCronjob) => {
    return http.post<ResPage<Backup.RecordInfo>>(`/settings/backup/record/search/bycronjob`, params);
};
export const getBackupList = () => { export const getBackupList = () => {
return http.get<Array<Backup.BackupInfo>>(`/settings/backup/search`); return http.get<Array<Backup.BackupInfo>>(`/settings/backup/search`);

View File

@ -812,6 +812,7 @@ const message = {
allOptionHelper: allOptionHelper:
'The current task plan is to back up all [{0}]. Direct download is not supported at the moment. You can check the backup list of [{0}] menu.', 'The current task plan is to back up all [{0}]. Direct download is not supported at the moment. You can check the backup list of [{0}] menu.',
exclusionRules: 'Exclusive rule', exclusionRules: 'Exclusive rule',
default_download_path: 'Default Download Link',
saveLocal: 'Retain local backups (the same as the number of cloud storage copies)', saveLocal: 'Retain local backups (the same as the number of cloud storage copies)',
url: 'URL Address', url: 'URL Address',
target: 'Target', target: 'Target',

View File

@ -773,6 +773,7 @@ const message = {
snapshot: '系統快照', snapshot: '系統快照',
allOptionHelper: '當前計劃任務為備份所有{0}暫不支持直接下載可在{0}備份列表中查看', allOptionHelper: '當前計劃任務為備份所有{0}暫不支持直接下載可在{0}備份列表中查看',
exclusionRules: '排除規則', exclusionRules: '排除規則',
default_download_path: '默認下載地址',
saveLocal: '同時保留本地備份和雲存儲保留份數一致', saveLocal: '同時保留本地備份和雲存儲保留份數一致',
url: 'URL 地址', url: 'URL 地址',
target: '備份到', target: '備份到',

View File

@ -774,6 +774,7 @@ const message = {
snapshot: '系统快照', snapshot: '系统快照',
allOptionHelper: '当前计划任务为备份所有{0}暂不支持直接下载可在{0}备份列表中查看', allOptionHelper: '当前计划任务为备份所有{0}暂不支持直接下载可在{0}备份列表中查看',
exclusionRules: '排除规则', exclusionRules: '排除规则',
default_download_path: '默认下载地址',
saveLocal: '同时保留本地备份和云存储保留份数一致', saveLocal: '同时保留本地备份和云存储保留份数一致',
url: 'URL 地址', url: 'URL 地址',
target: '备份到', target: '备份到',

View File

@ -0,0 +1,127 @@
<template>
<div>
<el-drawer v-model="backupVisible" :destroy-on-close="true" :close-on-click-modal="false" size="50%">
<template #header>
<DrawerHeader
v-if="cronjob"
:header="$t('commons.button.backup')"
:resource="cronjob"
:back="handleClose"
/>
<DrawerHeader v-else :header="$t('commons.button.backup')" :resource="cronjob" :back="handleClose" />
</template>
<ComplexTable
v-loading="loading"
:pagination-config="paginationConfig"
v-model:selects="selects"
@search="search"
:data="data"
>
<el-table-column :label="$t('commons.table.name')" prop="fileName" show-overflow-tooltip />
<el-table-column :label="$t('file.size')" prop="size" show-overflow-tooltip>
<template #default="{ row }">
<span v-if="row.size">
{{ computeSize(row.size) }}
</span>
<span v-else>-</span>
</template>
</el-table-column>
<el-table-column :label="$t('database.source')" prop="backupType">
<template #default="{ row }">
<span v-if="row.source">
{{ $t('setting.' + row.source) }}
</span>
</template>
</el-table-column>
<el-table-column
prop="createdAt"
:label="$t('commons.table.date')"
:formatter="dateFormat"
show-overflow-tooltip
/>
<fu-table-operations width="130px" :buttons="buttons" :label="$t('commons.table.operate')" fix />
</ComplexTable>
</el-drawer>
</div>
</template>
<script lang="ts" setup>
// Drawer listing the backup records created by one cronjob, with per-row download.
import { reactive, ref } from 'vue';
import { computeSize, dateFormat, downloadFile } from '@/utils/util';
import i18n from '@/lang';
import DrawerHeader from '@/components/drawer-header/index.vue';
import { downloadBackupRecord, searchBackupRecordsByCronjob } from '@/api/modules/setting';
import { Backup } from '@/api/interface/backup';

// Table selection model (bound in the template via v-model:selects).
const selects = ref<any>([]);
const loading = ref();
// Current page of backup records shown in the table.
const data = ref();
const paginationConfig = reactive({
    cacheSizeKey: 'backup-cronjob-page-size',
    currentPage: 1,
    pageSize: 10,
    total: 0,
});

const backupVisible = ref(false);
// Name of the cronjob (shown in the drawer header) and its ID (used for search).
const cronjob = ref();
const cronjobID = ref();

interface DialogProps {
    cronjob: string;
    cronjobID: number;
}
// Entry point called by the parent (via defineExpose) to open the drawer.
const acceptParams = (params: DialogProps): void => {
    cronjob.value = params.cronjob;
    cronjobID.value = params.cronjobID;
    backupVisible.value = true;
    search();
};
const handleClose = () => {
    backupVisible.value = false;
};

// Fetch one page of backup records for the current cronjob.
const search = async () => {
    let params = {
        page: paginationConfig.currentPage,
        pageSize: paginationConfig.pageSize,
        cronjobID: cronjobID.value,
    };
    loading.value = true;
    await searchBackupRecordsByCronjob(params)
        .then((res) => {
            loading.value = false;
            data.value = res.data.items || [];
            paginationConfig.total = res.data.total;
        })
        .catch(() => {
            // Errors are surfaced by the http layer; just stop the spinner here.
            loading.value = false;
        });
};

// Ask the backend for a download URL of the record, then trigger the download.
const onDownload = async (row: Backup.RecordInfo) => {
    let params = {
        source: row.source,
        fileDir: row.fileDir,
        fileName: row.fileName,
    };
    await downloadBackupRecord(params).then(async (res) => {
        downloadFile(res.data);
    });
};

// Row operation buttons rendered by fu-table-operations in the template.
const buttons = [
    {
        label: i18n.global.t('commons.button.download'),
        click: (row: Backup.RecordInfo) => {
            onDownload(row);
        },
    },
];

defineExpose({
    acceptParams,
});
</script>

View File

@ -2,6 +2,52 @@ import { Cronjob } from '@/api/interface/cronjob';
import i18n from '@/lang'; import i18n from '@/lang';
import { loadZero } from '@/utils/util'; import { loadZero } from '@/utils/util';
export const shortcuts = [
{
text: i18n.global.t('monitor.today'),
value: () => {
const end = new Date(new Date().setHours(23, 59, 59, 999));
const start = new Date(new Date().setHours(0, 0, 0, 0));
return [start, end];
},
},
{
text: i18n.global.t('monitor.yesterday'),
value: () => {
const itemDate = new Date(new Date().getTime() - 3600 * 1000 * 24 * 1);
const end = new Date(itemDate.setHours(23, 59, 59, 999));
const start = new Date(itemDate.setHours(0, 0, 0, 0));
return [start, end];
},
},
{
text: i18n.global.t('monitor.lastNDay', [3]),
value: () => {
const itemDate = new Date(new Date().getTime() - 3600 * 1000 * 24 * 3);
const end = new Date(new Date().setHours(23, 59, 59, 999));
const start = new Date(itemDate.setHours(0, 0, 0, 0));
return [start, end];
},
},
{
text: i18n.global.t('monitor.lastNDay', [7]),
value: () => {
const itemDate = new Date(new Date().getTime() - 3600 * 1000 * 24 * 7);
const end = new Date(new Date().setHours(23, 59, 59, 999));
const start = new Date(itemDate.setHours(0, 0, 0, 0));
return [start, end];
},
},
{
text: i18n.global.t('monitor.lastNDay', [30]),
value: () => {
const itemDate = new Date(new Date().getTime() - 3600 * 1000 * 24 * 30);
const end = new Date(new Date().setHours(23, 59, 59, 999));
const start = new Date(itemDate.setHours(0, 0, 0, 0));
return [start, end];
},
},
];
export const specOptions = [ export const specOptions = [
{ label: i18n.global.t('cronjob.perMonth'), value: 'perMonth' }, { label: i18n.global.t('cronjob.perMonth'), value: 'perMonth' },
{ label: i18n.global.t('cronjob.perWeek'), value: 'perWeek' }, { label: i18n.global.t('cronjob.perWeek'), value: 'perWeek' },

View File

@ -100,8 +100,14 @@
</div> </div>
</template> </template>
</el-table-column> </el-table-column>
<el-table-column :label="$t('cronjob.retainCopies')" :min-width="90" prop="retainCopies" /> <el-table-column :label="$t('cronjob.retainCopies')" :min-width="90" prop="retainCopies">
<template #default="{ row }">
<el-button v-if="hasBackup(row.type)" @click="loadBackups(row)" link type="primary">
{{ row.retainCopies }}
</el-button>
<span v-else>{{ row.retainCopies }}</span>
</template>
</el-table-column>
<el-table-column :label="$t('cronjob.lastRecordTime')" :min-width="120" prop="lastRecordTime"> <el-table-column :label="$t('cronjob.lastRecordTime')" :min-width="120" prop="lastRecordTime">
<template #default="{ row }"> <template #default="{ row }">
{{ row.lastRecordTime }} {{ row.lastRecordTime }}
@ -109,7 +115,30 @@
</el-table-column> </el-table-column>
<el-table-column :min-width="80" :label="$t('cronjob.target')" prop="targetDir"> <el-table-column :min-width="80" :label="$t('cronjob.target')" prop="targetDir">
<template #default="{ row }"> <template #default="{ row }">
{{ row.targetDir }} <div v-for="(item, index) of row.targetAccounts.split(',')" :key="index" class="mt-1">
<div v-if="row.accountExpand || (!row.accountExpand && index < 3)">
<el-tag v-if="row.targetAccounts">
<span v-if="item === row.targetDir">
<el-icon><Star /></el-icon>
{{ $t('setting.' + item) }}
</span>
<span v-else>
{{ $t('setting.' + item) }}
</span>
</el-tag>
<span v-else>-</span>
</div>
</div>
<div v-if="!row.accountExpand && row.targetAccounts.split(',').length > 3">
<el-button type="primary" link @click="row.accountExpand = true">
{{ $t('commons.button.expand') }}...
</el-button>
</div>
<div v-if="row.accountExpand && row.targetAccounts.split(',').length > 3">
<el-button type="primary" link @click="row.accountExpand = false">
{{ $t('commons.button.collapse') }}
</el-button>
</div>
</template> </template>
</el-table-column> </el-table-column>
<fu-table-operations <fu-table-operations
@ -137,6 +166,7 @@
</OpDialog> </OpDialog>
<OperateDialog @search="search" ref="dialogRef" /> <OperateDialog @search="search" ref="dialogRef" />
<Records @search="search" ref="dialogRecordRef" /> <Records @search="search" ref="dialogRecordRef" />
<Backups @search="search" ref="dialogBackupRef" />
</div> </div>
</template> </template>
@ -146,6 +176,7 @@ import TableSetting from '@/components/table-setting/index.vue';
import Tooltip from '@/components/tooltip/index.vue'; import Tooltip from '@/components/tooltip/index.vue';
import OperateDialog from '@/views/cronjob/operate/index.vue'; import OperateDialog from '@/views/cronjob/operate/index.vue';
import Records from '@/views/cronjob/record/index.vue'; import Records from '@/views/cronjob/record/index.vue';
import Backups from '@/views/cronjob/backup/index.vue';
import { onMounted, reactive, ref } from 'vue'; import { onMounted, reactive, ref } from 'vue';
import { deleteCronjob, getCronjobPage, handleOnce, updateStatus } from '@/api/modules/cronjob'; import { deleteCronjob, getCronjobPage, handleOnce, updateStatus } from '@/api/modules/cronjob';
import i18n from '@/lang'; import i18n from '@/lang';
@ -190,9 +221,16 @@ const search = async (column?: any) => {
loading.value = false; loading.value = false;
data.value = res.data.items || []; data.value = res.data.items || [];
for (const item of data.value) { for (const item of data.value) {
if (item.targetDir !== '-' && item.targetDir !== '') { item.targetAccounts = item.targetAccounts.split(',') || [];
item.targetDir = i18n.global.t('setting.' + item.targetDir); let accounts = [];
for (const account of item.targetAccounts) {
if (account == item.targetDir) {
accounts.unshift(account);
} else {
accounts.push(account);
}
} }
item.targetAccounts = accounts.join(',');
} }
paginationConfig.total = res.data.total; paginationConfig.total = res.data.total;
}) })
@ -202,6 +240,7 @@ const search = async (column?: any) => {
}; };
const dialogRecordRef = ref(); const dialogRecordRef = ref();
const dialogBackupRef = ref();
const dialogRef = ref(); const dialogRef = ref();
const onOpenDialog = async ( const onOpenDialog = async (
@ -218,7 +257,6 @@ const onOpenDialog = async (
}, },
], ],
type: 'shell', type: 'shell',
keepLocal: true,
retainCopies: 7, retainCopies: 7,
}, },
) => { ) => {
@ -294,6 +332,10 @@ const onBatchChangeStatus = async (status: string) => {
}); });
}; };
const loadBackups = async (row: any) => {
dialogBackupRef.value!.acceptParams({ cronjobID: row.id, cronjob: row.name });
};
const onHandle = async (row: Cronjob.CronjobInfo) => { const onHandle = async (row: Cronjob.CronjobInfo) => {
loading.value = true; loading.value = true;
await handleOnce(row.id) await handleOnce(row.id)

View File

@ -114,10 +114,11 @@
type="primary" type="primary"
style="float: right; margin-top: 5px" style="float: right; margin-top: 5px"
@click="handleSpecDelete(index)" @click="handleSpecDelete(index)"
v-if="dialogData.rowData.specObjs.length > 1"
> >
{{ $t('commons.button.delete') }} {{ $t('commons.button.delete') }}
</el-button> </el-button>
<el-divider class="divider" /> <el-divider v-if="dialogData.rowData.specObjs.length > 1" class="divider" />
</div> </div>
<el-button class="mt-3" @click="handleSpecAdd()"> <el-button class="mt-3" @click="handleSpecAdd()">
{{ $t('commons.button.add') }} {{ $t('commons.button.add') }}
@ -239,14 +240,15 @@
</el-form-item> </el-form-item>
<div v-if="isBackup()"> <div v-if="isBackup()">
<el-form-item :label="$t('cronjob.target')" prop="targetDirID"> <el-form-item :label="$t('cronjob.target')" prop="targetAccountIDList">
<el-select class="selectClass" v-model="dialogData.rowData!.targetDirID"> <el-select
multiple
class="selectClass"
v-model="dialogData.rowData!.targetAccountIDList"
@change="changeAccount"
>
<div v-for="item in backupOptions" :key="item.label"> <div v-for="item in backupOptions" :key="item.label">
<el-option <el-option :value="item.value" :label="item.label" />
v-if="item.label !== $t('setting.LOCAL') || (dialogData.rowData!.type !== 'snapshot' && dialogData.rowData!.type !== 'log')"
:value="item.value"
:label="item.label"
/>
</div> </div>
</el-select> </el-select>
<span class="input-help"> <span class="input-help">
@ -261,12 +263,12 @@
</el-link> </el-link>
</span> </span>
</el-form-item> </el-form-item>
<el-form-item <el-form-item :label="$t('cronjob.default_download_path')" prop="targetDirID">
v-if="dialogData.rowData!.targetDirID !== localDirID && dialogData.rowData!.type !== 'snapshot' && dialogData.rowData!.type !== 'log'" <el-select class="selectClass" v-model="dialogData.rowData!.targetDirID">
> <div v-for="item in accountOptions" :key="item.label">
<el-checkbox v-model="dialogData.rowData!.keepLocal"> <el-option :value="item.value" :label="item.label" />
{{ $t('cronjob.saveLocal') }} </div>
</el-checkbox> </el-select>
</el-form-item> </el-form-item>
</div> </div>
@ -356,6 +358,13 @@ const acceptParams = (params: DialogProps): void => {
changeType(); changeType();
dialogData.value.rowData.dbType = 'mysql'; dialogData.value.rowData.dbType = 'mysql';
} }
if (dialogData.value.rowData.targetAccountIDs) {
dialogData.value.rowData.targetAccountIDList = [];
let ids = dialogData.value.rowData.targetAccountIDs.split(',');
for (const id of ids) {
dialogData.value.rowData.targetAccountIDList.push(Number(id));
}
}
title.value = i18n.global.t('cronjob.' + dialogData.value.title); title.value = i18n.global.t('cronjob.' + dialogData.value.title);
if (dialogData.value?.rowData?.exclusionRules) { if (dialogData.value?.rowData?.exclusionRules) {
dialogData.value.rowData.exclusionRules = dialogData.value.rowData.exclusionRules.replaceAll(',', '\n'); dialogData.value.rowData.exclusionRules = dialogData.value.rowData.exclusionRules.replaceAll(',', '\n');
@ -389,6 +398,7 @@ const localDirID = ref();
const containerOptions = ref([]); const containerOptions = ref([]);
const websiteOptions = ref([]); const websiteOptions = ref([]);
const backupOptions = ref([]); const backupOptions = ref([]);
const accountOptions = ref([]);
const appOptions = ref([]); const appOptions = ref([]);
const dbInfo = reactive({ const dbInfo = reactive({
@ -399,6 +409,9 @@ const dbInfo = reactive({
}); });
const verifySpec = (rule: any, value: any, callback: any) => { const verifySpec = (rule: any, value: any, callback: any) => {
if (dialogData.value.rowData!.specObjs.length === 0) {
callback(new Error(i18n.global.t('cronjob.cronSpecRule')));
}
for (const item of dialogData.value.rowData!.specObjs) { for (const item of dialogData.value.rowData!.specObjs) {
switch (item.specType) { switch (item.specType) {
case 'perMonth': case 'perMonth':
@ -451,6 +464,7 @@ const rules = reactive({
dbName: [Rules.requiredSelect], dbName: [Rules.requiredSelect],
url: [Rules.requiredInput], url: [Rules.requiredInput],
sourceDir: [Rules.requiredInput], sourceDir: [Rules.requiredInput],
targetAccountIDList: [Rules.requiredSelect],
targetDirID: [Rules.requiredSelect, Rules.number], targetDirID: [Rules.requiredSelect, Rules.number],
retainCopies: [Rules.number], retainCopies: [Rules.number],
}); });
@ -475,17 +489,6 @@ const loadDatabases = async (dbType: string) => {
}; };
const changeType = () => { const changeType = () => {
if (dialogData.value.rowData.type === 'snapshot') {
dialogData.value.rowData.keepLocal = false;
dialogData.value.rowData.targetDirID = null;
for (const item of backupOptions.value) {
if (item.label !== i18n.global.t('setting.LOCAL')) {
dialogData.value.rowData.targetDirID = item.value;
break;
}
}
}
dialogData.value.rowData!.specObjs = [loadDefaultSpec(dialogData.value.rowData.type)]; dialogData.value.rowData!.specObjs = [loadDefaultSpec(dialogData.value.rowData.type)];
}; };
@ -514,12 +517,29 @@ const loadBackups = async () => {
} }
if (item.type === 'LOCAL') { if (item.type === 'LOCAL') {
localDirID.value = item.id; localDirID.value = item.id;
if (!dialogData.value.rowData!.targetDirID) { if (!dialogData.value.rowData!.targetAccountIDList) {
dialogData.value.rowData!.targetDirID = item.id; dialogData.value.rowData!.targetAccountIDList = [item.id];
} }
} }
backupOptions.value.push({ label: i18n.global.t('setting.' + item.type), value: item.id }); backupOptions.value.push({ label: i18n.global.t('setting.' + item.type), value: item.id });
} }
changeAccount();
};
const changeAccount = async () => {
accountOptions.value = [];
for (const item of backupOptions.value) {
let exit = false;
for (const ac of dialogData.value.rowData.targetAccountIDList) {
if (item.value == ac) {
exit = true;
break;
}
}
if (exit) {
accountOptions.value.push(item);
}
}
}; };
const loadAppInstalls = async () => { const loadAppInstalls = async () => {
@ -566,6 +586,7 @@ const onSubmit = async (formEl: FormInstance | undefined) => {
} }
specs.push(itemSpec); specs.push(itemSpec);
} }
dialogData.value.rowData.targetAccountIDs = dialogData.value.rowData.targetAccountIDList.join(',');
dialogData.value.rowData.spec = specs.join(','); dialogData.value.rowData.spec = specs.join(',');
if (!formEl) return; if (!formEl) return;
formEl.validate(async (valid) => { formEl.validate(async (valid) => {

View File

@ -27,48 +27,6 @@
<el-tag v-if="dialogData.rowData.status === 'Disable'" round class="status-content" type="info"> <el-tag v-if="dialogData.rowData.status === 'Disable'" round class="status-content" type="info">
{{ $t('commons.status.stopped') }} {{ $t('commons.status.stopped') }}
</el-tag> </el-tag>
<el-tag class="status-content">
<span
v-if="
dialogData.rowData?.specType.indexOf('N') === -1 ||
dialogData.rowData?.specType === 'perWeek'
"
>
{{ $t('cronjob.' + dialogData.rowData?.specType) }}&nbsp;
</span>
<span v-else>{{ $t('cronjob.per') }}</span>
<span v-if="dialogData.rowData?.specType === 'perMonth'">
{{ dialogData.rowData?.day }}{{ $t('cronjob.day') }}&nbsp;
{{ loadZero(dialogData.rowData?.hour) }} :
{{ loadZero(dialogData.rowData?.minute) }}
</span>
<span v-if="dialogData.rowData?.specType === 'perDay'">
{{ loadZero(dialogData.rowData?.hour) }} : {{ loadZero(dialogData.rowData?.minute) }}
</span>
<span v-if="dialogData.rowData?.specType === 'perWeek'">
{{ loadWeek(dialogData.rowData?.week) }}&nbsp; {{ loadZero(dialogData.rowData?.hour) }} :
{{ loadZero(dialogData.rowData?.minute) }}
</span>
<span v-if="dialogData.rowData?.specType === 'perNDay'">
{{ dialogData.rowData?.day }}{{ $t('commons.units.day') }},&nbsp;
{{ loadZero(dialogData.rowData?.hour) }} :
{{ loadZero(dialogData.rowData?.minute) }}
</span>
<span v-if="dialogData.rowData?.specType === 'perNHour'">
{{ dialogData.rowData?.hour }}{{ $t('commons.units.hour') }},&nbsp;
{{ loadZero(dialogData.rowData?.minute) }}
</span>
<span v-if="dialogData.rowData?.specType === 'perHour'">
&nbsp;{{ loadZero(dialogData.rowData?.minute) }}
</span>
<span v-if="dialogData.rowData?.specType === 'perNMinute'">
&nbsp;{{ dialogData.rowData?.minute }}{{ $t('commons.units.minute') }}
</span>
<span v-if="dialogData.rowData?.specType === 'perNSecond'">
&nbsp;{{ dialogData.rowData?.second }}{{ $t('commons.units.second') }}
</span>
&nbsp;{{ $t('cronjob.handle') }}
</el-tag>
<span class="buttons"> <span class="buttons">
<el-button type="primary" @click="onHandle(dialogData.rowData)" link> <el-button type="primary" @click="onHandle(dialogData.rowData)" link>
{{ $t('commons.button.handle') }} {{ $t('commons.button.handle') }}
@ -172,119 +130,6 @@
</el-col> </el-col>
<el-col :span="18"> <el-col :span="18">
<el-form label-position="top" :v-key="refresh"> <el-form label-position="top" :v-key="refresh">
<el-row type="flex" justify="center">
<el-form-item class="descriptionWide" v-if="isBackup()">
<template #label>
<span class="status-label">{{ $t('cronjob.target') }}</span>
</template>
<span class="status-count">{{ dialogData.rowData!.targetDir }}</span>
<el-button
v-if="currentRecord?.status === 'Success' && dialogData.rowData!.type !== 'snapshot' && dialogData.rowData!.type !== 'log'"
type="primary"
style="margin-left: 10px"
link
icon="Download"
@click="onDownload(currentRecord, dialogData.rowData!.targetDirID)"
>
{{ $t('file.download') }}
</el-button>
</el-form-item>
<el-form-item class="description" v-if="dialogData.rowData!.type === 'app'">
<template #label>
<span class="status-label">{{ $t('cronjob.app') }}</span>
</template>
<span v-if="dialogData.rowData!.appID !== 'all'" class="status-count">
{{ dialogData.rowData!.appID }}
</span>
<span v-else class="status-count">
{{ $t('commons.table.all') }}
</span>
</el-form-item>
<el-form-item class="description" v-if="dialogData.rowData!.type === 'website'">
<template #label>
<span class="status-label">{{ $t('cronjob.website') }}</span>
</template>
<span v-if="dialogData.rowData!.website !== 'all'" class="status-count">
{{ dialogData.rowData!.website }}
</span>
<span v-else class="status-count">
{{ $t('commons.table.all') }}
</span>
</el-form-item>
<el-form-item class="description" v-if="dialogData.rowData!.type === 'log'">
<template #label>
<span class="status-label">{{ $t('cronjob.log') }}</span>
</template>
<span class="status-count">
{{ $t('cronjob.logHelper') }}
</span>
</el-form-item>
<el-form-item class="description" v-if="dialogData.rowData!.type === 'database'">
<template #label>
<span class="status-label">{{ $t('cronjob.database') }}</span>
</template>
<span v-if="dialogData.rowData!.dbName !== 'all'" class="status-count">
{{ dialogData.rowData!.dbName }}
</span>
<span v-else class="status-count">
{{ $t('commons.table.all') }}
</span>
</el-form-item>
<el-form-item class="description" v-if="dialogData.rowData!.type === 'directory'">
<template #label>
<span class="status-label">{{ $t('cronjob.directory') }}</span>
</template>
<span v-if="dialogData.rowData!.sourceDir.length <= 12" class="status-count">
{{ dialogData.rowData!.sourceDir }}
</span>
<div v-else>
<el-popover
placement="top-start"
trigger="hover"
width="250"
:content="dialogData.rowData!.sourceDir"
>
<template #reference>
<span class="status-count">
{{ dialogData.rowData!.sourceDir.substring(0, 12) }}...
</span>
</template>
</el-popover>
</div>
</el-form-item>
<el-form-item class="description" v-if="isBackup()">
<template #label>
<span class="status-label">{{ $t('cronjob.retainCopies') }}</span>
</template>
<span class="status-count">{{ dialogData.rowData!.retainCopies }}</span>
</el-form-item>
<el-form-item
class="description"
v-if="dialogData.rowData!.type === 'snapshot'"
></el-form-item>
</el-row>
<el-form-item class="description" v-if=" dialogData.rowData!.type === 'directory'">
<template #label>
<span class="status-label">{{ $t('cronjob.exclusionRules') }}</span>
</template>
<span v-if="dialogData.rowData!.exclusionRules.length <= 12" class="status-count">
{{ dialogData.rowData!.exclusionRules }}
</span>
<div v-else>
<el-popover
placement="top-start"
trigger="hover"
width="250"
:content="dialogData.rowData!.exclusionRules"
>
<template #reference>
<span class="status-count">
{{ dialogData.rowData!.exclusionRules.substring(0, 12) }}...
</span>
</template>
</el-popover>
</div>
</el-form-item>
<el-row type="flex" justify="center"> <el-row type="flex" justify="center">
<el-form-item class="descriptionWide"> <el-form-item class="descriptionWide">
<template #label> <template #label>
@ -339,6 +184,7 @@
theme="cobalt" theme="cobalt"
:styleActiveLine="true" :styleActiveLine="true"
:extensions="extensions" :extensions="extensions"
@ready="handleReady"
v-model="currentRecordDetail" v-model="currentRecordDetail"
:disabled="true" :disabled="true"
/> />
@ -387,27 +233,19 @@
</template> </template>
<script lang="ts" setup> <script lang="ts" setup>
import { onBeforeUnmount, reactive, ref } from 'vue'; import { onBeforeUnmount, reactive, ref, shallowRef } from 'vue';
import { Cronjob } from '@/api/interface/cronjob'; import { Cronjob } from '@/api/interface/cronjob';
import { loadZero } from '@/utils/util'; import { searchRecords, handleOnce, updateStatus, cleanRecords, getRecordLog } from '@/api/modules/cronjob';
import {
searchRecords,
downloadRecord,
handleOnce,
updateStatus,
cleanRecords,
getRecordLog,
downloadRecordCheck,
} from '@/api/modules/cronjob';
import { dateFormat } from '@/utils/util'; import { dateFormat } from '@/utils/util';
import i18n from '@/lang'; import i18n from '@/lang';
import { ElMessageBox } from 'element-plus'; import { ElMessageBox } from 'element-plus';
import { Codemirror } from 'vue-codemirror'; import { Codemirror } from 'vue-codemirror';
import { javascript } from '@codemirror/lang-javascript'; import { javascript } from '@codemirror/lang-javascript';
import { oneDark } from '@codemirror/theme-one-dark'; import { oneDark } from '@codemirror/theme-one-dark';
import { MsgError, MsgInfo, MsgSuccess } from '@/utils/message'; import { MsgSuccess } from '@/utils/message';
import { listDbItems } from '@/api/modules/database'; import { listDbItems } from '@/api/modules/database';
import { ListAppInstalled } from '@/api/modules/app'; import { ListAppInstalled } from '@/api/modules/app';
import { shortcuts } from './../helper';
const loading = ref(); const loading = ref();
const refresh = ref(false); const refresh = ref(false);
@ -417,6 +255,10 @@ let timer: NodeJS.Timer | null = null;
const mymirror = ref(); const mymirror = ref();
const extensions = [javascript(), oneDark]; const extensions = [javascript(), oneDark];
const view = shallowRef();
const handleReady = (payload) => {
view.value = payload.view;
};
interface DialogProps { interface DialogProps {
rowData: Cronjob.CronjobInfo; rowData: Cronjob.CronjobInfo;
@ -475,61 +317,6 @@ const handleCurrentChange = (val: number) => {
search(); search();
}; };
const shortcuts = [
{
text: i18n.global.t('monitor.today'),
value: () => {
const end = new Date(new Date().setHours(23, 59, 59, 999));
const start = new Date(new Date().setHours(0, 0, 0, 0));
return [start, end];
},
},
{
text: i18n.global.t('monitor.yesterday'),
value: () => {
const itemDate = new Date(new Date().getTime() - 3600 * 1000 * 24 * 1);
const end = new Date(itemDate.setHours(23, 59, 59, 999));
const start = new Date(itemDate.setHours(0, 0, 0, 0));
return [start, end];
},
},
{
text: i18n.global.t('monitor.lastNDay', [3]),
value: () => {
const itemDate = new Date(new Date().getTime() - 3600 * 1000 * 24 * 3);
const end = new Date(new Date().setHours(23, 59, 59, 999));
const start = new Date(itemDate.setHours(0, 0, 0, 0));
return [start, end];
},
},
{
text: i18n.global.t('monitor.lastNDay', [7]),
value: () => {
const itemDate = new Date(new Date().getTime() - 3600 * 1000 * 24 * 7);
const end = new Date(new Date().setHours(23, 59, 59, 999));
const start = new Date(itemDate.setHours(0, 0, 0, 0));
return [start, end];
},
},
{
text: i18n.global.t('monitor.lastNDay', [30]),
value: () => {
const itemDate = new Date(new Date().getTime() - 3600 * 1000 * 24 * 30);
const end = new Date(new Date().setHours(23, 59, 59, 999));
const start = new Date(itemDate.setHours(0, 0, 0, 0));
return [start, end];
},
},
];
const weekOptions = [
{ label: i18n.global.t('cronjob.monday'), value: 1 },
{ label: i18n.global.t('cronjob.tuesday'), value: 2 },
{ label: i18n.global.t('cronjob.wednesday'), value: 3 },
{ label: i18n.global.t('cronjob.thursday'), value: 4 },
{ label: i18n.global.t('cronjob.friday'), value: 5 },
{ label: i18n.global.t('cronjob.saturday'), value: 6 },
{ label: i18n.global.t('cronjob.sunday'), value: 0 },
];
const timeRangeLoad = ref<[Date, Date]>([ const timeRangeLoad = ref<[Date, Date]>([
new Date(new Date(new Date().getTime() - 3600 * 1000 * 24 * 7).setHours(0, 0, 0, 0)), new Date(new Date(new Date().getTime() - 3600 * 1000 * 24 * 7).setHours(0, 0, 0, 0)),
new Date(new Date().setHours(23, 59, 59, 999)), new Date(new Date().setHours(23, 59, 59, 999)),
@ -613,58 +400,6 @@ const search = async () => {
} }
}; };
const onDownload = async (record: any, backupID: number) => {
let type = '';
switch (dialogData.value.rowData.type) {
case 'database':
type = i18n.global.t('database.database');
if (dialogData.value.rowData!.dbName === 'all') {
MsgInfo(i18n.global.t('cronjob.allOptionHelper', [type]));
return;
}
break;
case 'app':
type = i18n.global.t('app.app');
if (dialogData.value.rowData!.appID === 'all') {
MsgInfo(i18n.global.t('cronjob.allOptionHelper', [type]));
return;
}
break;
case 'website':
type = i18n.global.t('website.website');
if (dialogData.value.rowData!.website === 'all') {
MsgInfo(i18n.global.t('cronjob.allOptionHelper', [type]));
return;
}
break;
}
if (currentRecord.value.file.indexOf(',') !== -1) {
MsgInfo(i18n.global.t('cronjob.allOptionHelper', [type]));
return;
}
if (!record.file || record.file.indexOf('/') === -1) {
MsgError(i18n.global.t('cronjob.errPath', [record.file]));
return;
}
let params = {
recordID: record.id,
backupAccountID: backupID,
};
await downloadRecordCheck(params).then(async () => {
const file = await downloadRecord(params);
const downloadUrl = window.URL.createObjectURL(new Blob([file]));
const a = document.createElement('a');
a.style.display = 'none';
a.href = downloadUrl;
if (record.file && record.file.indexOf('/') !== -1) {
let pathItem = record.file.split('/');
a.download = pathItem[pathItem.length - 1];
}
const event = new MouseEvent('click');
a.dispatchEvent(event);
});
};
const forDetail = async (row: Cronjob.Record) => { const forDetail = async (row: Cronjob.Record) => {
currentRecord.value = row; currentRecord.value = row;
loadRecord(row); loadRecord(row);
@ -677,29 +412,30 @@ const loadRecord = async (row: Cronjob.Record) => {
if (row.records) { if (row.records) {
const res = await getRecordLog(row.id); const res = await getRecordLog(row.id);
currentRecordDetail.value = res.data; currentRecordDetail.value = res.data;
const state = view.value.state;
view.value.dispatch({
selection: { anchor: state.doc.length, head: state.doc.length },
scrollIntoView: true,
});
} }
}; };
const onClean = async () => { const onClean = async () => {
if (!isBackup()) { ElMessageBox.confirm(i18n.global.t('commons.msg.clean'), i18n.global.t('commons.msg.deleteTitle'), {
ElMessageBox.confirm(i18n.global.t('commons.msg.clean'), i18n.global.t('commons.msg.deleteTitle'), { confirmButtonText: i18n.global.t('commons.button.confirm'),
confirmButtonText: i18n.global.t('commons.button.confirm'), cancelButtonText: i18n.global.t('commons.button.cancel'),
cancelButtonText: i18n.global.t('commons.button.cancel'), type: 'warning',
type: 'warning', }).then(async () => {
}).then(async () => { await cleanRecords(dialogData.value.rowData.id, cleanData.value)
await cleanRecords(dialogData.value.rowData.id, cleanData.value) .then(() => {
.then(() => { delLoading.value = false;
delLoading.value = false; MsgSuccess(i18n.global.t('commons.msg.operationSuccess'));
MsgSuccess(i18n.global.t('commons.msg.operationSuccess')); search();
search(); })
}) .catch(() => {
.catch(() => { delLoading.value = false;
delLoading.value = false; });
}); });
});
} else {
deleteVisible.value = true;
}
}; };
const cleanRecord = async () => { const cleanRecord = async () => {
@ -716,25 +452,6 @@ const cleanRecord = async () => {
}); });
}; };
function isBackup() {
return (
dialogData.value.rowData!.type === 'app' ||
dialogData.value.rowData!.type === 'website' ||
dialogData.value.rowData!.type === 'database' ||
dialogData.value.rowData!.type === 'directory' ||
dialogData.value.rowData!.type === 'snapshot' ||
dialogData.value.rowData!.type === 'log'
);
}
function loadWeek(i: number) {
for (const week of weekOptions) {
if (week.value === i) {
return week.label;
}
}
return '';
}
onBeforeUnmount(() => { onBeforeUnmount(() => {
clearInterval(Number(timer)); clearInterval(Number(timer));
timer = null; timer = null;

View File

@ -22,7 +22,7 @@
<el-radio-button :label="true">{{ $t('setting.isCN') }}</el-radio-button> <el-radio-button :label="true">{{ $t('setting.isCN') }}</el-radio-button>
</el-radio-group> </el-radio-group>
<span class="input-help"> <span class="input-help">
{{ $t('setting.cn_onedrive_helper') }} {{ $t('setting.onedrive_helper') }}
<el-link <el-link
style="font-size: 12px; margin-left: 5px" style="font-size: 12px; margin-left: 5px"
icon="Position" icon="Position"