
feat: support setting an encryption password when compressing and decompressing tar.gz files (#5258)

John Bro 2024-06-04 16:48:31 +08:00 committed by GitHub
parent dab5434125
commit 0182586869
43 changed files with 310 additions and 118 deletions


@@ -35,6 +35,7 @@ type CommonBackup struct {
    Type string `json:"type" validate:"required,oneof=app mysql mariadb redis website postgresql"`
    Name string `json:"name"`
    DetailName string `json:"detailName"`
+   Secret string `json:"secret"`
}

type CommonRecover struct {
    Source string `json:"source" validate:"required,oneof=OSS S3 SFTP MINIO LOCAL COS KODO OneDrive WebDAV"`
@@ -42,6 +43,7 @@ type CommonRecover struct {
    Name string `json:"name"`
    DetailName string `json:"detailName"`
    File string `json:"file"`
+   Secret string `json:"secret"`
}

type RecordSearch struct {
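For orientation, a small sketch (not part of the PR; values and the import path are assumptions) of a backup request that carries the new field. The matching CommonRecover has to supply the same secret, otherwise the restore side cannot decrypt the archive:

package main

// Import path assumed from the repository layout; the struct is the one shown above.
import "github.com/1Panel-dev/1Panel/backend/app/dto"

func main() {
    req := dto.CommonBackup{
        Type:       "app",
        Name:       "halo",        // illustrative app name
        DetailName: "halo-1",      // illustrative install name
        Secret:     "my-password", // new in this commit; empty keeps the old unencrypted behaviour
    }
    _ = req
}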


@@ -1,6 +1,8 @@
package dto

- import "time"
+ import (
+     "time"
+ )

type CronjobCreate struct {
    Name string `json:"name" validate:"required"`
@@ -21,6 +23,7 @@ type CronjobCreate struct {
    BackupAccounts string `json:"backupAccounts"`
    DefaultDownload string `json:"defaultDownload"`
    RetainCopies int `json:"retainCopies" validate:"number,min=1"`
+   Secret string `json:"secret"`
}

type CronjobUpdate struct {
@@ -42,6 +45,7 @@ type CronjobUpdate struct {
    BackupAccounts string `json:"backupAccounts"`
    DefaultDownload string `json:"defaultDownload"`
    RetainCopies int `json:"retainCopies" validate:"number,min=1"`
+   Secret string `json:"secret"`
}

type CronjobUpdateStatus struct {
@@ -87,6 +91,7 @@ type CronjobInfo struct {
    LastRecordTime string `json:"lastRecordTime"`
    Status string `json:"status"`
+   Secret string `json:"secret"`
}

type SearchRecord struct {


@@ -54,12 +54,14 @@ type FileCompress struct {
    Type string `json:"type" validate:"required"`
    Name string `json:"name" validate:"required"`
    Replace bool `json:"replace"`
+   Secret string `json:"secret"`
}

type FileDeCompress struct {
    Dst string `json:"dst" validate:"required"`
    Type string `json:"type" validate:"required"`
    Path string `json:"path" validate:"required"`
+   Secret string `json:"secret"`
}

type FileEdit struct {
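A short usage sketch (not part of the PR; paths, the type string, and the import path are assumptions) showing how these two request types carry the password down to FileService.Compress and FileService.DeCompress, which are updated later in this diff:

package main

// Import path assumed from the repository layout.
import "github.com/1Panel-dev/1Panel/backend/app/dto/request"

func main() {
    // A non-empty Secret makes the backend pipe the tar.gz stream through
    // `openssl enc -aes-256-cbc` (see the handleTar/TarGzArchiver hunks below).
    compress := request.FileCompress{
        Dst:     "/opt/backups",
        Type:    "tar.gz", // assumed string value of the TarGz compress type
        Name:    "site.tar.gz",
        Replace: true,
        Secret:  "my-password",
    }

    // Decompression must supply the same secret, otherwise openssl cannot decrypt the archive.
    decompress := request.FileDeCompress{
        Dst:    "/opt/restore",
        Type:   "tar.gz",
        Path:   "/opt/backups/site.tar.gz",
        Secret: "my-password",
    }
    _, _ = compress, decompress
}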


@@ -116,11 +116,13 @@ type SnapshotCreate struct {
    From string `json:"from" validate:"required"`
    DefaultDownload string `json:"defaultDownload" validate:"required"`
    Description string `json:"description" validate:"max=256"`
+   Secret string `json:"secret"`
}

type SnapshotRecover struct {
    IsNew bool `json:"isNew"`
    ReDownload bool `json:"reDownload"`
    ID uint `json:"id" validate:"required"`
+   Secret string `json:"secret"`
}

type SnapshotBatchDelete struct {
    DeleteWithFile bool `json:"deleteWithFile"`


@@ -1,6 +1,8 @@
package model

- import "time"
+ import (
+     "time"
+ )

type Cronjob struct {
    BaseModel
@@ -31,6 +33,7 @@ type Cronjob struct {
    Status string `gorm:"type:varchar(64)" json:"status"`
    EntryIDs string `gorm:"type:varchar(64)" json:"entryIDs"`
    Records []JobRecords `json:"records"`
+   Secret string `gorm:"type:varchar(64)" json:"secret"`
}

type JobRecords struct {


@@ -767,7 +767,7 @@ func getAppFromRepo(downloadPath string) error {
    if err := fileOp.DownloadFile(downloadUrl, packagePath); err != nil {
        return err
    }
-   if err := fileOp.Decompress(packagePath, constant.ResourceDir, files.SdkZip); err != nil {
+   if err := fileOp.Decompress(packagePath, constant.ResourceDir, files.SdkZip, ""); err != nil {
        return err
    }
    defer func() {


@@ -765,7 +765,7 @@ func downloadApp(app model.App, appDetail model.AppDetail, appInstall *model.App
        global.LOG.Errorf("download app[%s] error %v", app.Name, err)
        return
    }
-   if err = fileOp.Decompress(filePath, appResourceDir, files.SdkTarGz); err != nil {
+   if err = fileOp.Decompress(filePath, appResourceDir, files.SdkTarGz, ""); err != nil {
        global.LOG.Errorf("decompress app[%s] error %v", app.Name, err)
        return
    }


@@ -40,7 +40,7 @@ func (u *BackupService) AppBackup(req dto.CommonBackup) (*model.BackupRecord, er
    backupDir := path.Join(localDir, itemDir)
    fileName := fmt.Sprintf("%s_%s.tar.gz", req.DetailName, timeNow+common.RandStrAndNum(5))
-   if err := handleAppBackup(&install, backupDir, fileName, ""); err != nil {
+   if err := handleAppBackup(&install, backupDir, fileName, "", req.Secret); err != nil {
        return nil, err
    }
@@ -78,13 +78,13 @@ func (u *BackupService) AppRecover(req dto.CommonRecover) error {
    if _, err := compose.Down(install.GetComposePath()); err != nil {
        return err
    }
-   if err := handleAppRecover(&install, req.File, false); err != nil {
+   if err := handleAppRecover(&install, req.File, false, req.Secret); err != nil {
        return err
    }
    return nil
}

- func handleAppBackup(install *model.AppInstall, backupDir, fileName string, excludes string) error {
+ func handleAppBackup(install *model.AppInstall, backupDir, fileName string, excludes string, secret string) error {
    fileOp := files.NewFileOp()
    tmpDir := fmt.Sprintf("%s/%s", backupDir, strings.ReplaceAll(fileName, ".tar.gz", ""))
    if !fileOp.Stat(tmpDir) {
@@ -103,7 +103,7 @@ func handleAppBackup(install *model.AppInstall, backupDir, fileName string, excl
    }
    appPath := install.GetPath()
-   if err := handleTar(appPath, tmpDir, "app.tar.gz", excludes); err != nil {
+   if err := handleTar(appPath, tmpDir, "app.tar.gz", excludes, ""); err != nil {
        return err
    }
@@ -129,16 +129,16 @@ func handleAppBackup(install *model.AppInstall, backupDir, fileName string, excl
        }
    }
-   if err := handleTar(tmpDir, backupDir, fileName, ""); err != nil {
+   if err := handleTar(tmpDir, backupDir, fileName, "", secret); err != nil {
        return err
    }
    return nil
}

- func handleAppRecover(install *model.AppInstall, recoverFile string, isRollback bool) error {
+ func handleAppRecover(install *model.AppInstall, recoverFile string, isRollback bool, secret string) error {
    isOk := false
    fileOp := files.NewFileOp()
-   if err := handleUnTar(recoverFile, path.Dir(recoverFile)); err != nil {
+   if err := handleUnTar(recoverFile, path.Dir(recoverFile), secret); err != nil {
        return err
    }
    tmpPath := strings.ReplaceAll(recoverFile, ".tar.gz", "")
@@ -164,13 +164,13 @@ func handleAppRecover(install *model.AppInstall, recoverFile string, isRollback
    if !isRollback {
        rollbackFile := path.Join(global.CONF.System.TmpDir, fmt.Sprintf("app/%s_%s.tar.gz", install.Name, time.Now().Format("20060102150405")))
-       if err := handleAppBackup(install, path.Dir(rollbackFile), path.Base(rollbackFile), ""); err != nil {
+       if err := handleAppBackup(install, path.Dir(rollbackFile), path.Base(rollbackFile), "", ""); err != nil {
            return fmt.Errorf("backup app %s for rollback before recover failed, err: %v", install.Name, err)
        }
        defer func() {
            if !isOk {
                global.LOG.Info("recover failed, start to rollback now")
-               if err := handleAppRecover(install, rollbackFile, true); err != nil {
+               if err := handleAppRecover(install, rollbackFile, true, secret); err != nil {
                    global.LOG.Errorf("rollback app %s from %s failed, err: %v", install.Name, rollbackFile, err)
                    return
                }
@@ -251,7 +251,7 @@ func handleAppRecover(install *model.AppInstall, recoverFile string, isRollback
    _ = fileOp.Rename(appDir, backPath)
    _ = fileOp.CreateDir(appDir, 0755)
-   if err := handleUnTar(tmpPath+"/app.tar.gz", install.GetAppPath()); err != nil {
+   if err := handleUnTar(tmpPath+"/app.tar.gz", install.GetAppPath(), ""); err != nil {
        global.LOG.Errorf("handle recover from app.tar.gz failed, err: %v", err)
        _ = fileOp.DeleteDir(appDir)
        _ = fileOp.Rename(backPath, appDir)


@@ -66,7 +66,7 @@ func (u *BackupService) MysqlRecoverByUpload(req dto.CommonRecover) error {
            return fmt.Errorf("mkdir %s failed, err: %v", dstDir, err)
        }
    }
-   if err := handleUnTar(req.File, dstDir); err != nil {
+   if err := handleUnTar(req.File, dstDir, ""); err != nil {
        _ = os.RemoveAll(dstDir)
        return err
    }


@@ -66,7 +66,7 @@ func (u *BackupService) PostgresqlRecoverByUpload(req dto.CommonRecover) error {
            return fmt.Errorf("mkdir %s failed, err: %v", dstDir, err)
        }
    }
-   if err := handleUnTar(req.File, dstDir); err != nil {
+   if err := handleUnTar(req.File, dstDir, ""); err != nil {
        _ = os.RemoveAll(dstDir)
        return err
    }


@@ -46,7 +46,7 @@ func (u *BackupService) RedisBackup(db dto.CommonBackup) error {
    }
    itemDir := fmt.Sprintf("database/redis/%s", redisInfo.Name)
    backupDir := path.Join(localDir, itemDir)
-   if err := handleRedisBackup(redisInfo, backupDir, fileName); err != nil {
+   if err := handleRedisBackup(redisInfo, backupDir, fileName, db.Secret); err != nil {
        return err
    }
    record := &model.BackupRecord{
@@ -70,13 +70,13 @@ func (u *BackupService) RedisRecover(req dto.CommonRecover) error {
        return err
    }
    global.LOG.Infof("recover redis from backup file %s", req.File)
-   if err := handleRedisRecover(redisInfo, req.File, false); err != nil {
+   if err := handleRedisRecover(redisInfo, req.File, false, req.Secret); err != nil {
        return err
    }
    return nil
}

- func handleRedisBackup(redisInfo *repo.RootInfo, backupDir, fileName string) error {
+ func handleRedisBackup(redisInfo *repo.RootInfo, backupDir, fileName string, secret string) error {
    fileOp := files.NewFileOp()
    if !fileOp.Stat(backupDir) {
        if err := os.MkdirAll(backupDir, os.ModePerm); err != nil {
@@ -91,7 +91,7 @@ func handleRedisBackup(redisInfo *repo.RootInfo, backupDir, fileName string) err
    if strings.HasSuffix(fileName, ".tar.gz") {
        redisDataDir := fmt.Sprintf("%s/%s/%s/data/appendonlydir", constant.AppInstallDir, "redis", redisInfo.Name)
-       if err := handleTar(redisDataDir, backupDir, fileName, ""); err != nil {
+       if err := handleTar(redisDataDir, backupDir, fileName, "", secret); err != nil {
            return err
        }
        return nil
@@ -111,7 +111,7 @@ func handleRedisBackup(redisInfo *repo.RootInfo, backupDir, fileName string) err
    return nil
}

- func handleRedisRecover(redisInfo *repo.RootInfo, recoverFile string, isRollback bool) error {
+ func handleRedisRecover(redisInfo *repo.RootInfo, recoverFile string, isRollback bool, secret string) error {
    fileOp := files.NewFileOp()
    if !fileOp.Stat(recoverFile) {
        return buserr.WithName("ErrFileNotFound", recoverFile)
@@ -147,13 +147,13 @@ func handleRedisRecover(redisInfo *repo.RootInfo, recoverFile string, isRollback
        }
    }
    rollbackFile := path.Join(global.CONF.System.TmpDir, fmt.Sprintf("database/redis/%s_%s.%s", redisInfo.Name, time.Now().Format("20060102150405"), suffix))
-   if err := handleRedisBackup(redisInfo, path.Dir(rollbackFile), path.Base(rollbackFile)); err != nil {
+   if err := handleRedisBackup(redisInfo, path.Dir(rollbackFile), path.Base(rollbackFile), secret); err != nil {
        return fmt.Errorf("backup database %s for rollback before recover failed, err: %v", redisInfo.Name, err)
    }
    defer func() {
        if !isOk {
            global.LOG.Info("recover failed, start to rollback now")
-           if err := handleRedisRecover(redisInfo, rollbackFile, true); err != nil {
+           if err := handleRedisRecover(redisInfo, rollbackFile, true, secret); err != nil {
                global.LOG.Errorf("rollback redis from %s failed, err: %v", rollbackFile, err)
                return
            }
@@ -170,7 +170,7 @@ func handleRedisRecover(redisInfo *repo.RootInfo, recoverFile string, isRollback
    }
    if appendonly == "yes" && strings.HasPrefix(redisInfo.Version, "7.") {
        redisDataDir := fmt.Sprintf("%s/%s/%s/data", constant.AppInstallDir, "redis", redisInfo.Name)
-       if err := handleUnTar(recoverFile, redisDataDir); err != nil {
+       if err := handleUnTar(recoverFile, redisDataDir, secret); err != nil {
            return err
        }
    } else {


@@ -16,7 +16,7 @@ import (
    "github.com/1Panel-dev/1Panel/backend/utils/files"
)

- func handleRuntimeBackup(runtime *model.Runtime, backupDir, fileName string, excludes string) error {
+ func handleRuntimeBackup(runtime *model.Runtime, backupDir, fileName string, excludes string, secret string) error {
    fileOp := files.NewFileOp()
    tmpDir := fmt.Sprintf("%s/%s", backupDir, strings.ReplaceAll(fileName, ".tar.gz", ""))
    if !fileOp.Stat(tmpDir) {
@@ -35,19 +35,19 @@ func handleRuntimeBackup(runtime *model.Runtime, backupDir, fileName string, exc
    }
    appPath := runtime.GetPath()
-   if err := handleTar(appPath, tmpDir, "runtime.tar.gz", excludes); err != nil {
+   if err := handleTar(appPath, tmpDir, "runtime.tar.gz", excludes, secret); err != nil {
        return err
    }
-   if err := handleTar(tmpDir, backupDir, fileName, ""); err != nil {
+   if err := handleTar(tmpDir, backupDir, fileName, "", secret); err != nil {
        return err
    }
    return nil
}

- func handleRuntimeRecover(runtime *model.Runtime, recoverFile string, isRollback bool) error {
+ func handleRuntimeRecover(runtime *model.Runtime, recoverFile string, isRollback bool, secret string) error {
    isOk := false
    fileOp := files.NewFileOp()
-   if err := handleUnTar(recoverFile, path.Dir(recoverFile)); err != nil {
+   if err := handleUnTar(recoverFile, path.Dir(recoverFile), secret); err != nil {
        return err
    }
    tmpPath := strings.ReplaceAll(recoverFile, ".tar.gz", "")
@@ -73,13 +73,13 @@ func handleRuntimeRecover(runtime *model.Runtime, recoverFile string, isRollback
    if !isRollback {
        rollbackFile := path.Join(global.CONF.System.TmpDir, fmt.Sprintf("runtime/%s_%s.tar.gz", runtime.Name, time.Now().Format("20060102150405")))
-       if err := handleRuntimeBackup(runtime, path.Dir(rollbackFile), path.Base(rollbackFile), ""); err != nil {
+       if err := handleRuntimeBackup(runtime, path.Dir(rollbackFile), path.Base(rollbackFile), "", secret); err != nil {
            return fmt.Errorf("backup runtime %s for rollback before recover failed, err: %v", runtime.Name, err)
        }
        defer func() {
            if !isOk {
                global.LOG.Info("recover failed, start to rollback now")
-               if err := handleRuntimeRecover(runtime, rollbackFile, true); err != nil {
+               if err := handleRuntimeRecover(runtime, rollbackFile, true, secret); err != nil {
                    global.LOG.Errorf("rollback runtime %s from %s failed, err: %v", runtime.Name, rollbackFile, err)
                    return
                }
@@ -100,7 +100,7 @@ func handleRuntimeRecover(runtime *model.Runtime, recoverFile string, isRollback
    _ = fileOp.Rename(runtimeDir, backPath)
    _ = fileOp.CreateDir(runtimeDir, 0755)
-   if err := handleUnTar(tmpPath+"/runtime.tar.gz", fmt.Sprintf("%s/%s", constant.RuntimeDir, runtime.Type)); err != nil {
+   if err := handleUnTar(tmpPath+"/runtime.tar.gz", fmt.Sprintf("%s/%s", constant.RuntimeDir, runtime.Type), secret); err != nil {
        global.LOG.Errorf("handle recover from runtime.tar.gz failed, err: %v", err)
        _ = fileOp.DeleteDir(runtimeDir)
        _ = fileOp.Rename(backPath, runtimeDir)


@@ -35,7 +35,7 @@ func (u *BackupService) WebsiteBackup(req dto.CommonBackup) error {
    itemDir := fmt.Sprintf("website/%s", req.Name)
    backupDir := path.Join(localDir, itemDir)
    fileName := fmt.Sprintf("%s_%s.tar.gz", website.PrimaryDomain, timeNow+common.RandStrAndNum(5))
-   if err := handleWebsiteBackup(&website, backupDir, fileName, ""); err != nil {
+   if err := handleWebsiteBackup(&website, backupDir, fileName, "", req.Secret); err != nil {
        return err
    }
@@ -65,16 +65,16 @@ func (u *BackupService) WebsiteRecover(req dto.CommonRecover) error {
        return err
    }
    global.LOG.Infof("recover website %s from backup file %s", req.Name, req.File)
-   if err := handleWebsiteRecover(&website, req.File, false); err != nil {
+   if err := handleWebsiteRecover(&website, req.File, false, req.Secret); err != nil {
        return err
    }
    return nil
}

- func handleWebsiteRecover(website *model.Website, recoverFile string, isRollback bool) error {
+ func handleWebsiteRecover(website *model.Website, recoverFile string, isRollback bool, secret string) error {
    fileOp := files.NewFileOp()
    tmpPath := strings.ReplaceAll(recoverFile, ".tar.gz", "")
-   if err := handleUnTar(recoverFile, path.Dir(recoverFile)); err != nil {
+   if err := handleUnTar(recoverFile, path.Dir(recoverFile), secret); err != nil {
        return err
    }
    defer func() {
@@ -107,13 +107,13 @@ func handleWebsiteRecover(website *model.Website, recoverFile string, isRollback
    isOk := false
    if !isRollback {
        rollbackFile := path.Join(global.CONF.System.TmpDir, fmt.Sprintf("website/%s_%s.tar.gz", website.Alias, time.Now().Format("20060102150405")))
-       if err := handleWebsiteBackup(website, path.Dir(rollbackFile), path.Base(rollbackFile), ""); err != nil {
+       if err := handleWebsiteBackup(website, path.Dir(rollbackFile), path.Base(rollbackFile), "", ""); err != nil {
            return fmt.Errorf("backup website %s for rollback before recover failed, err: %v", website.Alias, err)
        }
        defer func() {
            if !isOk {
                global.LOG.Info("recover failed, start to rollback now")
-               if err := handleWebsiteRecover(website, rollbackFile, true); err != nil {
+               if err := handleWebsiteRecover(website, rollbackFile, true, secret); err != nil {
                    global.LOG.Errorf("rollback website %s from %s failed, err: %v", website.Alias, rollbackFile, err)
                    return
                }
@@ -141,7 +141,7 @@ func handleWebsiteRecover(website *model.Website, recoverFile string, isRollback
        if err != nil {
            return err
        }
-       if err := handleAppRecover(&app, fmt.Sprintf("%s/%s.app.tar.gz", tmpPath, website.Alias), true); err != nil {
+       if err := handleAppRecover(&app, fmt.Sprintf("%s/%s.app.tar.gz", tmpPath, website.Alias), true, ""); err != nil {
            global.LOG.Errorf("handle recover from app.tar.gz failed, err: %v", err)
            return err
        }
@@ -155,7 +155,7 @@ func handleWebsiteRecover(website *model.Website, recoverFile string, isRollback
            return err
        }
        if runtime.Type == constant.RuntimeNode {
-           if err := handleRuntimeRecover(runtime, fmt.Sprintf("%s/%s.runtime.tar.gz", tmpPath, website.Alias), true); err != nil {
+           if err := handleRuntimeRecover(runtime, fmt.Sprintf("%s/%s.runtime.tar.gz", tmpPath, website.Alias), true, ""); err != nil {
                return err
            }
            global.LOG.Info("put runtime.tar.gz into tmp dir successful")
@@ -163,7 +163,7 @@ func handleWebsiteRecover(website *model.Website, recoverFile string, isRollback
    }
    siteDir := fmt.Sprintf("%s/openresty/%s/www/sites", constant.AppInstallDir, nginxInfo.Name)
-   if err := handleUnTar(fmt.Sprintf("%s/%s.web.tar.gz", tmpPath, website.Alias), siteDir); err != nil {
+   if err := handleUnTar(fmt.Sprintf("%s/%s.web.tar.gz", tmpPath, website.Alias), siteDir, ""); err != nil {
        global.LOG.Errorf("handle recover from web.tar.gz failed, err: %v", err)
        return err
    }
@@ -182,7 +182,7 @@ func handleWebsiteRecover(website *model.Website, recoverFile string, isRollback
    return nil
}

- func handleWebsiteBackup(website *model.Website, backupDir, fileName string, excludes string) error {
+ func handleWebsiteBackup(website *model.Website, backupDir, fileName string, excludes string, secret string) error {
    fileOp := files.NewFileOp()
    tmpDir := fmt.Sprintf("%s/%s", backupDir, strings.ReplaceAll(fileName, ".tar.gz", ""))
    if !fileOp.Stat(tmpDir) {
@@ -216,7 +216,7 @@ func handleWebsiteBackup(website *model.Website, backupDir, fileName string, exc
        if err != nil {
            return err
        }
-       if err := handleAppBackup(&app, tmpDir, fmt.Sprintf("%s.app.tar.gz", website.Alias), excludes); err != nil {
+       if err := handleAppBackup(&app, tmpDir, fmt.Sprintf("%s.app.tar.gz", website.Alias), excludes, ""); err != nil {
            return err
        }
        global.LOG.Info("put app.tar.gz into tmp dir successful")
@@ -226,7 +226,7 @@ func handleWebsiteBackup(website *model.Website, backupDir, fileName string, exc
            return err
        }
        if runtime.Type == constant.RuntimeNode {
-           if err := handleRuntimeBackup(runtime, tmpDir, fmt.Sprintf("%s.runtime.tar.gz", website.Alias), excludes); err != nil {
+           if err := handleRuntimeBackup(runtime, tmpDir, fmt.Sprintf("%s.runtime.tar.gz", website.Alias), excludes, ""); err != nil {
                return err
            }
            global.LOG.Info("put runtime.tar.gz into tmp dir successful")
@@ -234,11 +234,11 @@ func handleWebsiteBackup(website *model.Website, backupDir, fileName string, exc
    }
    websiteDir := fmt.Sprintf("%s/openresty/%s/www/sites/%s", constant.AppInstallDir, nginxInfo.Name, website.Alias)
-   if err := handleTar(websiteDir, tmpDir, fmt.Sprintf("%s.web.tar.gz", website.Alias), excludes); err != nil {
+   if err := handleTar(websiteDir, tmpDir, fmt.Sprintf("%s.web.tar.gz", website.Alias), excludes, ""); err != nil {
        return err
    }
    global.LOG.Info("put web.tar.gz into tmp dir successful, now start to tar tmp dir")
-   if err := handleTar(tmpDir, backupDir, fileName, ""); err != nil {
+   if err := handleTar(tmpDir, backupDir, fileName, "", secret); err != nil {
        return err
    }


@@ -173,6 +173,7 @@ func (u *CronjobService) Create(cronjobDto dto.CronjobCreate) error {
    if cronjob.ID != 0 {
        return constant.ErrRecordExist
    }
+   cronjob.Secret = cronjobDto.Secret
    if err := copier.Copy(&cronjob, &cronjobDto); err != nil {
        return errors.WithMessage(constant.ErrStructTransform, err.Error())
    }
@@ -279,6 +280,7 @@ func (u *CronjobService) Update(id uint, req dto.CronjobUpdate) error {
    upMap["backup_accounts"] = req.BackupAccounts
    upMap["default_download"] = req.DefaultDownload
    upMap["retain_copies"] = req.RetainCopies
+   upMap["secret"] = req.Secret
    return cronjobRepo.Update(id, upMap)
}


@@ -41,7 +41,7 @@ func (u *CronjobService) handleApp(cronjob model.Cronjob, startTime time.Time) e
    record.Source, record.BackupType = loadRecordPath(cronjob, accountMap)
    backupDir := path.Join(global.CONF.System.TmpDir, fmt.Sprintf("app/%s/%s", app.App.Key, app.Name))
    record.FileName = fmt.Sprintf("app_%s_%s.tar.gz", app.Name, startTime.Format("20060102150405")+common.RandStrAndNum(5))
-   if err := handleAppBackup(&app, backupDir, record.FileName, cronjob.ExclusionRules); err != nil {
+   if err := handleAppBackup(&app, backupDir, record.FileName, cronjob.ExclusionRules, cronjob.Secret); err != nil {
        return err
    }
    downloadPath, err := u.uploadCronjobBackFile(cronjob, accountMap, path.Join(backupDir, record.FileName))
@@ -74,7 +74,7 @@ func (u *CronjobService) handleWebsite(cronjob model.Cronjob, startTime time.Tim
    record.Source, record.BackupType = loadRecordPath(cronjob, accountMap)
    backupDir := path.Join(global.CONF.System.TmpDir, fmt.Sprintf("website/%s", web.PrimaryDomain))
    record.FileName = fmt.Sprintf("website_%s_%s.tar.gz", web.PrimaryDomain, startTime.Format("20060102150405")+common.RandStrAndNum(5))
-   if err := handleWebsiteBackup(&web, backupDir, record.FileName, cronjob.ExclusionRules); err != nil {
+   if err := handleWebsiteBackup(&web, backupDir, record.FileName, cronjob.ExclusionRules, cronjob.Secret); err != nil {
        return err
    }
    downloadPath, err := u.uploadCronjobBackFile(cronjob, accountMap, path.Join(backupDir, record.FileName))
@@ -138,7 +138,7 @@ func (u *CronjobService) handleDirectory(cronjob model.Cronjob, startTime time.T
    }
    fileName := fmt.Sprintf("directory%s_%s.tar.gz", strings.ReplaceAll(cronjob.SourceDir, "/", "_"), startTime.Format("20060102150405")+common.RandStrAndNum(5))
    backupDir := path.Join(global.CONF.System.TmpDir, fmt.Sprintf("%s/%s", cronjob.Type, cronjob.Name))
-   if err := handleTar(cronjob.SourceDir, backupDir, fileName, cronjob.ExclusionRules); err != nil {
+   if err := handleTar(cronjob.SourceDir, backupDir, fileName, cronjob.ExclusionRules, cronjob.Secret); err != nil {
        return err
    }
    var record model.BackupRecord
@@ -169,7 +169,7 @@ func (u *CronjobService) handleSystemLog(cronjob model.Cronjob, startTime time.T
    nameItem := startTime.Format("20060102150405") + common.RandStrAndNum(5)
    fileName := fmt.Sprintf("system_log_%s.tar.gz", nameItem)
    backupDir := path.Join(global.CONF.System.TmpDir, "log", nameItem)
-   if err := handleBackupLogs(backupDir, fileName); err != nil {
+   if err := handleBackupLogs(backupDir, fileName, cronjob.Secret); err != nil {
        return err
    }
    var record model.BackupRecord
@@ -210,7 +210,7 @@ func (u *CronjobService) handleSnapshot(cronjob model.Cronjob, startTime time.Ti
        From: record.BackupType,
        DefaultDownload: cronjob.DefaultDownload,
    }
-   name, err := NewISnapshotService().HandleSnapshot(true, logPath, req, startTime.Format("20060102150405")+common.RandStrAndNum(5))
+   name, err := NewISnapshotService().HandleSnapshot(true, logPath, req, startTime.Format("20060102150405")+common.RandStrAndNum(5), cronjob.Secret)
    if err != nil {
        return err
    }
@@ -303,7 +303,7 @@ func loadRecordPath(cronjob model.Cronjob, accountMap map[string]cronjobUploadHe
    return source, backupType
}

- func handleBackupLogs(targetDir, fileName string) error {
+ func handleBackupLogs(targetDir, fileName string, secret string) error {
    websites, err := websiteRepo.List()
    if err != nil {
        return err
@@ -376,7 +376,7 @@ func handleBackupLogs(targetDir, fileName string) error {
    }
    global.LOG.Debug("backup ssh log successful!")
-   if err := handleTar(targetDir, path.Dir(targetDir), fileName, ""); err != nil {
+   if err := handleTar(targetDir, path.Dir(targetDir), fileName, "", secret); err != nil {
        return err
    }
    defer func() {


@@ -130,7 +130,7 @@ func (u *CronjobService) handleNtpSync() error {
    return nil
}

- func handleTar(sourceDir, targetDir, name, exclusionRules string) error {
+ func handleTar(sourceDir, targetDir, name, exclusionRules string, secret string) error {
    if _, err := os.Stat(targetDir); err != nil && os.IsNotExist(err) {
        if err = os.MkdirAll(targetDir, os.ModePerm); err != nil {
            return err
@@ -158,8 +158,14 @@ func handleTar(sourceDir, targetDir, name, exclusionRules string) error {
        path = sourceDir
    }
-   commands := fmt.Sprintf("tar --warning=no-file-changed --ignore-failed-read -zcf %s %s %s", targetDir+"/"+name, excludeRules, path)
-   global.LOG.Debug(commands)
+   commands := ""
+   if secret != "" {
+       extraCmd := "| openssl enc -aes-256-cbc -salt -k " + secret + " -out"
+       commands = fmt.Sprintf("tar --warning=no-file-changed --ignore-failed-read -zcf %s %s %s %s", " -"+excludeRules, path, extraCmd, targetDir+"/"+name)
+   } else {
+       commands = fmt.Sprintf("tar --warning=no-file-changed --ignore-failed-read -zcf %s %s %s", targetDir+"/"+name, excludeRules, path)
+   }
+   global.LOG.Debug(strings.ReplaceAll(commands, secret, "******"))
    stdout, err := cmd.ExecWithTimeOut(commands, 24*time.Hour)
    if err != nil {
        if len(stdout) != 0 {
@@ -170,15 +176,20 @@ func handleTar(sourceDir, targetDir, name, exclusionRules string) error {
    return nil
}

- func handleUnTar(sourceFile, targetDir string) error {
+ func handleUnTar(sourceFile, targetDir string, secret string) error {
    if _, err := os.Stat(targetDir); err != nil && os.IsNotExist(err) {
        if err = os.MkdirAll(targetDir, os.ModePerm); err != nil {
            return err
        }
    }
-   commands := fmt.Sprintf("tar zxvfC %s %s", sourceFile, targetDir)
-   global.LOG.Debug(commands)
+   commands := ""
+   if secret != "" {
+       extraCmd := "openssl enc -d -aes-256-cbc -k " + secret + " -in " + sourceFile + " | "
+       commands = fmt.Sprintf("%s tar -zxvf - -C %s", extraCmd, targetDir+" > /dev/null 2>&1")
+   } else {
+       commands = fmt.Sprintf("tar -zxvf %s %s", sourceFile+" -C ", targetDir+" > /dev/null 2>&1")
+   }
+   global.LOG.Debug(strings.ReplaceAll(commands, secret, "******"))
    stdout, err := cmd.ExecWithTimeOut(commands, 24*time.Hour)
    if err != nil {
        global.LOG.Errorf("do handle untar failed, stdout: %s, err: %v", stdout, err)


@@ -218,12 +218,12 @@ func (f *FileService) Compress(c request.FileCompress) error {
    if !c.Replace && fo.Stat(filepath.Join(c.Dst, c.Name)) {
        return buserr.New(constant.ErrFileIsExit)
    }
-   return fo.Compress(c.Files, c.Dst, c.Name, files.CompressType(c.Type))
+   return fo.Compress(c.Files, c.Dst, c.Name, files.CompressType(c.Type), c.Secret)
}

func (f *FileService) DeCompress(c request.FileDeCompress) error {
    fo := files.NewFileOp()
-   return fo.Decompress(c.Path, c.Dst, files.CompressType(c.Type))
+   return fo.Decompress(c.Path, c.Dst, files.CompressType(c.Type), c.Secret)
}

func (f *FileService) GetContent(op request.FileContentReq) (response.FileInfo, error) {
@@ -325,7 +325,7 @@ func (f *FileService) FileDownload(d request.FileDownload) (string, error) {
        return "", err
    }
    fo := files.NewFileOp()
-   if err := fo.Compress(d.Paths, tempPath, d.Name, files.CompressType(d.Type)); err != nil {
+   if err := fo.Compress(d.Paths, tempPath, d.Name, files.CompressType(d.Type), ""); err != nil {
        return "", err
    }
    filePath = filepath.Join(tempPath, d.Name)


@@ -39,7 +39,7 @@ type ISnapshotService interface {
    UpdateDescription(req dto.UpdateDescription) error
    readFromJson(path string) (SnapshotJson, error)
-   HandleSnapshot(isCronjob bool, logPath string, req dto.SnapshotCreate, timeNow string) (string, error)
+   HandleSnapshot(isCronjob bool, logPath string, req dto.SnapshotCreate, timeNow string, secret string) (string, error)
}

func NewISnapshotService() ISnapshotService {
@@ -123,7 +123,7 @@ type SnapshotJson struct {
}

func (u *SnapshotService) SnapshotCreate(req dto.SnapshotCreate) error {
-   if _, err := u.HandleSnapshot(false, "", req, time.Now().Format("20060102150405")); err != nil {
+   if _, err := u.HandleSnapshot(false, "", req, time.Now().Format("20060102150405"), req.Secret); err != nil {
        return err
    }
    return nil
@@ -180,7 +180,7 @@ func (u *SnapshotService) readFromJson(path string) (SnapshotJson, error) {
    return snap, nil
}

- func (u *SnapshotService) HandleSnapshot(isCronjob bool, logPath string, req dto.SnapshotCreate, timeNow string) (string, error) {
+ func (u *SnapshotService) HandleSnapshot(isCronjob bool, logPath string, req dto.SnapshotCreate, timeNow string, secret string) (string, error) {
    localDir, err := loadLocalDir()
    if err != nil {
        return "", err
@@ -274,7 +274,7 @@ func (u *SnapshotService) HandleSnapshot(isCronjob bool, logPath string, req dto
            return
        }
        if snapStatus.Compress != constant.StatusDone {
-           snapCompress(itemHelper, rootDir)
+           snapCompress(itemHelper, rootDir, secret)
        }
        if snapStatus.Compress != constant.StatusDone {
            _ = snapshotRepo.Update(snap.ID, map[string]interface{}{"status": constant.StatusFailed})
@@ -305,7 +305,7 @@ func (u *SnapshotService) HandleSnapshot(isCronjob bool, logPath string, req dto
        return snap.Name, fmt.Errorf("snapshot %s 1panel data failed", snap.Name)
    }
    loadLogByStatus(snapStatus, logPath)
-   snapCompress(itemHelper, rootDir)
+   snapCompress(itemHelper, rootDir, secret)
    if snapStatus.Compress != constant.StatusDone {
        _ = snapshotRepo.Update(snap.ID, map[string]interface{}{"status": constant.StatusFailed})
        loadLogByStatus(snapStatus, logPath)
@@ -383,15 +383,20 @@ func updateRecoverStatus(id uint, isRecover bool, interruptStep, status, message
    }
}

- func (u *SnapshotService) handleUnTar(sourceDir, targetDir string) error {
+ func (u *SnapshotService) handleUnTar(sourceDir, targetDir string, secret string) error {
    if _, err := os.Stat(targetDir); err != nil && os.IsNotExist(err) {
        if err = os.MkdirAll(targetDir, os.ModePerm); err != nil {
            return err
        }
    }
-   commands := fmt.Sprintf("tar -zxf %s -C %s .", sourceDir, targetDir)
-   global.LOG.Debug(commands)
+   commands := ""
+   if secret != "" {
+       extraCmd := "openssl enc -d -aes-256-cbc -k " + secret + " -in " + sourceDir + " | "
+       commands = fmt.Sprintf("%s tar -zxvf - -C %s", extraCmd, targetDir+" > /dev/null 2>&1")
+   } else {
+       commands = fmt.Sprintf("tar -zxvf %s %s", sourceDir+" -C ", targetDir+" > /dev/null 2>&1")
+   }
+   global.LOG.Debug(strings.ReplaceAll(commands, secret, "******"))
    stdout, err := cmd.ExecWithTimeOut(commands, 30*time.Minute)
    if err != nil {
        if len(stdout) != 0 {


@@ -131,7 +131,7 @@ func snapBackup(snap snapHelper, localDir, targetDir string) {
    defer snap.Wg.Done()
    _ = snapshotRepo.UpdateStatus(snap.Status.ID, map[string]interface{}{"backup_data": constant.Running})
    status := constant.StatusDone
-   if err := handleSnapTar(localDir, targetDir, "1panel_backup.tar.gz", "./system;./system_snapshot;"); err != nil {
+   if err := handleSnapTar(localDir, targetDir, "1panel_backup.tar.gz", "./system;./system_snapshot;", ""); err != nil {
        status = err.Error()
    }
    snap.Status.BackupData = status
@@ -158,7 +158,7 @@ func snapPanelData(snap snapHelper, localDir, targetDir string) {
    sysIP, _ := settingRepo.Get(settingRepo.WithByKey("SystemIP"))
    _ = settingRepo.Update("SystemIP", "")
    checkPointOfWal()
-   if err := handleSnapTar(dataDir, targetDir, "1panel_data.tar.gz", exclusionRules); err != nil {
+   if err := handleSnapTar(dataDir, targetDir, "1panel_data.tar.gz", exclusionRules, ""); err != nil {
        status = err.Error()
    }
    _ = snapshotRepo.Update(snap.SnapID, map[string]interface{}{"status": constant.StatusWaiting})
@@ -168,11 +168,11 @@ func snapPanelData(snap snapHelper, localDir, targetDir string) {
    _ = settingRepo.Update("SystemIP", sysIP.Value)
}

- func snapCompress(snap snapHelper, rootDir string) {
+ func snapCompress(snap snapHelper, rootDir string, secret string) {
    _ = snapshotRepo.UpdateStatus(snap.Status.ID, map[string]interface{}{"compress": constant.StatusRunning})
    tmpDir := path.Join(global.CONF.System.TmpDir, "system")
    fileName := fmt.Sprintf("%s.tar.gz", path.Base(rootDir))
-   if err := snap.FileOp.Compress([]string{rootDir}, tmpDir, fileName, files.TarGz); err != nil {
+   if err := handleSnapTar(rootDir, tmpDir, fileName, "", secret); err != nil {
        snap.Status.Compress = err.Error()
        _ = snapshotRepo.UpdateStatus(snap.Status.ID, map[string]interface{}{"compress": err.Error()})
        return
@@ -221,7 +221,7 @@ func snapUpload(snap snapHelper, accounts string, file string) {
    _ = os.Remove(source)
}

- func handleSnapTar(sourceDir, targetDir, name, exclusionRules string) error {
+ func handleSnapTar(sourceDir, targetDir, name, exclusionRules string, secret string) error {
    if _, err := os.Stat(targetDir); err != nil && os.IsNotExist(err) {
        if err = os.MkdirAll(targetDir, os.ModePerm); err != nil {
            return err
@@ -243,9 +243,25 @@ func handleSnapTar(sourceDir, targetDir, name, exclusionRules string) error {
        exStr += exclude
        exMap[exclude] = struct{}{}
    }
-   commands := fmt.Sprintf("tar --warning=no-file-changed --ignore-failed-read -zcf %s %s -C %s .", targetDir+"/"+name, exStr, sourceDir)
-   global.LOG.Debug(commands)
+   path := ""
+   if strings.Contains(sourceDir, "/") {
+       itemDir := strings.ReplaceAll(sourceDir[strings.LastIndex(sourceDir, "/"):], "/", "")
+       aheadDir := sourceDir[:strings.LastIndex(sourceDir, "/")]
+       if len(aheadDir) == 0 {
+           aheadDir = "/"
+       }
+       path += fmt.Sprintf("-C %s %s", aheadDir, itemDir)
+   } else {
+       path = sourceDir
+   }
+   commands := ""
+   if secret != "" {
+       extraCmd := "| openssl enc -aes-256-cbc -salt -k " + secret + " -out"
+       commands = fmt.Sprintf("tar --warning=no-file-changed --ignore-failed-read -zcf %s %s %s %s", " -"+exStr, path, extraCmd, targetDir+"/"+name)
+   } else {
+       commands = fmt.Sprintf("tar --warning=no-file-changed --ignore-failed-read -zcf %s %s -C %s .", targetDir+"/"+name, exStr, sourceDir)
+   }
+   global.LOG.Debug(strings.ReplaceAll(commands, secret, "******"))
    stdout, err := cmd.ExecWithTimeOut(commands, 30*time.Minute)
    if err != nil {
        if len(stdout) != 0 {
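The new path-splitting block above makes the encrypted archive contain the top-level snapshot directory by its base name (`<basename>/...`, via `-C <parent> <basename>`) instead of archiving the directory's contents as `./...`. A tiny sketch of the same computation (illustrative input; not from the PR), which for these inputs matches what path.Dir and path.Base would return:

package main

import (
    "fmt"
    "strings"
)

func main() {
    sourceDir := "/opt/1panel/tmp/system/1panel_snapshot_20240604" // illustrative
    itemDir := strings.ReplaceAll(sourceDir[strings.LastIndex(sourceDir, "/"):], "/", "")
    aheadDir := sourceDir[:strings.LastIndex(sourceDir, "/")]
    if len(aheadDir) == 0 {
        aheadDir = "/"
    }
    fmt.Printf("-C %s %s\n", aheadDir, itemDir)
    // Output: -C /opt/1panel/tmp/system 1panel_snapshot_20240604
}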


@@ -30,14 +30,6 @@ func (u *SnapshotService) HandleSnapshotRecover(snap model.Snapshot, isRecover b
    if _, err := os.Stat(baseDir); err != nil && os.IsNotExist(err) {
        _ = os.MkdirAll(baseDir, os.ModePerm)
    }
-   if req.IsNew || snap.InterruptStep == "Backup" {
-       if err := backupBeforeRecover(snap); err != nil {
-           updateRecoverStatus(snap.ID, isRecover, "Backup", constant.StatusFailed, fmt.Sprintf("handle backup before recover failed, err: %v", err))
-           return
-       }
-       global.LOG.Debug("handle backup before recover successful!")
-       req.IsNew = true
-   }
    if req.IsNew || snap.InterruptStep == "Download" || req.ReDownload {
        if err := handleDownloadSnapshot(snap, baseDir); err != nil {
            updateRecoverStatus(snap.ID, isRecover, "Backup", constant.StatusFailed, err.Error())
@@ -47,13 +39,21 @@ func (u *SnapshotService) HandleSnapshotRecover(snap model.Snapshot, isRecover b
        req.IsNew = true
    }
    if req.IsNew || snap.InterruptStep == "Decompress" {
-       if err := handleUnTar(fmt.Sprintf("%s/%s.tar.gz", baseDir, snap.Name), baseDir); err != nil {
+       if err := u.handleUnTar(fmt.Sprintf("%s/%s.tar.gz", baseDir, snap.Name), baseDir, req.Secret); err != nil {
            updateRecoverStatus(snap.ID, isRecover, "Decompress", constant.StatusFailed, fmt.Sprintf("decompress file failed, err: %v", err))
            return
        }
        global.LOG.Debug("decompress snapshot file successful!", baseDir)
        req.IsNew = true
    }
+   if req.IsNew || snap.InterruptStep == "Backup" {
+       if err := backupBeforeRecover(snap, ""); err != nil {
+           updateRecoverStatus(snap.ID, isRecover, "Backup", constant.StatusFailed, fmt.Sprintf("handle backup before recover failed, err: %v", err))
+           return
+       }
+       global.LOG.Debug("handle backup before recover successful!")
+       req.IsNew = true
+   }
    snapFileDir = fmt.Sprintf("%s/%s", baseDir, snap.Name)
} else {
    snapFileDir = fmt.Sprintf("%s/1panel_original/original_%s", global.CONF.System.BaseDir, snap.Name)
@@ -114,7 +114,7 @@ func (u *SnapshotService) HandleSnapshotRecover(snap model.Snapshot, isRecover b
    }
    if req.IsNew || snap.InterruptStep == "1PanelBackups" {
-       if err := u.handleUnTar(path.Join(snapFileDir, "/1panel/1panel_backup.tar.gz"), snapJson.BackupDataDir); err != nil {
+       if err := u.handleUnTar(path.Join(snapFileDir, "/1panel/1panel_backup.tar.gz"), snapJson.BackupDataDir, ""); err != nil {
            updateRecoverStatus(snap.ID, isRecover, "1PanelBackups", constant.StatusFailed, err.Error())
            return
        }
@@ -124,7 +124,7 @@ func (u *SnapshotService) HandleSnapshotRecover(snap model.Snapshot, isRecover b
    if req.IsNew || snap.InterruptStep == "1PanelData" {
        checkPointOfWal()
-       if err := u.handleUnTar(path.Join(snapFileDir, "/1panel/1panel_data.tar.gz"), path.Join(snapJson.BaseDir, "1panel")); err != nil {
+       if err := u.handleUnTar(path.Join(snapFileDir, "/1panel/1panel_data.tar.gz"), path.Join(snapJson.BaseDir, "1panel"), ""); err != nil {
            updateRecoverStatus(snap.ID, isRecover, "1PanelData", constant.StatusFailed, err.Error())
            return
        }
@@ -146,7 +146,7 @@ func (u *SnapshotService) HandleSnapshotRecover(snap model.Snapshot, isRecover b
    _, _ = cmd.Exec("systemctl daemon-reload && systemctl restart 1panel.service")
}

- func backupBeforeRecover(snap model.Snapshot) error {
+ func backupBeforeRecover(snap model.Snapshot, secret string) error {
    baseDir := fmt.Sprintf("%s/1panel_original/original_%s", global.CONF.System.BaseDir, snap.Name)
    var wg sync.WaitGroup
    var status model.SnapshotStatus


@@ -117,7 +117,7 @@ func (u *UpgradeService) Upgrade(req dto.Upgrade) error {
    defer func() {
        _ = os.Remove(rootDir)
    }()
-   if err := handleUnTar(rootDir+"/"+fileName, rootDir); err != nil {
+   if err := handleUnTar(rootDir+"/"+fileName, rootDir, ""); err != nil {
        global.LOG.Errorf("decompress file failed, err: %v", err)
        _ = settingRepo.Update("SystemStatus", "Free")
        return
@@ -175,7 +175,7 @@ func (u *UpgradeService) handleBackup(fileOp files.FileOp, originalDir string) e
        return err
    }
    checkPointOfWal()
-   if err := handleTar(path.Join(global.CONF.System.BaseDir, "1panel/db"), originalDir, "db.tar.gz", "db/1Panel.db-*"); err != nil {
+   if err := handleTar(path.Join(global.CONF.System.BaseDir, "1panel/db"), originalDir, "db.tar.gz", "db/1Panel.db-*", ""); err != nil {
        return err
    }
    return nil
@@ -191,7 +191,7 @@ func (u *UpgradeService) handleRollback(originalDir string, errStep int) {
        }
    }
    if _, err := os.Stat(path.Join(originalDir, "db.tar.gz")); err == nil {
-       if err := handleUnTar(path.Join(originalDir, "db.tar.gz"), global.CONF.System.DbPath); err != nil {
+       if err := handleUnTar(path.Join(originalDir, "db.tar.gz"), global.CONF.System.DbPath, ""); err != nil {
            global.LOG.Errorf("rollback 1panel db failed, err: %v", err)
        }
    }


@@ -550,7 +550,7 @@ func (w WebsiteSSLService) DownloadFile(id uint) (*os.File, error) {
        return nil, err
    }
    fileName := websiteSSL.PrimaryDomain + ".zip"
-   if err = fileOp.Compress([]string{path.Join(dir, "fullchain.pem"), path.Join(dir, "privkey.pem")}, dir, fileName, files.SdkZip); err != nil {
+   if err = fileOp.Compress([]string{path.Join(dir, "fullchain.pem"), path.Join(dir, "privkey.pem")}, dir, fileName, files.SdkZip, ""); err != nil {
        return nil, err
    }
    return os.Open(path.Join(dir, fileName))


@@ -88,6 +88,7 @@ func Init() {
    migrations.AddMonitorMenu,
    migrations.AddFtp,
    migrations.AddProxy,
+   migrations.AddCronJobColumn,
})
if err := m.Migrate(); err != nil {
    global.LOG.Error(err)


@@ -238,3 +238,13 @@ var AddProxy = &gormigrate.Migration{
        return nil
    },
}
+
+ var AddCronJobColumn = &gormigrate.Migration{
+     ID: "20240524-add-cronjob-command",
+     Migrate: func(tx *gorm.DB) error {
+         if err := tx.AutoMigrate(&model.Cronjob{}); err != nil {
+             return err
+         }
+         return nil
+     },
+ }


@@ -7,8 +7,8 @@ import (
)

type ShellArchiver interface {
-   Extract(filePath, dstDir string) error
-   Compress(sourcePaths []string, dstFile string) error
+   Extract(filePath, dstDir string, secret string) error
+   Compress(sourcePaths []string, dstFile string, secret string) error
}

func NewShellArchiver(compressType CompressType) (ShellArchiver, error) {
@@ -18,6 +18,8 @@ func NewShellArchiver(compressType CompressType) (ShellArchiver, error) {
            return nil, err
        }
        return NewTarArchiver(compressType), nil
+   case TarGz:
+       return NewTarGzArchiver(), nil
    case Zip:
        if err := checkCmdAvailability("zip"); err != nil {
            return nil, err


@ -476,7 +476,7 @@ func getFormat(cType CompressType) archiver.CompressedArchive {
return format return format
} }
func (f FileOp) Compress(srcRiles []string, dst string, name string, cType CompressType) error { func (f FileOp) Compress(srcRiles []string, dst string, name string, cType CompressType, secret string) error {
format := getFormat(cType) format := getFormat(cType)
fileMaps := make(map[string]string, len(srcRiles)) fileMaps := make(map[string]string, len(srcRiles))
@ -505,7 +505,13 @@ func (f FileOp) Compress(srcRiles []string, dst string, name string, cType Compr
return nil return nil
} }
_ = f.DeleteFile(dstFile) _ = f.DeleteFile(dstFile)
return NewZipArchiver().Compress(srcRiles, dstFile) return NewZipArchiver().Compress(srcRiles, dstFile, "")
case TarGz:
err = NewTarGzArchiver().Compress(srcRiles, dstFile, secret)
if err != nil {
_ = f.DeleteFile(dstFile)
return err
}
default: default:
err = format.Archive(context.Background(), out, files) err = format.Archive(context.Background(), out, files)
if err != nil { if err != nil {
@ -583,14 +589,22 @@ func (f FileOp) decompressWithSDK(srcFile string, dst string, cType CompressType
return format.Extract(context.Background(), input, nil, handler) return format.Extract(context.Background(), input, nil, handler)
} }
func (f FileOp) Decompress(srcFile string, dst string, cType CompressType) error { func (f FileOp) Decompress(srcFile string, dst string, cType CompressType, secret string) error {
if err := f.decompressWithSDK(srcFile, dst, cType); err != nil { if err := f.decompressWithSDK(srcFile, dst, cType); err != nil {
if cType == Tar || cType == Zip { if cType == Tar || cType == Zip || cType == TarGz {
if secret != "" {
shellArchiver, err := NewShellArchiver(TarGz)
if err != nil {
return err
}
return shellArchiver.Extract(srcFile, dst, secret)
} else {
shellArchiver, err := NewShellArchiver(cType) shellArchiver, err := NewShellArchiver(cType)
if err != nil { if err != nil {
return err return err
} }
return shellArchiver.Extract(srcFile, dst) return shellArchiver.Extract(srcFile, dst, secret)
}
} }
return err return err
} }
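Note: with these two hunks, tar.gz compression always goes through the shell archiver (openssl is only invoked when a password is set, and a half-written archive is deleted on failure), while decompression tries the archiver SDK first and falls back to the shell; a non-empty password forces the tar.gz fallback, since the SDK cannot read the openssl-wrapped file. A hedged example of the resulting FileOp calls, with placeholder paths:

```go
// Sketch only: paths and the password are placeholders.
op := files.NewFileOp()
// Routed to TarGzArchiver; with a non-empty secret the tar stream is piped through openssl.
if err := op.Compress([]string{"/opt/backup/demo"}, "/opt/backup", "demo.tar.gz", files.TarGz, "my-password"); err != nil {
	return err
}
// The SDK attempt fails on the encrypted archive, so Decompress falls back to the
// tar.gz shell archiver, which must be given the same password.
if err := op.Decompress("/opt/backup/demo.tar.gz", "/opt/restore", files.TarGz, "my-password"); err != nil {
	return err
}
```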

View File

@ -17,11 +17,11 @@ func NewTarArchiver(compressType CompressType) ShellArchiver {
} }
} }
func (t TarArchiver) Extract(FilePath string, dstDir string) error { func (t TarArchiver) Extract(FilePath string, dstDir string, secret string) error {
return cmd.ExecCmd(fmt.Sprintf("%s %s %s -C %s", t.Cmd, t.getOptionStr("extract"), FilePath, dstDir)) return cmd.ExecCmd(fmt.Sprintf("%s %s %s -C %s", t.Cmd, t.getOptionStr("extract"), FilePath, dstDir))
} }
func (t TarArchiver) Compress(sourcePaths []string, dstFile string) error { func (t TarArchiver) Compress(sourcePaths []string, dstFile string, secret string) error {
return nil return nil
} }
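Note: the plain-tar shell archiver cannot encrypt, so it accepts the new parameter only to satisfy the widened interface and ignores it. Naming the parameter `_`, as zip.go's Compress does below, would make that explicit; a sketch:

```go
// Sketch of the same method with the unused password made explicit.
func (t TarArchiver) Extract(filePath, dstDir string, _ string) error {
	return cmd.ExecCmd(fmt.Sprintf("%s %s %s -C %s", t.Cmd, t.getOptionStr("extract"), filePath, dstDir))
}
```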

View File

@ -0,0 +1,58 @@
package files
import (
"fmt"
"github.com/1Panel-dev/1Panel/backend/global"
"github.com/1Panel-dev/1Panel/backend/utils/cmd"
"path/filepath"
"strings"
)
type TarGzArchiver struct {
}
func NewTarGzArchiver() ShellArchiver {
return &TarGzArchiver{}
}
func (t TarGzArchiver) Extract(filePath, dstDir string, secret string) error {
var err error
commands := ""
if secret != "" {
// decrypt with openssl first, then stream the plain tar.gz into tar via stdin
commands = fmt.Sprintf("openssl enc -d -aes-256-cbc -k %s -in %s | tar -zxvf - -C %s > /dev/null 2>&1", secret, filePath, dstDir)
} else {
commands = fmt.Sprintf("tar -zxvf %s -C %s > /dev/null 2>&1", filePath, dstDir)
}
logCmd := commands
if secret != "" {
// mask the password before logging; strings.ReplaceAll with an empty old string would mangle the output
logCmd = strings.ReplaceAll(commands, secret, "******")
}
global.LOG.Debug(logCmd)
if err = cmd.ExecCmd(commands); err != nil {
return err
}
return nil
}
func (t TarGzArchiver) Compress(sourcePaths []string, dstFile string, secret string) error {
var err error
itemDir := ""
for _, item := range sourcePaths {
// archive each source by its base name, relative to the destination directory
itemDir += filepath.Base(item) + " "
}
aheadDir := dstFile[:strings.LastIndex(dstFile, "/")]
if len(aheadDir) == 0 {
aheadDir = "/"
}
commands := ""
if secret != "" {
// stream the tar.gz to stdout and let openssl encrypt it into dstFile
commands = fmt.Sprintf("tar -zcf - -C %s %s | openssl enc -aes-256-cbc -salt -k %s -out %s", aheadDir, itemDir, secret, dstFile)
} else {
commands = fmt.Sprintf("tar -zcf %s -C %s %s", dstFile, aheadDir, itemDir)
}
logCmd := commands
if secret != "" {
// mask the password before logging; strings.ReplaceAll with an empty old string would mangle the output
logCmd = strings.ReplaceAll(commands, secret, "******")
}
global.LOG.Debug(logCmd)
if err = cmd.ExecCmd(commands); err != nil {
return err
}
return nil
}
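Note: with a password set, the effective pipelines are roughly `tar -zcf - -C <dir> <items> | openssl enc -aes-256-cbc -salt -k <secret> -out <archive>` for compression and `openssl enc -d -aes-256-cbc -k <secret> -in <archive> | tar -zxvf - -C <dir>` for extraction; without a password it is plain tar. Two caveats: sources are archived by base name relative to the destination file's directory, and `openssl enc -k` derives the key with openssl's default, version-dependent settings, so the archive should be restored with a compatible openssl. If a backup ever has to be restored by hand outside 1Panel, the same extraction pipeline can be reproduced as in the sketch below (placeholder paths, assumes openssl and tar on PATH):

```go
// Manual recovery sketch, equivalent to TarGzArchiver.Extract with a password.
cmdStr := fmt.Sprintf(
	"openssl enc -d -aes-256-cbc -k %s -in %s | tar -zxvf - -C %s",
	"my-password", "/opt/backup/demo.tar.gz", "/opt/restore",
)
out, err := exec.Command("bash", "-c", cmdStr).CombinedOutput()
if err != nil {
	return fmt.Errorf("restore failed: %w, output: %s", err, string(out))
}
```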

View File

@ -17,14 +17,14 @@ func NewZipArchiver() ShellArchiver {
return &ZipArchiver{} return &ZipArchiver{}
} }
func (z ZipArchiver) Extract(filePath, dstDir string) error { func (z ZipArchiver) Extract(filePath, dstDir string, secret string) error {
if err := checkCmdAvailability("unzip"); err != nil { if err := checkCmdAvailability("unzip"); err != nil {
return err return err
} }
return cmd.ExecCmd(fmt.Sprintf("unzip -qo %s -d %s", filePath, dstDir)) return cmd.ExecCmd(fmt.Sprintf("unzip -qo %s -d %s", filePath, dstDir))
} }
func (z ZipArchiver) Compress(sourcePaths []string, dstFile string) error { func (z ZipArchiver) Compress(sourcePaths []string, dstFile string, _ string) error {
var err error var err error
tmpFile := path.Join(global.CONF.System.TmpDir, fmt.Sprintf("%s%s.zip", common.RandStr(50), time.Now().Format("20060102150405"))) tmpFile := path.Join(global.CONF.System.TmpDir, fmt.Sprintf("%s%s.zip", common.RandStr(50), time.Now().Format("20060102150405")))
op := NewFileOp() op := NewFileOp()
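Note: the zip shell archiver discards the new argument (`_` in Compress, unused in Extract), so the compression password currently applies to tar.gz only. If zip passwords were ever wanted, the classic (and weak) ZipCrypto option of the zip CLI would look roughly like the helper below; this is hypothetical and not part of this commit.

```go
// Hypothetical, not part of this commit: password-protect a zip via the zip CLI
// (ZipCrypto via -P is weak; shown only to illustrate where the secret could go).
func zipWithPassword(sourcePath, dstFile, secret string) error {
	return cmd.ExecCmd(fmt.Sprintf("zip -qr -P %s %s %s", secret, dstFile, sourcePath))
}
```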

View File

@ -57,6 +57,7 @@ export namespace Backup {
type: string; type: string;
name: string; name: string;
detailName: string; detailName: string;
secret: string;
} }
export interface Recover { export interface Recover {
source: string; source: string;
@ -64,5 +65,6 @@ export namespace Backup {
name: string; name: string;
detailName: string; detailName: string;
file: string; file: string;
secret: string;
} }
} }

View File

@ -26,6 +26,7 @@ export namespace Cronjob {
backupAccountList: Array<string>; backupAccountList: Array<string>;
retainCopies: number; retainCopies: number;
status: string; status: string;
secret: string;
} }
export interface CronjobCreate { export interface CronjobCreate {
name: string; name: string;
@ -44,6 +45,7 @@ export namespace Cronjob {
backupAccounts: string; backupAccounts: string;
defaultDownload: string; defaultDownload: string;
retainCopies: number; retainCopies: number;
secret: string;
} }
export interface SpecObj { export interface SpecObj {
specType: string; specType: string;
@ -68,6 +70,7 @@ export namespace Cronjob {
backupAccounts: string; backupAccounts: string;
defaultDownload: string; defaultDownload: string;
retainCopies: number; retainCopies: number;
secret: string;
} }
export interface CronjobDelete { export interface CronjobDelete {
ids: Array<number>; ids: Array<number>;

View File

@ -80,12 +80,14 @@ export namespace File {
dst: string; dst: string;
name: string; name: string;
replace: boolean; replace: boolean;
secret: string;
} }
export interface FileDeCompress { export interface FileDeCompress {
path: string; path: string;
dst: string; dst: string;
type: string; type: string;
secret: string;
} }
export interface FileEdit { export interface FileEdit {

View File

@ -113,6 +113,7 @@ export namespace Setting {
fromAccounts: Array<string>; fromAccounts: Array<string>;
defaultDownload: string; defaultDownload: string;
description: string; description: string;
secret: string;
} }
export interface SnapshotImport { export interface SnapshotImport {
from: string; from: string;
@ -123,6 +124,7 @@ export namespace Setting {
id: number; id: number;
isNew: boolean; isNew: boolean;
reDownload: boolean; reDownload: boolean;
secret: string;
} }
export interface SnapshotInfo { export interface SnapshotInfo {
id: number; id: number;
@ -141,6 +143,7 @@ export namespace Setting {
rollbackStatus: string; rollbackStatus: string;
rollbackMessage: string; rollbackMessage: string;
lastRollbackedAt: string; lastRollbackedAt: string;
secret: string;
} }
export interface SnapshotStatus { export interface SnapshotStatus {
panel: string; panel: string;

View File

@ -43,6 +43,14 @@
<el-button type="primary" plain :disabled="selects.length === 0" @click="onBatchDelete(null)"> <el-button type="primary" plain :disabled="selects.length === 0" @click="onBatchDelete(null)">
{{ $t('commons.button.delete') }} {{ $t('commons.button.delete') }}
</el-button> </el-button>
<el-form-item
:label="$t('setting.compressPassword')"
prop="secret"
style="margin-top: 10px"
v-if="type === 'app' || type === 'website'"
>
<el-input v-model="secret"></el-input>
</el-form-item>
</template> </template>
<el-table-column type="selection" fix /> <el-table-column type="selection" fix />
<el-table-column :label="$t('commons.table.name')" prop="fileName" show-overflow-tooltip /> <el-table-column :label="$t('commons.table.name')" prop="fileName" show-overflow-tooltip />
@ -105,6 +113,7 @@ const name = ref();
const detailName = ref(); const detailName = ref();
const backupPath = ref(); const backupPath = ref();
const status = ref(); const status = ref();
const secret = ref();
interface DialogProps { interface DialogProps {
type: string; type: string;
@ -180,6 +189,7 @@ const onBackup = async () => {
type: type.value, type: type.value,
name: name.value, name: name.value,
detailName: detailName.value, detailName: detailName.value,
secret: secret.value,
}; };
loading.value = true; loading.value = true;
await handleBackup(params) await handleBackup(params)
@ -209,6 +219,7 @@ const onRecover = async (row: Backup.RecordInfo) => {
name: name.value, name: name.value,
detailName: detailName.value, detailName: detailName.value,
file: row.fileDir + '/' + row.fileName, file: row.fileDir + '/' + row.fileName,
secret: secret.value,
}; };
loading.value = true; loading.value = true;
await handleRecover(params) await handleRecover(params)

View File

@ -68,6 +68,14 @@
> >
{{ $t('commons.button.delete') }} {{ $t('commons.button.delete') }}
</el-button> </el-button>
<el-form-item
:label="$t('setting.compressPassword')"
prop="secret"
style="margin-top: 10px"
v-if="type === 'app' || type === 'website'"
>
<el-input v-model="secret"></el-input>
</el-form-item>
</template> </template>
<el-table-column type="selection" fix /> <el-table-column type="selection" fix />
<el-table-column :label="$t('commons.table.name')" show-overflow-tooltip prop="name" /> <el-table-column :label="$t('commons.table.name')" show-overflow-tooltip prop="name" />
@ -129,6 +137,7 @@ const type = ref();
const name = ref(); const name = ref();
const detailName = ref(); const detailName = ref();
const remark = ref(); const remark = ref();
const secret = ref();
interface DialogProps { interface DialogProps {
type: string; type: string;
name: string; name: string;
@ -191,6 +200,7 @@ const onRecover = async (row: File.File) => {
name: name.value, name: name.value,
detailName: detailName.value, detailName: detailName.value,
file: baseDir.value + row.name, file: baseDir.value + row.name,
secret: secret.value,
}; };
loading.value = true; loading.value = true;
await handleRecoverByUpload(params) await handleRecoverByUpload(params)

View File

@ -1541,6 +1541,7 @@ const message = {
ifShow: 'Whether to Show', ifShow: 'Whether to Show',
menu: 'Menu', menu: 'Menu',
confirmMessage: 'The page will be refreshed to update the advanced menu list. Continue?', confirmMessage: 'The page will be refreshed to update the advanced menu list. Continue?',
compressPassword: 'Compression Password',
}, },
license: { license: {
community: 'Community Edition: ', community: 'Community Edition: ',

View File

@ -1435,6 +1435,7 @@ const message = {
ifShow: '是否顯示', ifShow: '是否顯示',
menu: '選單', menu: '選單',
confirmMessage: '即將刷新頁面更新高級功能菜單列表是否繼續', confirmMessage: '即將刷新頁面更新高級功能菜單列表是否繼續',
compressPassword: '壓縮密碼',
}, },
license: { license: {
community: '社區版', community: '社區版',

View File

@ -1436,6 +1436,7 @@ const message = {
ifShow: '是否显示', ifShow: '是否显示',
menu: '菜单', menu: '菜单',
confirmMessage: '即将刷新页面更新高级功能菜单列表是否继续', confirmMessage: '即将刷新页面更新高级功能菜单列表是否继续',
compressPassword: '压缩密码',
}, },
license: { license: {
community: '社区版', community: '社区版',

View File

@ -300,6 +300,13 @@
</el-link> </el-link>
</span> </span>
</el-form-item> </el-form-item>
<el-form-item
:label="$t('setting.compressPassword')"
prop="secret"
v-if="isBackup() && dialogData.rowData!.type !== 'database'"
>
<el-input v-model="dialogData.rowData!.secret" />
</el-form-item>
<el-form-item :label="$t('cronjob.default_download_path')" prop="defaultDownload"> <el-form-item :label="$t('cronjob.default_download_path')" prop="defaultDownload">
<el-select class="selectClass" v-model="dialogData.rowData!.defaultDownload"> <el-select class="selectClass" v-model="dialogData.rowData!.defaultDownload">
<div v-for="item in accountOptions" :key="item.label"> <div v-for="item in accountOptions" :key="item.label">

View File

@ -37,6 +37,9 @@
</template> </template>
</el-input> </el-input>
</el-form-item> </el-form-item>
<el-form-item :label="$t('setting.compressPassword')" prop="secret" v-if="form.type === 'tar.gz'">
<el-input v-model="form.secret"></el-input>
</el-form-item>
<el-form-item> <el-form-item>
<el-checkbox v-model="form.replace" :label="$t('file.replace')"></el-checkbox> <el-checkbox v-model="form.replace" :label="$t('file.replace')"></el-checkbox>
</el-form-item> </el-form-item>
@ -79,7 +82,7 @@ const rules = reactive<FormRules>({
const fileForm = ref<FormInstance>(); const fileForm = ref<FormInstance>();
const loading = ref(false); const loading = ref(false);
const form = ref<File.FileCompress>({ files: [], type: 'zip', dst: '', name: '', replace: false }); const form = ref<File.FileCompress>({ files: [], type: 'zip', dst: '', name: '', replace: false, secret: '' });
const options = ref<string[]>([]); const options = ref<string[]>([]);
const open = ref(false); const open = ref(false);
const title = ref(''); const title = ref('');

View File

@ -30,6 +30,9 @@
</template> </template>
</el-input> </el-input>
</el-form-item> </el-form-item>
<el-form-item :label="$t('setting.compressPassword')" prop="secret" v-if="name.includes('tar.gz')">
<el-input v-model="form.secret"></el-input>
</el-form-item>
</el-form> </el-form>
</el-col> </el-col>
</el-row> </el-row>

View File

@ -146,6 +146,9 @@
/> />
</el-select> </el-select>
</el-form-item> </el-form-item>
<el-form-item :label="$t('setting.compressPassword')" prop="secret">
<el-input v-model="snapInfo.secret"></el-input>
</el-form-item>
<el-form-item :label="$t('commons.table.description')" prop="description"> <el-form-item :label="$t('commons.table.description')" prop="description">
<el-input type="textarea" clearable v-model="snapInfo.description" /> <el-input type="textarea" clearable v-model="snapInfo.description" />
</el-form-item> </el-form-item>
@ -231,6 +234,7 @@ let snapInfo = reactive<Setting.SnapshotCreate>({
defaultDownload: '', defaultDownload: '',
fromAccounts: [], fromAccounts: [],
description: '', description: '',
secret: '',
}); });
const cleanData = ref(); const cleanData = ref();

View File

@ -10,6 +10,9 @@
<el-col :span="22"> <el-col :span="22">
<span class="card-title">{{ $t('setting.recover') }}</span> <span class="card-title">{{ $t('setting.recover') }}</span>
<el-divider class="divider" /> <el-divider class="divider" />
<el-form-item :label="$t('setting.compressPassword')" prop="secret">
<el-input v-model="snapInfo.secret"></el-input>
</el-form-item>
<div v-if="!snapInfo.recoverStatus && !snapInfo.lastRecoveredAt"> <div v-if="!snapInfo.recoverStatus && !snapInfo.lastRecoveredAt">
<el-alert center class="alert" style="height: 257px" :closable="false"> <el-alert center class="alert" style="height: 257px" :closable="false">
<el-button size="large" round plain type="primary" @click="recoverSnapshot(true)"> <el-button size="large" round plain type="primary" @click="recoverSnapshot(true)">
@ -197,7 +200,12 @@ const handleClose = () => {
const doRecover = async (isNew: boolean) => { const doRecover = async (isNew: boolean) => {
loading.value = true; loading.value = true;
await snapshotRecover({ id: snapInfo.value.id, isNew: isNew, reDownload: reDownload.value }) await snapshotRecover({
id: snapInfo.value.id,
isNew: isNew,
reDownload: reDownload.value,
secret: snapInfo.value.secret,
})
.then(() => { .then(() => {
emit('search'); emit('search');
loading.value = false; loading.value = false;