package service

import (
	"bufio"
	"encoding/base64"
	"encoding/json"
	"fmt"
	"os"
	"path"
	"strconv"
	"strings"
	"time"

	"github.com/1Panel-dev/1Panel/agent/app/dto"
	"github.com/1Panel-dev/1Panel/agent/app/model"
	"github.com/1Panel-dev/1Panel/agent/app/repo"
	"github.com/1Panel-dev/1Panel/agent/buserr"
	"github.com/1Panel-dev/1Panel/agent/constant"
	"github.com/1Panel-dev/1Panel/agent/global"
	"github.com/1Panel-dev/1Panel/agent/utils/cloud_storage"
	"github.com/1Panel-dev/1Panel/agent/utils/cloud_storage/client"
	"github.com/1Panel-dev/1Panel/agent/utils/encrypt"
	"github.com/1Panel-dev/1Panel/agent/utils/files"

	"github.com/jinzhu/copier"
	"github.com/pkg/errors"
)

type BackupService struct{}

type IBackupService interface {
	CheckUsed(id uint) error
	Sync(req dto.SyncFromMaster) error
	LoadBackupOptions() ([]dto.BackupOption, error)
	SearchWithPage(search dto.SearchPageWithType) (int64, interface{}, error)
	Create(backupDto dto.BackupOperate) error
	GetBuckets(backupDto dto.ForBuckets) ([]interface{}, error)
	Update(req dto.BackupOperate) error
	Delete(id uint) error
	RefreshToken(req dto.OperateByID) error
	GetLocalDir() (string, error)

	MysqlBackup(db dto.CommonBackup) error
	PostgresqlBackup(db dto.CommonBackup) error
	MysqlRecover(db dto.CommonRecover) error
	PostgresqlRecover(db dto.CommonRecover) error
	MysqlRecoverByUpload(req dto.CommonRecover) error
	PostgresqlRecoverByUpload(req dto.CommonRecover) error

	RedisBackup(db dto.CommonBackup) error
	RedisRecover(db dto.CommonRecover) error

	WebsiteBackup(db dto.CommonBackup) error
	WebsiteRecover(req dto.CommonRecover) error

	AppBackup(db dto.CommonBackup) (*model.BackupRecord, error)
	AppRecover(req dto.CommonRecover) error
}

func NewIBackupService() IBackupService {
	return &BackupService{}
}

func (u *BackupService) GetLocalDir() (string, error) {
	account, err := backupRepo.Get(repo.WithByType(constant.Local))
	if err != nil {
		return "", err
	}
	return account.BackupPath, nil
}

// SearchWithPage lists backup accounts for the UI. Depending on RememberAuth the access key
// and credential are either blanked or returned base64 encoded, and OAuth refresh tokens are
// stripped from the returned vars.
func (u *BackupService) SearchWithPage(req dto.SearchPageWithType) (int64, interface{}, error) {
	options := []repo.DBOption{repo.WithOrderBy("created_at desc")}
	if len(req.Type) != 0 {
		options = append(options, repo.WithByType(req.Type))
	}
	if len(req.Info) != 0 {
		options = append(options, repo.WithByName(req.Info))
	}
	count, accounts, err := backupRepo.Page(req.Page, req.PageSize, options...)
	if err != nil {
		return 0, nil, err
	}
	var data []dto.BackupInfo
	for _, account := range accounts {
		var item dto.BackupInfo
		if err := copier.Copy(&item, &account); err != nil {
			global.LOG.Errorf("copy backup account to dto backup info failed, err: %v", err)
		}
		if item.Type != constant.Sftp && item.Type != constant.Local {
			item.BackupPath = path.Join("/", strings.TrimPrefix(item.BackupPath, "/"))
		}
		if !item.RememberAuth {
			item.AccessKey = ""
			item.Credential = ""
			if account.Type == constant.Sftp {
				varMap := make(map[string]interface{})
				if err := json.Unmarshal([]byte(item.Vars), &varMap); err != nil {
					continue
				}
				delete(varMap, "passPhrase")
				itemVars, _ := json.Marshal(varMap)
				item.Vars = string(itemVars)
			}
		} else {
			item.AccessKey = base64.StdEncoding.EncodeToString([]byte(item.AccessKey))
			item.Credential = base64.StdEncoding.EncodeToString([]byte(item.Credential))
		}

		if account.Type == constant.OneDrive || account.Type == constant.ALIYUN || account.Type == constant.GoogleDrive {
			varMap := make(map[string]interface{})
			if err := json.Unmarshal([]byte(item.Vars), &varMap); err != nil {
				continue
			}
			delete(varMap, "refresh_token")
			delete(varMap, "drive_id")
			itemVars, _ := json.Marshal(varMap)
			item.Vars = string(itemVars)
		}
		data = append(data, item)
	}
	return count, data, nil
}

// Create registers a new backup account. Local accounts are managed by the system and cannot
// be created here. Incoming credentials are base64 decoded, OneDrive and Google Drive accounts
// exchange their authorization code for a refresh token, the connection is verified by a test
// upload, and the access key and credential are stored encrypted.
func (u *BackupService) Create(req dto.BackupOperate) error {
	if req.Type == constant.Local {
		return buserr.New(constant.ErrBackupLocalCreate)
	}
	if req.Type != constant.Sftp && req.BackupPath != "/" {
		req.BackupPath = strings.TrimPrefix(req.BackupPath, "/")
	}
	backup, _ := backupRepo.Get(repo.WithByName(req.Name))
	if backup.ID != 0 {
		return constant.ErrRecordExist
	}
	if err := copier.Copy(&backup, &req); err != nil {
		return errors.WithMessage(constant.ErrStructTransform, err.Error())
	}
	itemAccessKey, err := base64.StdEncoding.DecodeString(backup.AccessKey)
	if err != nil {
		return err
	}
	backup.AccessKey = string(itemAccessKey)
	itemCredential, err := base64.StdEncoding.DecodeString(backup.Credential)
	if err != nil {
		return err
	}
	backup.Credential = string(itemCredential)

	if req.Type == constant.OneDrive || req.Type == constant.GoogleDrive {
		if err := loadRefreshTokenByCode(&backup); err != nil {
			return err
		}
	}
	if req.Type != constant.Local {
		isOk, err := u.checkBackupConn(&backup)
		if err != nil || !isOk {
			if err == nil {
				err = errors.New("backup account connection check failed")
			}
			return buserr.WithMap(constant.ErrBackupCheck, map[string]interface{}{"err": err.Error()}, err)
		}
	}

	backup.AccessKey, err = encrypt.StringEncrypt(backup.AccessKey)
	if err != nil {
		return err
	}
	backup.Credential, err = encrypt.StringEncrypt(backup.Credential)
	if err != nil {
		return err
	}
	if err := backupRepo.Create(&backup); err != nil {
		return err
	}
	return nil
}
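
// A minimal create call as a caller might issue it — a sketch only; the exact
// dto.BackupOperate field set and the values shown here are illustrative, but the access
// key and credential must arrive base64 encoded, as decoded above:
//
//	err := NewIBackupService().Create(dto.BackupOperate{
//		Name:       "offsite-s3",
//		Type:       constant.S3,
//		BackupPath: "/1panel",
//		AccessKey:  base64.StdEncoding.EncodeToString([]byte("AKIA...")),
//		Credential: base64.StdEncoding.EncodeToString([]byte("secret")),
//	})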

func (u *BackupService) GetBuckets(req dto.ForBuckets) ([]interface{}, error) {
	itemAccessKey, err := base64.StdEncoding.DecodeString(req.AccessKey)
	if err != nil {
		return nil, err
	}
	req.AccessKey = string(itemAccessKey)
	itemCredential, err := base64.StdEncoding.DecodeString(req.Credential)
	if err != nil {
		return nil, err
	}
	req.Credential = string(itemCredential)

	varMap := make(map[string]interface{})
	if err := json.Unmarshal([]byte(req.Vars), &varMap); err != nil {
		return nil, err
	}
	switch req.Type {
	case constant.Sftp, constant.WebDAV:
		varMap["username"] = req.AccessKey
		varMap["password"] = req.Credential
	case constant.OSS, constant.S3, constant.MinIo, constant.Cos, constant.Kodo:
		varMap["accessKey"] = req.AccessKey
		varMap["secretKey"] = req.Credential
	}
	client, err := cloud_storage.NewCloudStorageClient(req.Type, varMap)
	if err != nil {
		return nil, err
	}
	return client.ListBuckets()
}

func (u *BackupService) Delete(id uint) error {
	backup, _ := backupRepo.Get(repo.WithByID(id))
	if backup.ID == 0 {
		return constant.ErrRecordNotFound
	}
	if backup.Type == constant.Local {
		return buserr.New(constant.ErrBackupLocalDelete)
	}
	return backupRepo.Delete(repo.WithByID(id))
}

// Update modifies an existing backup account. Credentials arrive base64 encoded and are
// decoded, the connection is re-checked for non-local accounts, and the values are encrypted
// before being saved.
func (u *BackupService) Update(req dto.BackupOperate) error {
	backup, _ := backupRepo.Get(repo.WithByID(req.ID))
	if backup.ID == 0 {
		return constant.ErrRecordNotFound
	}
	if req.Type != constant.Sftp && req.Type != constant.Local && req.BackupPath != "/" {
		req.BackupPath = strings.TrimPrefix(req.BackupPath, "/")
	}
	var newBackup model.BackupAccount
	if err := copier.Copy(&newBackup, &req); err != nil {
		return errors.WithMessage(constant.ErrStructTransform, err.Error())
	}
	itemAccessKey, err := base64.StdEncoding.DecodeString(newBackup.AccessKey)
	if err != nil {
		return err
	}
	newBackup.AccessKey = string(itemAccessKey)
	itemCredential, err := base64.StdEncoding.DecodeString(newBackup.Credential)
	if err != nil {
		return err
	}
	newBackup.Credential = string(itemCredential)
	if backup.Type == constant.Local {
		if newBackup.BackupPath != backup.BackupPath {
			oldPath := backup.BackupPath
			newPath := newBackup.BackupPath
			if strings.HasSuffix(newPath, "/") && newPath != "/" {
				newPath = newPath[:strings.LastIndex(newPath, "/")]
			}
			if err := files.NewFileOp().CopyDir(oldPath, newPath); err != nil {
				return err
			}
		}
	}

	if newBackup.Type == constant.OneDrive || newBackup.Type == constant.GoogleDrive {
		if err := loadRefreshTokenByCode(&newBackup); err != nil {
			return err
		}
	}
	if backup.Type != constant.Local {
		isOk, err := u.checkBackupConn(&newBackup)
		if err != nil || !isOk {
			if err == nil {
				err = errors.New("backup account connection check failed")
			}
			return buserr.WithMap(constant.ErrBackupCheck, map[string]interface{}{"err": err.Error()}, err)
		}
	}

	newBackup.AccessKey, err = encrypt.StringEncrypt(newBackup.AccessKey)
	if err != nil {
		return err
	}
	newBackup.Credential, err = encrypt.StringEncrypt(newBackup.Credential)
	if err != nil {
		return err
	}
	newBackup.ID = backup.ID
	if err := backupRepo.Save(&newBackup); err != nil {
		return err
	}
	return nil
}

// RefreshToken renews the stored OAuth refresh token for OneDrive, Google Drive and Aliyun
// drive accounts and records the refresh status and time in the account vars.
func (u *BackupService) RefreshToken(req dto.OperateByID) error {
	backup, _ := backupRepo.Get(repo.WithByID(req.ID))
	if backup.ID == 0 {
		return constant.ErrRecordNotFound
	}
	varMap := make(map[string]interface{})
	if err := json.Unmarshal([]byte(backup.Vars), &varMap); err != nil {
		return fmt.Errorf("Failed to refresh %s - %s token, please retry, err: %v", backup.Type, backup.Name, err)
	}
	var (
		refreshToken string
		err          error
	)
	switch backup.Type {
	case constant.OneDrive:
		refreshToken, err = client.RefreshToken("refresh_token", "refreshToken", varMap)
	case constant.GoogleDrive:
		refreshToken, err = client.RefreshGoogleToken("refresh_token", "refreshToken", varMap)
	case constant.ALIYUN:
		refreshToken, err = client.RefreshALIToken(varMap)
	}
	if err != nil {
		varMap["refresh_status"] = constant.StatusFailed
		varMap["refresh_msg"] = err.Error()
		varsItem, _ := json.Marshal(varMap)
		backup.Vars = string(varsItem)
		_ = backupRepo.Save(&backup)
		return fmt.Errorf("Failed to refresh %s - %s token, please retry, err: %v", backup.Type, backup.Name, err)
	}
	varMap["refresh_status"] = constant.StatusSuccess
	varMap["refresh_time"] = time.Now().Format(constant.DateTimeLayout)
	varMap["refresh_token"] = refreshToken

	varsItem, _ := json.Marshal(varMap)
	backup.Vars = string(varsItem)
	return backupRepo.Save(&backup)
}
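
// Keys maintained inside the account vars JSON by RefreshToken and loadRefreshTokenByCode:
// "refresh_token", "refresh_status", "refresh_time", "refresh_msg" and, for the initial
// exchange, the one-time "code" that loadRefreshTokenByCode removes after use.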

// checkBackupConn verifies that a backup account is reachable by writing a small test file
// locally and uploading it to <backupPath>/test/1panel on the remote storage.
func (u *BackupService) checkBackupConn(backup *model.BackupAccount) (bool, error) {
	client, err := newClient(backup)
	if err != nil {
		return false, err
	}
	fileItem := path.Join(global.CONF.System.BaseDir, "1panel/tmp/test/1panel")
	if _, err := os.Stat(path.Dir(fileItem)); err != nil && os.IsNotExist(err) {
		if err = os.MkdirAll(path.Dir(fileItem), os.ModePerm); err != nil {
			return false, err
		}
	}
	file, err := os.OpenFile(fileItem, os.O_WRONLY|os.O_CREATE, constant.FilePerm)
	if err != nil {
		return false, err
	}
	defer file.Close()
	write := bufio.NewWriter(file)
	// The sample lines mix scripts, so the upload also covers multi-byte content.
	_, _ = write.WriteString("1Panel 备份账号测试文件。\n")
	_, _ = write.WriteString("1Panel 備份賬號測試文件。\n")
	_, _ = write.WriteString("1Panel Backs up account test files.\n")
	_, _ = write.WriteString("1Panelアカウントのテストファイルをバックアップします。\n")
	write.Flush()

	targetPath := path.Join(backup.BackupPath, "test/1panel")
	if backup.Type != constant.Sftp && backup.Type != constant.Local && targetPath != "/" {
		targetPath = strings.TrimPrefix(targetPath, "/")
	}
	return client.Upload(fileItem, targetPath)
}

// Sync applies a backup-account change pushed from the master node. req.Data carries the
// JSON-encoded model.BackupAccount, req.Operation is one of create, delete or update, and
// credentials are re-encrypted locally before persisting.
func (u *BackupService) Sync(req dto.SyncFromMaster) error {
	var accountItem model.BackupAccount
	if err := json.Unmarshal([]byte(req.Data), &accountItem); err != nil {
		return err
	}
	accountItem.AccessKey, _ = encrypt.StringEncryptWithBase64(accountItem.AccessKey)
	accountItem.Credential, _ = encrypt.StringEncryptWithBase64(accountItem.Credential)
	account, _ := backupRepo.Get(repo.WithByName(req.Name))
	switch req.Operation {
	case "create":
		if account.ID != 0 {
			accountItem.ID = account.ID
			return backupRepo.Save(&accountItem)
		}
		return backupRepo.Create(&accountItem)
	case "delete":
		if account.ID == 0 {
			return constant.ErrRecordNotFound
		}
		return backupRepo.Delete(repo.WithByID(account.ID))
	case "update":
		if account.ID == 0 {
			return constant.ErrRecordNotFound
		}
		accountItem.ID = account.ID
		return backupRepo.Save(&accountItem)
	default:
		return fmt.Errorf("unsupported operation %s", req.Operation)
	}
}
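
// Shape of a sync request as consumed above — a sketch only; the field values and the JSON
// keys inside Data are illustrative, not a definitive payload:
//
//	req := dto.SyncFromMaster{
//		Name:      "offsite-s3",
//		Operation: "update",
//		Data:      `{"name":"offsite-s3","type":"S3","bucket":"backups"}`,
//	}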

func (u *BackupService) LoadBackupOptions() ([]dto.BackupOption, error) {
	accounts, err := backupRepo.List(repo.WithOrderBy("created_at desc"))
	if err != nil {
		return nil, err
	}
	var data []dto.BackupOption
	for _, account := range accounts {
		var item dto.BackupOption
		if err := copier.Copy(&item, &account); err != nil {
			global.LOG.Errorf("copy backup account to dto backup option failed, err: %v", err)
		}
		data = append(data, item)
	}
	return data, nil
}

// CheckUsed reports whether a backup account is still referenced by any cronjob, either as
// the download target or as one of the source accounts.
func (u *BackupService) CheckUsed(id uint) error {
	cronjobs, _ := cronjobRepo.List()
	for _, job := range cronjobs {
		if job.DownloadAccountID == id {
			return buserr.New(constant.ErrBackupInUsed)
		}
		ids := strings.Split(job.SourceAccountIDs, ",")
		for _, idItem := range ids {
			if idItem == fmt.Sprintf("%v", id) {
				return buserr.New(constant.ErrBackupInUsed)
			}
		}
	}
	return nil
}

func NewBackupClientWithID(id uint) (*model.BackupAccount, cloud_storage.CloudStorageClient, error) {
	account, _ := backupRepo.Get(repo.WithByID(id))
	backClient, err := newClient(&account)
	if err != nil {
		return nil, nil, err
	}
	return &account, backClient, nil
}
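
// A minimal usage sketch (the account ID and file paths here are illustrative, not part of
// the service):
//
//	account, cli, err := NewBackupClientWithID(1)
//	if err != nil {
//		return err
//	}
//	ok, err := cli.Upload("/opt/1panel/tmp/demo.tar.gz", path.Join(account.BackupPath, "demo.tar.gz"))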

type backupClientHelper struct {
	id          uint
	accountType string
	name        string
	backupPath  string
	client      cloud_storage.CloudStorageClient
}

// NewBackupClientMap builds a storage client for each backup account ID in ids and returns
// them keyed by the account ID string.
func NewBackupClientMap(ids []string) (map[string]backupClientHelper, error) {
	var accounts []model.BackupAccount
	var idItems []uint
	for i := 0; i < len(ids); i++ {
		item, _ := strconv.Atoi(ids[i])
		idItems = append(idItems, uint(item))
	}
	accounts, _ = backupRepo.List(repo.WithByIDs(idItems))
	clientMap := make(map[string]backupClientHelper)
	for _, item := range accounts {
		backClient, err := newClient(&item)
		if err != nil {
			return nil, err
		}
		clientMap[fmt.Sprintf("%v", item.ID)] = backupClientHelper{
			client:      backClient,
			name:        item.Name,
			backupPath:  item.BackupPath,
			accountType: item.Type,
			id:          item.ID,
		}
	}
	return clientMap, nil
}
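
// Sketch of iterating the helper map (the account IDs are illustrative):
//
//	clients, err := NewBackupClientMap([]string{"1", "3"})
//	if err != nil {
//		return err
//	}
//	for id, helper := range clients {
//		fmt.Printf("account %s (%s) uploads to %s\n", id, helper.accountType, helper.backupPath)
//	}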

// newClient decrypts the account credentials, merges them with the stored vars under the key
// names expected by each storage type, and builds the cloud storage client.
func newClient(account *model.BackupAccount) (cloud_storage.CloudStorageClient, error) {
	varMap := make(map[string]interface{})
	if len(account.Vars) != 0 {
		if err := json.Unmarshal([]byte(account.Vars), &varMap); err != nil {
			return nil, err
		}
	}
	varMap["bucket"] = account.Bucket
	varMap["backupPath"] = account.BackupPath
	account.AccessKey, _ = encrypt.StringDecrypt(account.AccessKey)
	account.Credential, _ = encrypt.StringDecrypt(account.Credential)
	switch account.Type {
	case constant.Sftp, constant.WebDAV:
		varMap["username"] = account.AccessKey
		varMap["password"] = account.Credential
	case constant.OSS, constant.S3, constant.MinIo, constant.Cos, constant.Kodo:
		varMap["accessKey"] = account.AccessKey
		varMap["secretKey"] = account.Credential
	case constant.UPYUN:
		varMap["operator"] = account.AccessKey
		varMap["password"] = account.Credential
	}

	client, err := cloud_storage.NewCloudStorageClient(account.Type, varMap)
	if err != nil {
		return nil, err
	}
	return client, nil
}
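
// For an S3-style account the map handed to NewCloudStorageClient ends up roughly like the
// sketch below; any endpoint/region style fields come from account.Vars and depend on the
// provider, so they are not spelled out here:
//
//	varMap = map[string]interface{}{
//		"bucket":     account.Bucket,
//		"backupPath": account.BackupPath,
//		"accessKey":  "<decrypted access key>",
//		"secretKey":  "<decrypted credential>",
//		// ...plus whatever keys were stored in account.Vars
//	}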

// loadRefreshTokenByCode exchanges the authorization code stored in the account vars for a
// refresh token (Google Drive or OneDrive), then rewrites vars with the token and refresh status.
func loadRefreshTokenByCode(backup *model.BackupAccount) error {
	varMap := make(map[string]interface{})
	if err := json.Unmarshal([]byte(backup.Vars), &varMap); err != nil {
		return fmt.Errorf("unmarshal backup vars failed, err: %v", err)
	}
	refreshToken := ""
	var err error
	if backup.Type == constant.GoogleDrive {
		refreshToken, err = client.RefreshGoogleToken("authorization_code", "refreshToken", varMap)
		if err != nil {
			return err
		}
	} else {
		refreshToken, err = client.RefreshToken("authorization_code", "refreshToken", varMap)
		if err != nil {
			return err
		}
	}
	delete(varMap, "code")
	varMap["refresh_status"] = constant.StatusSuccess
	varMap["refresh_time"] = time.Now().Format(constant.DateTimeLayout)
	varMap["refresh_token"] = refreshToken
	itemVars, err := json.Marshal(varMap)
	if err != nil {
		return fmt.Errorf("json marshal var map failed, err: %v", err)
	}
	backup.Vars = string(itemVars)
	return nil
}

func loadBackupNamesByID(accountIDs string, downloadID uint) ([]string, string, error) {
	accountIDList := strings.Split(accountIDs, ",")
	var ids []uint
	for _, item := range accountIDList {
		if len(item) != 0 {
			itemID, _ := strconv.Atoi(item)
			ids = append(ids, uint(itemID))
		}
	}
	list, err := backupRepo.List(repo.WithByIDs(ids))
	if err != nil {
		return nil, "", err
	}
	var accounts []string
	var downloadAccount string
	for _, item := range list {
		itemName := fmt.Sprintf("%s - %s", item.Type, item.Name)
		accounts = append(accounts, itemName)
		if item.ID == downloadID {
			downloadAccount = itemName
		}
	}
	return accounts, downloadAccount, nil
}