Mirror of https://github.com/1Panel-dev/1Panel.git (synced 2025-03-01 03:24:14 +08:00)
fix: Fix ollama model log exception (#8037)
This commit is contained in:
parent 2dec061603 · commit 5963e28b25
@@ -24,7 +24,7 @@ func (b *BaseApi) CreateOllamaModel(c *gin.Context) {
 		return
 	}
 
-	if err := aiToolService.Create(req.Name); err != nil {
+	if err := aiToolService.Create(req); err != nil {
 		helper.BadRequest(c, err)
 		return
 	}
@@ -46,7 +46,7 @@ func (b *BaseApi) RecreateOllamaModel(c *gin.Context) {
 		return
 	}
 
-	if err := aiToolService.Recreate(req.Name); err != nil {
+	if err := aiToolService.Recreate(req); err != nil {
 		helper.BadRequest(c, err)
 		return
 	}
@@ -21,6 +21,7 @@ type OllamaModelDropList struct {
 
 type OllamaModelName struct {
 	Name   string `json:"name"`
+	TaskID string `json:"taskID"`
 }
 
 type OllamaBindDomain struct {
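The hunks above change the create/recreate contract: the handlers now bind the whole OllamaModelName payload (name plus a client-generated taskID) and pass it to the service instead of a bare name string. A minimal sketch of that binding with plain gin and a stubbed service call; the bind/validate helper, route wiring, and service stub below are simplified stand-ins for 1Panel's own code, not the project's actual handlers:

package main

import (
    "net/http"

    "github.com/gin-gonic/gin"
)

// OllamaModelName mirrors the updated DTO: the client now supplies a taskID
// so the backend can attach the pull operation to a trackable task record.
type OllamaModelName struct {
    Name   string `json:"name"`
    TaskID string `json:"taskID"`
}

func main() {
    r := gin.Default()
    // Stand-in for aiToolService.Create(req); the real service starts the pull task.
    create := func(req OllamaModelName) error { return nil }

    r.POST("/ai/ollama/model", func(c *gin.Context) {
        var req OllamaModelName
        if err := c.ShouldBindJSON(&req); err != nil { // 1Panel uses its own bind/validate helpers
            c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
            return
        }
        if err := create(req); err != nil { // the whole DTO is passed down, not just req.Name
            c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
            return
        }
        c.JSON(http.StatusOK, gin.H{})
    })
    _ = r.Run(":8080")
}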
@@ -3,19 +3,20 @@ package service
 import (
 	"context"
 	"fmt"
-	"io"
 	"os"
-	"os/exec"
 	"path"
 	"strings"
+	"time"
 
 	"github.com/1Panel-dev/1Panel/agent/app/dto"
 	"github.com/1Panel-dev/1Panel/agent/app/dto/request"
 	"github.com/1Panel-dev/1Panel/agent/app/model"
 	"github.com/1Panel-dev/1Panel/agent/app/repo"
+	"github.com/1Panel-dev/1Panel/agent/app/task"
 	"github.com/1Panel-dev/1Panel/agent/buserr"
 	"github.com/1Panel-dev/1Panel/agent/constant"
 	"github.com/1Panel-dev/1Panel/agent/global"
+	"github.com/1Panel-dev/1Panel/agent/i18n"
 	"github.com/1Panel-dev/1Panel/agent/utils/cmd"
 	"github.com/1Panel-dev/1Panel/agent/utils/common"
 	"github.com/jinzhu/copier"
@@ -25,9 +26,9 @@ type AIToolService struct{}
 
 type IAIToolService interface {
 	Search(search dto.SearchWithPage) (int64, []dto.OllamaModelInfo, error)
-	Create(name string) error
+	Create(req dto.OllamaModelName) error
 	Close(name string) error
-	Recreate(name string) error
+	Recreate(req dto.OllamaModelName) error
 	Delete(req dto.ForceDelete) error
 	Sync() ([]dto.OllamaModelDropList, error)
 	LoadDetail(name string) (string, error)
@@ -55,8 +56,8 @@ func (u *AIToolService) Search(req dto.SearchWithPage) (int64, []dto.OllamaModel
 		if err := copier.Copy(&item, &itemModel); err != nil {
 			return 0, nil, buserr.WithDetail("ErrStructTransform", err.Error(), nil)
 		}
-		logPath := path.Join(global.Dir.DataDir, "log", "AITools", itemModel.Name)
-		if _, err := os.Stat(logPath); err == nil {
+		taskModel, _ := taskRepo.GetFirst(taskRepo.WithResourceID(item.ID), repo.WithByType(task.TaskScopeAI))
+		if len(taskModel.ID) != 0 {
 			item.LogFileExist = true
 		}
 		dtoLists = append(dtoLists, item)
@@ -79,11 +80,11 @@ func (u *AIToolService) LoadDetail(name string) (string, error) {
 	return stdout, err
 }
 
-func (u *AIToolService) Create(name string) error {
-	if cmd.CheckIllegal(name) {
+func (u *AIToolService) Create(req dto.OllamaModelName) error {
+	if cmd.CheckIllegal(req.Name) {
 		return buserr.New("ErrCmdIllegal")
 	}
-	modelInfo, _ := aiRepo.Get(repo.WithByName(name))
+	modelInfo, _ := aiRepo.Get(repo.WithByName(req.Name))
 	if modelInfo.ID != 0 {
 		return buserr.New("ErrRecordExist")
 	}
@@ -91,25 +92,34 @@ func (u *AIToolService) Create(name string) error {
 	if err != nil {
 		return err
 	}
-	logItem := path.Join(global.Dir.DataDir, "log", "AITools", name)
-	if _, err := os.Stat(path.Dir(logItem)); err != nil && os.IsNotExist(err) {
-		if err = os.MkdirAll(path.Dir(logItem), os.ModePerm); err != nil {
-			return err
-		}
-	}
 	info := model.OllamaModel{
-		Name:   name,
+		Name:   req.Name,
 		From:   "local",
 		Status: constant.StatusWaiting,
 	}
 	if err := aiRepo.Create(&info); err != nil {
 		return err
 	}
-	file, err := os.OpenFile(logItem, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0666)
+	taskItem, err := task.NewTaskWithOps(fmt.Sprintf("ollama-model-%s", req.Name), task.TaskPull, task.TaskScopeAI, req.TaskID, info.ID)
 	if err != nil {
+		global.LOG.Errorf("new task for exec shell failed, err: %v", err)
 		return err
 	}
-	go pullOllamaModel(file, containerName, info)
+	go func() {
+		taskItem.AddSubTask(i18n.GetWithName("OllamaModelPull", req.Name), func(t *task.Task) error {
+			return cmd.ExecShellWithTask(taskItem, time.Hour, "docker", "exec", containerName, "ollama", "pull", info.Name)
+		}, nil)
+		taskItem.AddSubTask(i18n.GetWithName("OllamaModelSize", req.Name), func(t *task.Task) error {
+			itemSize, err := loadModelSize(info.Name, containerName)
+			if len(itemSize) != 0 {
+				_ = aiRepo.Update(info.ID, map[string]interface{}{"status": constant.StatusSuccess, "size": itemSize})
+			} else {
+				_ = aiRepo.Update(info.ID, map[string]interface{}{"status": constant.StatusFailed, "message": err.Error()})
+			}
+			return nil
+		}, nil)
+		_ = taskItem.Execute()
+	}()
 	return nil
 }
 
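The rewritten Create no longer streams docker output into its own log file; it registers the pull and the size lookup as sub-tasks of a task keyed by the client-supplied taskID, so the UI follows progress through the task log. The standalone sketch below only imitates the sequencing semantics of the agent's task package (AddSubTask, Execute); its types, signatures, and names are simplified assumptions, not the real agent/app/task API:

package main

import (
    "fmt"
    "log"
)

// Illustrative stand-in for a task: named sub-tasks run in order and their
// progress is recorded in one log that can be looked up by task ID.
type subTask struct {
    name string
    run  func() error
}

type Task struct {
    ID       string
    subTasks []subTask
}

func (t *Task) AddSubTask(name string, run func() error) {
    t.subTasks = append(t.subTasks, subTask{name: name, run: run})
}

func (t *Task) Execute() error {
    for _, s := range t.subTasks {
        log.Printf("[task %s] start: %s", t.ID, s.name)
        if err := s.run(); err != nil {
            log.Printf("[task %s] failed: %s: %v", t.ID, s.name, err)
            return err
        }
        log.Printf("[task %s] done: %s", t.ID, s.name)
    }
    return nil
}

func main() {
    // The frontend generates the task ID (see the taskID field added above),
    // so it can open the matching task log right after submitting the request.
    t := &Task{ID: "demo-task-id"}
    t.AddSubTask("pull ollama model", func() error {
        fmt.Println("docker exec <container> ollama pull <model>") // placeholder for the real pull
        return nil
    })
    t.AddSubTask("load model size", func() error {
        fmt.Println("docker exec <container> ollama list | grep <model>") // placeholder
        return nil
    })
    _ = t.Execute()
}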
@@ -128,11 +138,11 @@ func (u *AIToolService) Close(name string) error {
 	return nil
 }
 
-func (u *AIToolService) Recreate(name string) error {
-	if cmd.CheckIllegal(name) {
+func (u *AIToolService) Recreate(req dto.OllamaModelName) error {
+	if cmd.CheckIllegal(req.Name) {
 		return buserr.New("ErrCmdIllegal")
 	}
-	modelInfo, _ := aiRepo.Get(repo.WithByName(name))
+	modelInfo, _ := aiRepo.Get(repo.WithByName(req.Name))
 	if modelInfo.ID == 0 {
 		return buserr.New("ErrRecordNotFound")
 	}
@@ -143,17 +153,17 @@ func (u *AIToolService) Recreate(name string) error {
 	if err := aiRepo.Update(modelInfo.ID, map[string]interface{}{"status": constant.StatusWaiting, "from": "local"}); err != nil {
 		return err
 	}
-	logItem := path.Join(global.Dir.DataDir, "log", "AITools", name)
-	if _, err := os.Stat(path.Dir(logItem)); err != nil && os.IsNotExist(err) {
-		if err = os.MkdirAll(path.Dir(logItem), os.ModePerm); err != nil {
-			return err
-		}
-	}
-	file, err := os.OpenFile(logItem, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0666)
+	taskItem, err := task.NewTaskWithOps(fmt.Sprintf("ollama-model-%s", req.Name), task.TaskPull, task.TaskScopeAI, req.TaskID, modelInfo.ID)
 	if err != nil {
+		global.LOG.Errorf("new task for exec shell failed, err: %v", err)
 		return err
 	}
-	go pullOllamaModel(file, containerName, modelInfo)
+	go func() {
+		taskItem.AddSubTask(i18n.GetWithName("OllamaModelPull", req.Name), func(t *task.Task) error {
+			return cmd.ExecShellWithTask(taskItem, time.Hour, "docker", "exec", containerName, "ollama", "pull", req.Name)
+		}, nil)
+		_ = taskItem.Execute()
+	}()
 	return nil
 }
 
@@ -354,22 +364,6 @@ func LoadContainerName() (string, error) {
 	return ollamaBaseInfo.ContainerName, nil
 }
 
-func pullOllamaModel(file *os.File, containerName string, info model.OllamaModel) {
-	defer file.Close()
-	cmd := exec.Command("docker", "exec", containerName, "ollama", "pull", info.Name)
-	multiWriter := io.MultiWriter(os.Stdout, file)
-	cmd.Stdout = multiWriter
-	cmd.Stderr = multiWriter
-	_ = cmd.Run()
-	itemSize, err := loadModelSize(info.Name, containerName)
-	if len(itemSize) != 0 {
-		_ = aiRepo.Update(info.ID, map[string]interface{}{"status": constant.StatusSuccess, "size": itemSize})
-	} else {
-		_ = aiRepo.Update(info.ID, map[string]interface{}{"status": constant.StatusFailed, "message": err.Error()})
-	}
-	_, _ = file.WriteString("ollama pull completed!")
-}
-
 func loadModelSize(name string, containerName string) (string, error) {
 	stdout, err := cmd.Execf("docker exec %s ollama list | grep %s", containerName, name)
 	if err != nil {
@@ -139,7 +139,7 @@ func (u *CronjobService) handleCurl(cronjob model.Cronjob, taskID string) error
 	}
 
 	taskItem.AddSubTask(i18n.GetWithName("HandleShell", cronjob.Name), func(t *task.Task) error {
-		if err := cmd.ExecShellWithTask(taskItem, 24*time.Hour, "bash", "-c", "curl", cronjob.URL); err != nil {
+		if err := cmd.ExecShellWithTask(taskItem, 24*time.Hour, "curl", cronjob.URL); err != nil {
 			return err
 		}
 		return nil
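The handleCurl fix above drops the bash -c wrapper. With bash -c, only the first argument after -c is treated as the command string; later arguments become the script's positional parameters, so the URL was never handed to curl. A small sketch of the two argument lists using plain os/exec (independent of 1Panel's cmd helper; the commands are only constructed here, not run):

package main

import (
    "fmt"
    "os/exec"
)

func main() {
    url := "https://example.com"

    // Old form: bash -c takes "curl" as the command string and the URL only
    // fills $0 of that script, so plain `curl` runs with no URL at all.
    old := exec.Command("bash", "-c", "curl", url)
    fmt.Println(old.Args) // [bash -c curl https://example.com]

    // Fixed form: invoke curl directly so the URL is passed as its argument.
    fixed := exec.Command("curl", url)
    fmt.Println(fixed.Args) // [curl https://example.com]
}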
@@ -610,11 +610,7 @@ func (u *FirewallService) updatePingStatus(enable string) error {
 }
 
 func (u *FirewallService) addPortsBeforeStart(client firewall.FirewallClient) error {
-	serverPort, err := settingRepo.Get(settingRepo.WithByKey("ServerPort"))
-	if err != nil {
-		return err
-	}
-	if err := client.Port(fireClient.FireInfo{Port: serverPort.Value, Protocol: "tcp", Strategy: "accept"}, "add"); err != nil {
+	if err := client.Port(fireClient.FireInfo{Port: global.CONF.Base.Port, Protocol: "tcp", Strategy: "accept"}, "add"); err != nil {
 		return err
 	}
 	if err := client.Port(fireClient.FireInfo{Port: "22", Protocol: "tcp", Strategy: "accept"}, "add"); err != nil {
@@ -68,6 +68,7 @@ const (
 
 const (
 	TaskScopeWebsite  = "Website"
+	TaskScopeAI       = "AI"
 	TaskScopeApp      = "App"
 	TaskScopeRuntime  = "Runtime"
 	TaskScopeDatabase = "Database"
@@ -2,6 +2,7 @@ package constant
 
 const (
 	StatusRunning  = "Running"
+	StatusCanceled = "Canceled"
 	StatusDone     = "Done"
 	StatusWaiting  = "Waiting"
 	StatusSuccess  = "Success"
@@ -312,6 +312,10 @@ SubTask: "子任务"
 RuntimeExtension: "运行环境扩展"
 TaskIsExecuting: "任务正在运行"
 
+# task - ai
+OllamaModelPull: "拉取 Ollama 模型 {{ .name }} "
+OllamaModelSize: "获取 Ollama 模型 {{ .name }} 大小 "
+
 # task - snapshot
 Snapshot: "快照"
 SnapDBInfo: "写入 1Panel 数据库信息"
@@ -18,6 +18,7 @@ func Init() {
 	initGlobalData()
 	handleCronjobStatus()
 	handleSnapStatus()
+	handleOllamaModelStatus()
 
 	loadLocalDir()
 }
@@ -82,6 +83,11 @@ func handleCronjobStatus() {
 	}
 }
 
+func handleOllamaModelStatus() {
+	message := "the task was interrupted due to the restart of the 1panel service"
+	_ = global.DB.Model(&model.OllamaModel{}).Where("status = ?", constant.StatusWaiting).Updates(map[string]interface{}{"status": constant.StatusCanceled, "message": message}).Error
+}
+
 func handleCronJobAlert(cronjob *model.Cronjob) {
 	pushAlert := dto.PushAlert{
 		TaskName: cronjob.Name,
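handleOllamaModelStatus covers restarts: any model whose pull was still Waiting when the 1panel service went down is flipped to Canceled with an explanatory message, so the UI is not left showing a stale in-progress state. A standalone sketch of the same GORM pattern; the in-memory SQLite driver (github.com/glebarez/sqlite) and the trimmed-down model are used only to keep the example self-contained and are not part of the project:

package main

import (
    "fmt"

    "github.com/glebarez/sqlite"
    "gorm.io/gorm"
)

type OllamaModel struct {
    ID      uint
    Name    string
    Status  string
    Message string
}

func main() {
    db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{})
    if err != nil {
        panic(err)
    }
    _ = db.AutoMigrate(&OllamaModel{})
    db.Create(&OllamaModel{Name: "llama3", Status: "Waiting"})

    // Same shape as handleOllamaModelStatus: everything still Waiting at
    // startup is flipped to Canceled with an explanatory message.
    message := "the task was interrupted due to the restart of the 1panel service"
    _ = db.Model(&OllamaModel{}).
        Where("status = ?", "Waiting").
        Updates(map[string]interface{}{"status": "Canceled", "message": message}).Error

    var m OllamaModel
    db.First(&m)
    fmt.Println(m.Status, "-", m.Message) // Canceled - the task was interrupted ...
}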
@@ -146,12 +146,27 @@ func ExecShell(outPath string, timeout time.Duration, name string, arg ...string
 
 type CustomWriter struct {
 	taskItem *task.Task
+	buffer   bytes.Buffer
 }
 
 func (cw *CustomWriter) Write(p []byte) (n int, err error) {
-	cw.taskItem.Log(string(p))
+	cw.buffer.Write(p)
+	lines := strings.Split(cw.buffer.String(), "\n")
+
+	for i := 0; i < len(lines)-1; i++ {
+		cw.taskItem.Log(lines[i])
+	}
+	cw.buffer.Reset()
+	cw.buffer.WriteString(lines[len(lines)-1])
+
 	return len(p), nil
 }
+func (cw *CustomWriter) Flush() {
+	if cw.buffer.Len() > 0 {
+		cw.taskItem.Log(cw.buffer.String())
+		cw.buffer.Reset()
+	}
+}
 func ExecShellWithTask(taskItem *task.Task, timeout time.Duration, name string, arg ...string) error {
 	env := os.Environ()
 	customWriter := &CustomWriter{taskItem: taskItem}
@@ -165,6 +180,7 @@ func ExecShellWithTask(taskItem *task.Task, timeout time.Duration, name string,
 	done := make(chan error, 1)
 	go func() {
 		done <- cmd.Wait()
+		customWriter.Flush()
 	}()
 	after := time.After(timeout)
 	select {
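This is the heart of the log fix: the old CustomWriter.Write handed every raw chunk straight to taskItem.Log, so command output arriving in partial chunks produced fragmented task-log records. The new writer buffers until it sees a newline and the Flush added after cmd.Wait() emits whatever is left. A standalone sketch of the same buffering idea, with the task logger replaced by a plain function:

package main

import (
    "bytes"
    "fmt"
    "strings"
)

// lineWriter mimics the updated CustomWriter: it only emits complete lines
// and keeps any trailing partial line buffered until the next Write or Flush.
type lineWriter struct {
    logLine func(string)
    buffer  bytes.Buffer
}

func (w *lineWriter) Write(p []byte) (int, error) {
    w.buffer.Write(p)
    lines := strings.Split(w.buffer.String(), "\n")
    for i := 0; i < len(lines)-1; i++ {
        w.logLine(lines[i])
    }
    w.buffer.Reset()
    w.buffer.WriteString(lines[len(lines)-1]) // keep the unterminated tail
    return len(p), nil
}

func (w *lineWriter) Flush() {
    if w.buffer.Len() > 0 {
        w.logLine(w.buffer.String())
        w.buffer.Reset()
    }
}

func main() {
    w := &lineWriter{logLine: func(s string) { fmt.Printf("LOG: %q\n", s) }}
    // Output arriving in arbitrary chunks, as it does from a piped command.
    _, _ = w.Write([]byte("pulling manifest\npulling layer... 12"))
    _, _ = w.Write([]byte("%\npulling layer... 100%\nsuccess"))
    w.Flush() // emits the final unterminated line, mirroring the Flush added after cmd.Wait()
}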
@@ -3,13 +3,15 @@ package router
 import (
 	"encoding/base64"
 	"fmt"
-	"github.com/1Panel-dev/1Panel/core/app/repo"
-	"github.com/1Panel-dev/1Panel/core/utils/common"
 	"net/http"
+	"path"
 	"regexp"
 	"strconv"
 	"strings"
 
+	"github.com/1Panel-dev/1Panel/core/app/repo"
+	"github.com/1Panel-dev/1Panel/core/utils/common"
+
 	"github.com/1Panel-dev/1Panel/core/app/service"
 	"github.com/1Panel-dev/1Panel/core/cmd/server/res"
 	"github.com/1Panel-dev/1Panel/core/constant"
@@ -155,7 +157,7 @@ func checkSession(c *gin.Context) bool {
 func setWebStatic(rootRouter *gin.RouterGroup) {
 	rootRouter.StaticFS("/public", http.FS(web.Favicon))
 	rootRouter.StaticFS("/favicon.ico", http.FS(web.Favicon))
-	rootRouter.Static("/api/v2/images", "./uploads")
+	rootRouter.Static("/api/v2/images", path.Join(global.CONF.Base.InstallDir, "1panel/uploads/theme"))
 	rootRouter.Use(func(c *gin.Context) {
 		c.Next()
 	})
@@ -2,11 +2,11 @@ import { AI } from '@/api/interface/ai';
 import http from '@/api';
 import { ResPage } from '../interface';
 
-export const createOllamaModel = (name: string) => {
-    return http.post(`/ai/ollama/model`, { name: name });
+export const createOllamaModel = (name: string, taskID: string) => {
+    return http.post(`/ai/ollama/model`, { name: name, taskID: taskID });
 };
-export const recreateOllamaModel = (name: string) => {
-    return http.post(`/ai/ollama/model/recreate`, { name: name });
+export const recreateOllamaModel = (name: string, taskID: string) => {
+    return http.post(`/ai/ollama/model/recreate`, { name: name, taskID: taskID });
 };
 export const deleteOllamaModel = (ids: Array<number>, force: boolean) => {
     return http.post(`/ai/ollama/model/del`, { ids: ids, forceDelete: force });
@@ -134,6 +134,8 @@ const message = {
     },
     msg: {
         noneData: 'No data available',
+        disConn:
+            'Please click the disconnect button directly to terminate the terminal connection, avoiding the use of exit commands like {0}.',
         delete: 'This operation delete cannot be rolled back. Do you want to continue?',
         clean: 'This operation clean cannot be rolled back. Do you want to continue?',
         deleteSuccess: 'Delete Success',
@@ -126,6 +126,8 @@ const message = {
     },
     msg: {
         noneData: '利用可能なデータはありません',
+        disConn:
+            '端末接続を切断するには、{0} のような終了コマンドを使用せずに、直接切断ボタンをクリックしてください',
         delete: `この操作削除は元に戻すことはできません。続けたいですか?`,
         clean: `この操作は取り消すことはできません。続けたいですか?`,
         deleteSuccess: '正常に削除されました',
@@ -126,6 +126,8 @@ const message = {
     },
     msg: {
         noneData: '데이터가 없습니다',
+        disConn:
+            '종료 명령어인 {0} 등을 사용하지 않고 직접 연결 끊기 버튼을 클릭하여 터미널 연결을 종료해 주십시오.',
         delete: `이 작업은 되돌릴 수 없습니다. 계속하시겠습니까?`,
        clean: `이 작업은 되돌릴 수 없습니다. 계속하시겠습니까?`,
         deleteSuccess: '삭제 완료',
@@ -126,6 +126,8 @@ const message = {
     },
     msg: {
         noneData: 'Tiada data tersedia',
+        disConn:
+            'Sila klik butang putus sambungan secara langsung untuk menamatkan sambungan terminal, mengelakkan penggunaan arahan keluar seperti {0}.',
         delete: 'Operasi ini tidak boleh diundur. Adakah anda mahu meneruskan?',
         clean: 'Operasi ini tidak boleh diundur. Adakah anda mahu meneruskan?',
         deleteSuccess: 'Berjaya dipadam',
@@ -126,6 +126,8 @@ const message = {
     },
     msg: {
         noneData: 'Nenhum dado disponível',
+        disConn:
+            'Por favor, clique diretamente no botão de desconexão para encerrar a conexão do terminal, evitando o uso de comandos de saída como {0}.',
         delete: 'Esta operação de exclusão não pode ser desfeita. Deseja continuar?',
         clean: 'Esta operação de limpeza não pode ser desfeita. Deseja continuar?',
         deleteSuccess: 'Excluído com sucesso',
@@ -126,6 +126,8 @@ const message = {
     },
     msg: {
         noneData: 'Нет данных',
+        disConn:
+            'Пожалуйста, нажмите кнопку отключения, чтобы разорвать соединение с терминалом, избегая использования команд выхода, таких как {0}.',
         delete: 'Эта операция удаления не может быть отменена. Хотите продолжить?',
         clean: 'Эта операция очистки не может быть отменена. Хотите продолжить?',
         deleteSuccess: 'Успешно удалено',
@@ -131,6 +131,7 @@ const message = {
     },
     msg: {
         noneData: '暫無數據',
+        disConn: '請直接點選斷開按鈕斷開終端連接,避免使用 {0} 等退出指令。',
         delete: '刪除 操作不可回滾,是否繼續?',
         clean: '清空 操作不可回滾,是否繼續?',
         deleteSuccess: '刪除成功',
@@ -130,6 +130,7 @@ const message = {
         Rollbacking: '快照回滚中,请稍候...',
     },
     msg: {
+        disConn: '请直接点击断开按钮断开终端连接,避免使用 {0} 等退出命令',
         noneData: '暂无数据',
         delete: '删除 操作不可回滚,是否继续?',
         clean: '清空 操作不可回滚,是否继续?',
@@ -46,6 +46,7 @@ import i18n from '@/lang';
 import { ElForm } from 'element-plus';
 import { MsgSuccess } from '@/utils/message';
 import { createOllamaModel } from '@/api/modules/ai';
+import { newUUID } from '@/utils/util';
 
 const drawerVisible = ref(false);
 const form = reactive({
@@ -63,15 +64,17 @@ const formRef = ref<FormInstance>();
 
 const onSubmit = async (formEl: FormInstance | undefined) => {
     if (!formEl) return;
+    let taskID = newUUID();
     formEl.validate(async (valid) => {
         if (!valid) return;
         let itemName = form.name.replaceAll('ollama run ', '').replaceAll('ollama pull ', '');
-        await createOllamaModel(itemName);
+        await createOllamaModel(itemName, taskID).then(() => {
             drawerVisible.value = false;
             emit('search');
-            emit('log', { logFileExist: true, name: itemName, from: 'local' });
+            emit('log', { logFileExist: true, name: itemName, from: 'local', taskID: taskID });
             MsgSuccess(i18n.global.t('commons.msg.operationSuccess'));
+        });
     });
 };
 
 const goSearch = () => {
@@ -168,13 +168,14 @@
         </template>
     </OpDialog>
     <AddDialog ref="addRef" @search="search" @log="onLoadLog" />
-    <Log ref="logRef" @close="search" />
     <Del ref="delRef" @search="search" />
     <Terminal ref="terminalRef" />
     <Conn ref="connRef" />
     <CodemirrorDialog ref="detailRef" />
     <PortJumpDialog ref="dialogPortJumpRef" />
     <BindDomain ref="bindDomainRef" />
+
+    <TaskLog ref="taskLogRef" width="70%" />
     </div>
 </template>
 
@@ -182,9 +183,9 @@
 import AppStatus from '@/components/app-status/index.vue';
 import AddDialog from '@/views/ai/model/add/index.vue';
 import Conn from '@/views/ai/model/conn/index.vue';
+import TaskLog from '@/components/task-log/index.vue';
 import Terminal from '@/views/ai/model/terminal/index.vue';
 import Del from '@/views/ai/model/del/index.vue';
-import Log from '@/components/log-dialog/index.vue';
 import PortJumpDialog from '@/components/port-jump/index.vue';
 import CodemirrorDialog from '@/components/codemirror-dialog/index.vue';
 import { computed, onMounted, reactive, ref } from 'vue';
@@ -200,7 +201,7 @@ import {
 } from '@/api/modules/ai';
 import { AI } from '@/api/interface/ai';
 import { getAppPort } from '@/api/modules/app';
-import { dateFormat } from '@/utils/util';
+import { dateFormat, newUUID } from '@/utils/util';
 import router from '@/routers';
 import { MsgInfo, MsgSuccess } from '@/utils/message';
 import BindDomain from '@/views/ai/model/domain/index.vue';
@@ -210,7 +211,6 @@ const loading = ref(false);
 const selects = ref<any>([]);
 const maskShow = ref(true);
 const addRef = ref();
-const logRef = ref();
 const detailRef = ref();
 const delRef = ref();
 const connRef = ref();
@@ -220,6 +220,7 @@ const dashboardVisible = ref(false);
 const dialogPortJumpRef = ref();
 const appStatusRef = ref();
 const bindDomainRef = ref();
+const taskLogRef = ref();
 const data = ref();
 const paginationConfig = reactive({
     cacheSizeKey: 'model-page-size',
@@ -355,16 +356,21 @@ const onSubmitDelete = async () => {
 
 const onReCreate = async (name: string) => {
     loading.value = true;
-    await recreateOllamaModel(name)
+    let taskID = newUUID();
+    await recreateOllamaModel(name, taskID)
         .then(() => {
             loading.value = false;
             MsgSuccess(i18n.global.t('commons.msg.operationSuccess'));
+            openTaskLog(taskID);
             search();
         })
         .catch(() => {
             loading.value = false;
         });
 };
+const openTaskLog = (taskID: string) => {
+    taskLogRef.value.openWithTaskID(taskID);
+};
 
 const onDelete = async (row: AI.OllamaModelInfo) => {
     let names = [];
@@ -383,7 +389,7 @@ const onDelete = async (row: AI.OllamaModelInfo) => {
         title: i18n.global.t('commons.button.delete'),
         names: names,
         msg: i18n.global.t('commons.msg.operatorHelper', [
-            i18n.global.t('cronjob.cronTask'),
+            i18n.global.t('aiTools.model.model'),
             i18n.global.t('commons.button.delete'),
         ]),
         api: null,
@@ -392,6 +398,9 @@
 };
 
 const onLoadLog = (row: any) => {
+    if (row.taskID) {
+        openTaskLog(row.taskID);
+    }
     if (row.from === 'remote') {
         MsgInfo(i18n.global.t('aiTools.model.from_remote'));
         return;
@@ -400,7 +409,7 @@ const onLoadLog = (row: any) => {
         MsgInfo(i18n.global.t('aiTools.model.no_logs'));
         return;
     }
-    logRef.value.acceptParams({ id: 0, type: 'ollama-model', name: row.name, tail: true });
+    taskLogRef.value.openWithResourceID('AI', 'TaskPull', row.id);
 };
 
 const buttons = [
@@ -47,7 +47,7 @@ const acceptParams = async (params: DialogProps): Promise<void> => {
 const initTerm = () => {
     nextTick(() => {
         terminalRef.value.acceptParams({
-            endpoint: '/api/v1/ai/ollama/exec',
+            endpoint: '/api/v2/ai/ollama/exec',
             args: `name=${itemName.value}`,
             error: '',
             initCmd: '',
@@ -7,7 +7,13 @@
         size="large"
     >
         <template #content>
-            <el-form ref="formRef" :model="form" label-position="top">
+            <el-alert type="error" :closable="false">
+                <template #title>
+                    <span>{{ $t('commons.msg.disConn', ['exit']) }}</span>
+                </template>
+            </el-alert>
+
+            <el-form ref="formRef" class="mt-2" :model="form" label-position="top">
                 <el-form-item :label="$t('commons.table.user')" prop="user">
                     <el-input placeholder="root" clearable v-model="form.user" />
                 </el-form-item>
@@ -43,7 +49,7 @@
         </el-button>
         <el-button v-else @click="onClose()">{{ $t('commons.button.disConn') }}</el-button>
         <Terminal
-            style="height: calc(100vh - 302px); margin-top: 18px"
+            style="height: calc(100vh - 355px); margin-top: 18px"
             ref="terminalRef"
             v-if="terminalOpen"
         ></Terminal>
@@ -152,8 +152,8 @@
 
             <div v-if="dialogData.rowData!.specCustom">
                 <el-form-item prop="spec">
-                    <div v-for="(spec, index) of dialogData.rowData.specs" :key="index">
-                        <el-input style="width: 80%" v-model="dialogData.rowData.specs[index]" />
+                    <div v-for="(spec, index) of dialogData.rowData.specs" :key="index" class="w-full">
+                        <el-input class="specCustom" v-model="dialogData.rowData.specs[index]" />
                         <el-popover
                             placement="top-start"
                             :title="$t('cronjob.nextTime')"
@@ -677,6 +677,17 @@ const verifyScript = (rule: any, value: any, callback: any) => {
 };
 
 const verifySpec = (rule: any, value: any, callback: any) => {
+    if (dialogData.value.rowData!.specCustom) {
+        for (let i = 0; i < dialogData.value.rowData!.specs.length; i++) {
+            if (dialogData.value.rowData!.specs[i]) {
+                continue;
+            }
+            callback(new Error(i18n.global.t('cronjob.cronSpecRule', [i + 1])));
+            return;
+        }
+        callback();
+        return;
+    }
     if (dialogData.value.rowData!.specObjs.length === 0) {
         callback(new Error(i18n.global.t('commons.rule.requiredInput')));
     }
@@ -792,6 +803,7 @@ const rules = reactive({
         { validator: verifySpec, trigger: 'blur', required: true },
+        { validator: verifySpec, trigger: 'change', required: true },
     ],
     specCustom: [Rules.requiredSelect],
 
     script: [{ validator: verifyScript, trigger: 'blur', required: true }],
     containerName: [Rules.requiredSelect],
@@ -1094,6 +1106,14 @@ defineExpose({
         width: 100% !important;
     }
 }
+.specCustom {
+    width: 80%;
+}
+@media only screen and (max-width: 1000px) {
+    .specCustom {
+        width: 100% !important;
+    }
+}
 .selectClass {
     width: 100%;
 }