@@ -485,6 +485,12 @@ func QueryModel(opts *AiModelQueryOptions) ([]*AiModelManage, int64, error) { | |||
return aiModelManages, count, nil | |||
} | |||
func QueryModelConvertCountByRepoID(repoId int64) int64 { | |||
convert := new(AiModelConvert) | |||
total, _ := x.Where("repo_id =?", repoId).Count(convert) | |||
return total | |||
} | |||
func QueryModelConvertByRepoID(repoId int64) ([]*AiModelConvert, error) { | |||
sess := x.NewSession() | |||
defer sess.Close() | |||
@@ -1828,7 +1828,7 @@ func QueryModelTrainJobVersionList(jobId string) ([]*Cloudbrain, int, error) { | |||
return cloudbrains, int(len(cloudbrains)), nil | |||
} | |||
func QueryModelTrainJobList(repoId int64) ([]*CloudbrainInfo, int, error) { | |||
func QueryModelTrainJobList(repoId int64) ([]*Cloudbrain, int, error) { | |||
sess := x.NewSession() | |||
defer sess.Close() | |||
var cond = builder.NewCond() | |||
@@ -1845,14 +1845,14 @@ func QueryModelTrainJobList(repoId int64) ([]*CloudbrainInfo, int, error) { | |||
// builder.In("type", 0, 1), | |||
// ) | |||
cloudbrains := make([]*CloudbrainInfo, 0) | |||
cloudbrains := make([]*Cloudbrain, 0) | |||
if err := sess.Select("job_id,display_job_name").Table(&Cloudbrain{}).Where(cond).OrderBy("created_unix DESC"). | |||
Find(&cloudbrains); err != nil { | |||
return nil, 0, fmt.Errorf("Find: %v", err) | |||
} | |||
keys := make(map[string]string) | |||
uniqueElements := make([]*CloudbrainInfo, 0) | |||
uniqueElements := make([]*Cloudbrain, 0) | |||
for _, entry := range cloudbrains { | |||
if _, value := keys[entry.JobID]; !value { | |||
keys[entry.JobID] = entry.DisplayJobName | |||
@@ -2131,6 +2131,15 @@ func GetCloudbrainByName(jobName string) (*Cloudbrain, error) { | |||
cb := &Cloudbrain{JobName: jobName} | |||
return getRepoCloudBrain(cb) | |||
} | |||
func GetWaitOrRunFileNotebookByRepo(repoId int64, cloudbrainType int) (*Cloudbrain, error) { | |||
cloudBrain := new(Cloudbrain) | |||
has, err := x.In("status", JobWaiting, JobRunning, ModelArtsCreateQueue, ModelArtsCreating, ModelArtsStarting, | |||
ModelArtsReadyToStart, ModelArtsResizing, ModelArtsStartQueuing, ModelArtsRunning, ModelArtsDeleting, ModelArtsRestarting).Where("repo_id=? and type=? and boot_file!=''", repoId, cloudbrainType).Get(cloudBrain) | |||
if has { | |||
return cloudBrain, err | |||
} | |||
return nil, err | |||
} | |||
func CanDelJob(isSigned bool, user *User, job *CloudbrainInfo) bool { | |||
if !isSigned || (job.Status != string(JobStopped) && job.Status != string(JobFailed) && job.Status != string(ModelArtsStartFailed) && job.Status != string(ModelArtsCreateFailed)) { | |||
@@ -36,7 +36,7 @@ type RepoStatistic struct { | |||
NumDevMonths int64 `xorm:"NOT NULL DEFAULT 0" json:"-"` | |||
RepoSize int64 `xorm:"NOT NULL DEFAULT 0" json:"-"` | |||
DatasetSize int64 `xorm:"NOT NULL DEFAULT 0" json:"-"` | |||
NumModels int64 `xorm:"NOT NULL DEFAULT 0" json:"-"` | |||
NumModels int64 `xorm:"NOT NULL DEFAULT 0" json:"model"` | |||
NumWikiViews int64 `xorm:"NOT NULL DEFAULT 0" json:"-"` | |||
NumCommits int64 `xorm:"NOT NULL DEFAULT 0" json:"commit"` | |||
NumCommitsAdded int64 `xorm:"NOT NULL DEFAULT 0" json:"-"` | |||
@@ -55,6 +55,15 @@ type RepoStatistic struct { | |||
NumIssuesGrowth int64 `xorm:"NOT NULL DEFAULT 0" json:"-"` | |||
NumCommentsGrowth int64 `xorm:"NOT NULL DEFAULT 0" json:"-"` | |||
NumDatasetFile int64 `xorm:"NOT NULL DEFAULT 0" json:"datasetFiles"` | |||
NumCloudbrain int64 `xorm:"NOT NULL DEFAULT 0" json:"cloudbrains"` | |||
NumModelConvert int64 `xorm:"NOT NULL DEFAULT 0" json:"modelConverts"` | |||
NumDatasetFileAdded int64 `xorm:"NOT NULL DEFAULT 0" json:"-"` | |||
NumCloudbrainAdded int64 `xorm:"NOT NULL DEFAULT 0" json:"-"` | |||
NumModelConvertAdded int64 `xorm:"NOT NULL DEFAULT 0" json:"- "` | |||
NumModelsAdded int64 `xorm:"NOT NULL DEFAULT 0" json:"- "` | |||
Impact float64 `xorm:"NOT NULL DEFAULT 0" json:"impact"` | |||
Completeness float64 `xorm:"NOT NULL DEFAULT 0" json:"completeness"` | |||
Liveness float64 `xorm:"NOT NULL DEFAULT 0" json:"liveness"` | |||
@@ -709,6 +709,7 @@ type GenerateModelArtsNotebookReq struct { | |||
ImageId string | |||
AutoStopDurationMs int64 | |||
BranchName string | |||
Spec *models.Specification | |||
ModelName string | |||
@@ -34,8 +34,7 @@ func ToCloudBrain(task *models.Cloudbrain) *api.Cloudbrain { | |||
StartTime: int64(task.StartTime), | |||
EndTime: int64(task.EndTime), | |||
Spec: ToSpecification(task.Spec), | |||
Spec: ToSpecification(task.Spec), | |||
} | |||
} | |||
func ToAttachment(attachment *models.Attachment) *api.AttachmentShow { | |||
@@ -89,6 +88,9 @@ func ToDataset(dataset *models.Dataset) *api.Dataset { | |||
} | |||
func ToSpecification(s *models.Specification) *api.SpecificationShow { | |||
if s == nil { | |||
return nil | |||
} | |||
return &api.SpecificationShow{ | |||
ID: s.ID, | |||
AccCardsNum: s.AccCardsNum, | |||
@@ -1,13 +1,9 @@ | |||
package modelarts | |||
import ( | |||
"encoding/base64" | |||
"encoding/json" | |||
"errors" | |||
"fmt" | |||
"io/ioutil" | |||
"net/http" | |||
"path" | |||
"strconv" | |||
"strings" | |||
@@ -239,6 +235,7 @@ func GenerateNotebook2(ctx *context.Context, req cloudbrain.GenerateModelArtsNot | |||
ComputeResource: models.NPUResource, | |||
Image: imageName, | |||
BootFile: req.BootFile, | |||
BranchName: req.BranchName, | |||
Description: req.Description, | |||
CreatedUnix: createTime, | |||
UpdatedUnix: createTime, | |||
@@ -830,10 +827,6 @@ func HandleNotebookInfo(task *models.Cloudbrain) error { | |||
task.FlavorCode = result.Flavor | |||
} | |||
if oldStatus != task.Status && task.Status == string(models.ModelArtsRunning) && task.BootFile != "" { | |||
uploadNoteBookFile(task, result) | |||
} | |||
err = models.UpdateJob(task) | |||
if err != nil { | |||
log.Error("UpdateJob(%s) failed:%v", task.DisplayJobName, err) | |||
@@ -844,81 +837,6 @@ func HandleNotebookInfo(task *models.Cloudbrain) error { | |||
return nil | |||
} | |||
func uploadNoteBookFile(task *models.Cloudbrain, result *models.GetNotebook2Result) { | |||
jupyterUrl := result.Url + "?token=" + result.Token | |||
cookies, xsrf := getCookiesAndCsrf(jupyterUrl) | |||
if xsrf == "" { | |||
log.Error("browser jupyterUrl failed:%v", task.DisplayJobName) | |||
} else { | |||
codePath := setting.JobPath + task.JobName + cloudbrain.CodeMountPath | |||
fileContents, err := ioutil.ReadFile(codePath + "/" + task.BootFile) | |||
if err != nil { | |||
log.Error("read jupyter file failed:%v", task.DisplayJobName, err) | |||
} | |||
base64Content := base64.StdEncoding.EncodeToString(fileContents) | |||
client := getRestyClient() | |||
uploadUrl := getJupyterBaseUrl(result.Url) + "api/contents/" + path.Base(task.BootFile) | |||
res, err := client.R(). | |||
SetCookies(cookies). | |||
SetHeader("X-XSRFToken", xsrf). | |||
SetBody(map[string]interface{}{ | |||
"type": "file", | |||
"format": "base64", | |||
"name": path.Base(task.BootFile), | |||
"path": path.Base(task.BootFile), | |||
"content": base64Content}). | |||
Put(uploadUrl) | |||
if err != nil { | |||
log.Error("upload jupyter file failed:%v", task.DisplayJobName, err) | |||
} else if res.StatusCode() != http.StatusCreated { | |||
log.Error("upload jupyter file failed:%v", task.DisplayJobName, err) | |||
} | |||
} | |||
} | |||
// getJupyterBaseUrl drops the last path segment from url, yielding the
// base (trailing slash included) that Jupyter API paths are appended to.
func getJupyterBaseUrl(url string) string {
	return url[:len(url)-len(path.Base(url))]
}
func getCookiesAndCsrf(jupyterUrl string) ([]*http.Cookie, string) { | |||
log.Info("jupyter url:" + jupyterUrl) | |||
var cookies []*http.Cookie | |||
const retryTimes = 10 | |||
for i := 0; i < retryTimes; i++ { | |||
res, err := http.Get(jupyterUrl) | |||
if err != nil { | |||
log.Error("browser jupyterUrl failed.", err) | |||
if i == retryTimes-1 { | |||
return cookies, "" | |||
} | |||
} else { | |||
cookies = res.Cookies() | |||
xsrf := "" | |||
for _, cookie := range cookies { | |||
if cookie.Name == "_xsrf" { | |||
xsrf = cookie.Value | |||
break | |||
} | |||
} | |||
if xsrf != "" { | |||
return cookies, xsrf | |||
} | |||
} | |||
} | |||
return cookies, "" | |||
} | |||
func SyncTempStatusJob() { | |||
jobs, err := models.GetCloudBrainTempJobs() | |||
if err != nil { | |||
@@ -148,6 +148,7 @@ func GenerateNotebook(ctx *context.Context, req cloudbrain.GenerateModelArtsNote | |||
UpdatedUnix: createTime, | |||
Spec: req.Spec, | |||
BootFile: req.BootFile, | |||
BranchName: req.BranchName, | |||
ModelName: req.ModelName, | |||
ModelVersion: req.ModelVersion, | |||
LabelName: req.LabelName, | |||
@@ -0,0 +1,198 @@ | |||
package notebook | |||
import ( | |||
"crypto/tls" | |||
"encoding/base64" | |||
"fmt" | |||
"io/ioutil" | |||
"net/http" | |||
"path" | |||
"strings" | |||
"github.com/go-resty/resty/v2" | |||
"code.gitea.io/gitea/models" | |||
"code.gitea.io/gitea/modules/cloudbrain" | |||
"code.gitea.io/gitea/modules/setting" | |||
"code.gitea.io/gitea/modules/log" | |||
) | |||
var restyClient *resty.Client | |||
// NotebookApiResponse is the subset of the Jupyter contents-API
// response body consumed by this package.
type NotebookApiResponse struct {
	Name string `json:"name"`
	Path string `json:"path"`
}
// NotebookContent describes one entry (file or directory) on a running
// Jupyter server together with the session state needed to call its
// contents API.
type NotebookContent struct {
	Url      string         // base Jupyter URL of the server
	Path     string         // entry path relative to the Jupyter root
	Cookies  []*http.Cookie // session cookies obtained from the server
	Xsrf     string         // value of the "_xsrf" cookie
	PathType string         // entry kind: "file" or "directory"
	Token    string         // optional access token appended to Url as ?token=
}
func (c *NotebookContent) IsNotebookFileCanBrowser() bool { | |||
if c.Xsrf == "" { | |||
c.SetCookiesAndCsrf() | |||
} | |||
if c.Xsrf == "" { | |||
log.Warn("xsrf is empty, can not broswer url:" + c.Url) | |||
return false | |||
} | |||
return c.IsNoteBookContentsExist() | |||
} | |||
func (c *NotebookContent) SetCookiesAndCsrf() { | |||
log.Info("jupyter url:" + c.Url) | |||
var cookies []*http.Cookie | |||
const retryTimes = 10 | |||
url := c.Url | |||
if c.Token != "" { | |||
url = c.Url + "?token=" + c.Token | |||
} | |||
for i := 0; i < retryTimes; i++ { | |||
res, err := http.Get(url) | |||
if err != nil { | |||
log.Error("browser jupyterUrl failed.", err) | |||
if i == retryTimes-1 { | |||
c.Cookies = cookies | |||
} | |||
} else { | |||
cookies = res.Cookies() | |||
xsrf := "" | |||
for _, cookie := range cookies { | |||
if cookie.Name == "_xsrf" { | |||
xsrf = cookie.Value | |||
if len(cookies) > 1 { | |||
break | |||
} | |||
} | |||
} | |||
if xsrf != "" { | |||
c.Cookies = cookies | |||
c.Xsrf = xsrf | |||
} | |||
} | |||
} | |||
c.Cookies = cookies | |||
} | |||
func (c *NotebookContent) IsNoteBookContentsExist() bool { | |||
client := getRestyClient() | |||
uploadUrl := getJupyterBaseUrl(c.Url) + "api/contents/" + c.Path + "?type=" + c.PathType | |||
res, err := client.R(). | |||
SetCookies(c.Cookies). | |||
SetHeader("X-XSRFToken", c.Xsrf). | |||
Get(uploadUrl) | |||
if err != nil { | |||
log.Warn("browser url error:"+uploadUrl, err) | |||
return false | |||
} | |||
return res.StatusCode() == http.StatusOK | |||
} | |||
func (c *NotebookContent) UploadNoteBookFile(task *models.Cloudbrain) error { | |||
err := c.MakeNoteBookDir() | |||
if err != nil { | |||
return err | |||
} | |||
codePath := setting.JobPath + task.JobName + cloudbrain.CodeMountPath | |||
fileContents, err := ioutil.ReadFile(codePath + "/" + c.Path) | |||
if err != nil { | |||
log.Error("read jupyter file failed:%v", task.DisplayJobName, err) | |||
} | |||
base64Content := base64.StdEncoding.EncodeToString(fileContents) | |||
client := getRestyClient() | |||
uploadUrl := getJupyterBaseUrl(c.Url) + "api/contents/" + c.Path | |||
res, err := client.R(). | |||
SetCookies(c.Cookies). | |||
SetHeader("X-XSRFToken", c.Xsrf). | |||
SetBody(map[string]interface{}{ | |||
"type": "file", | |||
"format": "base64", | |||
"name": path.Base(c.Path), | |||
"path": c.Path, | |||
"content": base64Content}). | |||
Put(uploadUrl) | |||
if err != nil { | |||
log.Error("upload jupyter file failed:%v", task.DisplayJobName, err) | |||
return err | |||
} else if res.StatusCode() != http.StatusCreated { | |||
log.Error("upload jupyter file failed:%v, status is %s", task.DisplayJobName, res.Status(), err) | |||
return fmt.Errorf("status:", res.StatusCode()) | |||
} | |||
return nil | |||
} | |||
/** | |||
if c.Path is a/b/c.txt | |||
makedir a/b | |||
if c.Path is a/b/c | |||
makedir a/b | |||
*/ | |||
func (c *NotebookContent) MakeNoteBookDir() error { | |||
filePaths := strings.Split(c.Path, "/") | |||
for i := 0; i < len(filePaths)-1; i++ { | |||
cTemp := &NotebookContent{ | |||
Url: c.Url, | |||
Cookies: c.Cookies, | |||
Path: path.Join(filePaths[0 : i+1]...), | |||
PathType: "directory", | |||
Xsrf: c.Xsrf, | |||
} | |||
if !cTemp.IsNoteBookContentsExist() { | |||
createTempDirUrl := getJupyterBaseUrl(cTemp.Url) + "api/contents/" + cTemp.Path | |||
client := getRestyClient() | |||
var jobResult NotebookApiResponse | |||
res, err := client.R(). | |||
SetCookies(c.Cookies). | |||
SetHeader("X-XSRFToken", c.Xsrf). | |||
SetBody(map[string]interface{}{ | |||
"type": cTemp.PathType, | |||
"path": cTemp.Path, | |||
}).SetResult(&jobResult). | |||
Put(createTempDirUrl) | |||
if err != nil { | |||
return err | |||
} | |||
if res.StatusCode() != http.StatusCreated { | |||
return fmt.Errorf("status code:" + res.Status()) | |||
} | |||
} | |||
} | |||
return nil | |||
} | |||
// getJupyterBaseUrl returns the base URL that Jupyter API paths are
// appended to: when url ends in "lab" the final path segment is
// stripped (trailing slash kept), otherwise url is returned unchanged.
func getJupyterBaseUrl(url string) string {
	if !strings.HasSuffix(url, "lab") {
		return url
	}
	return url[:len(url)-len(path.Base(url))]
}
// getRestyClient lazily creates and returns the package-wide resty HTTP
// client. TLS certificate verification is disabled — presumably because
// the notebook endpoints use self-signed certificates; NOTE(review):
// confirm this is intended for all callers.
// NOTE(review): the lazy initialization is not synchronized; concurrent
// first calls could race on restyClient — verify callers are
// single-goroutine or accept the benign race.
func getRestyClient() *resty.Client {
	if restyClient == nil {
		restyClient = resty.New()
		restyClient.SetTLSClientConfig(&tls.Config{InsecureSkipVerify: true})
	}
	return restyClient
}
@@ -519,7 +519,6 @@ var ( | |||
CullIdleTimeout string | |||
CullInterval string | |||
//benchmark config | |||
IsBenchmarkEnabled bool | |||
BenchmarkOwner string | |||
@@ -617,14 +616,14 @@ var ( | |||
UsageRateBeginTime string | |||
}{} | |||
ClearStrategy= struct { | |||
Enabled bool | |||
ResultSaveDays int | |||
BatchSize int | |||
DebugJobSize int | |||
TrashSaveDays int | |||
Cron string | |||
RunAtStart bool | |||
ClearStrategy = struct { | |||
Enabled bool | |||
ResultSaveDays int | |||
BatchSize int | |||
DebugJobSize int | |||
TrashSaveDays int | |||
Cron string | |||
RunAtStart bool | |||
}{} | |||
C2NetInfos *C2NetSqInfos | |||
@@ -711,6 +710,7 @@ var ( | |||
ProjectHealth float64 | |||
ProjectHealthIssueCompleteRatio float64 | |||
ProjectHealth0IssueCloseRatio float64 | |||
TeamHealth float64 | |||
TeamHealthContributors float64 | |||
@@ -1705,16 +1705,16 @@ func getModelartsCDConfig() { | |||
getNotebookFlavorInfos() | |||
} | |||
func getClearStrategy(){ | |||
func getClearStrategy() { | |||
sec := Cfg.Section("clear_strategy") | |||
ClearStrategy.Enabled=sec.Key("ENABLED").MustBool(false) | |||
ClearStrategy.ResultSaveDays=sec.Key("RESULT_SAVE_DAYS").MustInt(30) | |||
ClearStrategy.BatchSize=sec.Key("BATCH_SIZE").MustInt(500) | |||
ClearStrategy.DebugJobSize=sec.Key("DEBUG_BATCH_SIZE").MustInt(100) | |||
ClearStrategy.TrashSaveDays=sec.Key("TRASH_SAVE_DAYS").MustInt(90) | |||
ClearStrategy.Cron=sec.Key("CRON").MustString("* 0,30 2-8 * * ?") | |||
ClearStrategy.RunAtStart=sec.Key("RUN_AT_START").MustBool(false) | |||
ClearStrategy.Enabled = sec.Key("ENABLED").MustBool(false) | |||
ClearStrategy.ResultSaveDays = sec.Key("RESULT_SAVE_DAYS").MustInt(30) | |||
ClearStrategy.BatchSize = sec.Key("BATCH_SIZE").MustInt(500) | |||
ClearStrategy.DebugJobSize = sec.Key("DEBUG_BATCH_SIZE").MustInt(100) | |||
ClearStrategy.TrashSaveDays = sec.Key("TRASH_SAVE_DAYS").MustInt(90) | |||
ClearStrategy.Cron = sec.Key("CRON").MustString("* 0,30 2-8 * * ?") | |||
ClearStrategy.RunAtStart = sec.Key("RUN_AT_START").MustBool(false) | |||
} | |||
func getGrampusConfig() { | |||
@@ -1781,6 +1781,7 @@ func SetRadarMapConfig() { | |||
RadarMap.LivenessRelease = sec.Key("liveness_release").MustFloat64(0.4) | |||
RadarMap.ProjectHealth = sec.Key("project_health").MustFloat64(0.1) | |||
RadarMap.ProjectHealthIssueCompleteRatio = sec.Key("project_health_issue_complete_ratio").MustFloat64(100) | |||
RadarMap.ProjectHealth0IssueCloseRatio = sec.Key("project_health_0_issue_close_ratio").MustFloat64(0.0) | |||
RadarMap.TeamHealth = sec.Key("team_health").MustFloat64(0.1) | |||
RadarMap.TeamHealthContributors = sec.Key("team_health_contributors").MustFloat64(0.2) | |||
RadarMap.TeamHealthKeyContributors = sec.Key("team_health_key_contributors").MustFloat64(0.6) | |||
@@ -47,6 +47,7 @@ type CreateFileNotebookJobOption struct { | |||
BranchName string `json:"branch_name" binding:"Required"` | |||
OwnerName string `json:"owner_name" binding:"Required"` | |||
ProjectName string `json:"project_name" binding:"Required"` | |||
JobId string `json:"job_id"` | |||
} | |||
type Cloudbrain struct { | |||
@@ -577,6 +577,7 @@ static.CloudBrainTaskNum=CloudBrain Task Count | |||
static.CloudBrainRunTime=CloudBrain Run Time | |||
static.CommitDatasetNum=Commit Dataset Count | |||
static.CommitModelCount=Commit Model Count | |||
static.ModelConvertCount=Model Convert Count | |||
static.UserIndex=Normalized user index | |||
static.UserIndexPrimitive=User Index | |||
static.countdate=Count Date | |||
@@ -1061,7 +1062,7 @@ model_rename=Duplicate model name, please modify model name. | |||
notebook_file_not_exist=Notebook file does not exist. | |||
notebook_select_wrong=Please select a Notebook(.ipynb) file first. | |||
notebook_file_no_right=You have no right to access the Notebook(.ipynb) file. | |||
debug_again_fail=Fail to restart debug task, please try again later. | |||
notebook_repo_conflict=The files in different branches of the same repository can not run together. | |||
date=Date | |||
repo_add=Project Increment | |||
@@ -2755,6 +2756,10 @@ repos.pr=PR | |||
repos.commit=Commit | |||
repos.closedIssues=Closed Issue | |||
repos.contributor=Contributor | |||
repos.numDataset=Dataset File | |||
repos.numCloudbrain=Cloudbrain Task | |||
repos.numModel=Model | |||
repos.numModelConvert=Model Convert Task | |||
repos.yes=Yes | |||
repos.no=No | |||
@@ -3307,7 +3312,7 @@ new_debug_gpu_tooltips = The code is storaged in <strong style="color:#010101">% | |||
new_debug_gpu_tooltips1 = The code is storaged in <strong style="color:#010101">%s</strong>, the dataset is storaged in <strong style="color:#010101">%s</strong>, the pre-trained model is storaged in the <strong style="color:#010101">%s</strong>. | |||
new_train_npu_tooltips = The code is storaged in <strong style="color:#010101">%s</strong>, the pre-trained model is storaged in the run parameter <strong style="color:#010101">%s</strong>, and please put your model into <strong style="color:#010101">%s</strong> then you can download it online | |||
new_infer_gpu_tooltips = The dataset is stored in <strong style="color:#010101">%s</strong>, the model file is stored in <strong style="color:#010101">%s</strong>, please store the inference output in <strong style="color:#010101">%s</strong> for subsequent downloads. | |||
code_obs_address = Code OBS address | |||
code_obs_address = Code OBS address | |||
[points] | |||
points = points | |||
@@ -581,6 +581,7 @@ static.CloudBrainTaskNum=云脑任务数 | |||
static.CloudBrainRunTime=云脑运行时间(小时) | |||
static.CommitDatasetNum=上传(提交)数据集文件数 | |||
static.CommitModelCount=提交模型数 | |||
static.ModelConvertCount=模型转换数 | |||
static.UserIndex=归一化用户指数 | |||
static.UserIndexPrimitive=用户指数 | |||
static.countdate=系统统计时间 | |||
@@ -1060,7 +1061,7 @@ model_rename=模型名称重复,请修改模型名称 | |||
notebook_file_not_exist=Notebook文件不存在。 | |||
notebook_select_wrong=请先选择Notebook(.ipynb)文件。 | |||
notebook_file_no_right=您没有这个Notebook文件的读权限。 | |||
debug_again_fail=再次调试失败,请稍后再试。 | |||
notebook_repo_conflict=同一个仓库的不同分支文件不能同时运行。 | |||
date=日期 | |||
repo_add=新增项目 | |||
@@ -1672,7 +1673,7 @@ issues.action_assignee_no_select=未指派 | |||
issues.opened_by=由 <a href="%[2]s">%[3]s</a> 于 %[1]s创建 | |||
pulls.merged_by=由 <a href="%[2]s">%[3]s</a> 于 %[1]s 合并 | |||
pulls.merged_by_fake=由 %[2]s 于 %[1]s 合并 | |||
issues.closed_by=按 <a href="%[2]s">%[3]s</a> 关闭%[1]s | |||
issues.closed_by=由 <a href="%[2]s">%[3]s</a> 创建,被关闭于 %[1]s | |||
issues.opened_by_fake=由 %[2]s 于 %[1]s创建 | |||
issues.closed_by_fake=通过 %[2]s 关闭 %[1]s | |||
issues.previous=上一页 | |||
@@ -2773,6 +2774,11 @@ repos.pr=PR数 | |||
repos.commit=Commit数 | |||
repos.closedIssues=已解决任务数 | |||
repos.contributor=贡献者数 | |||
repos.numDataset=数据集文件数 | |||
repos.numCloudbrain=云脑任务数 | |||
repos.numModel=模型数 | |||
repos.numModelConvert=转换任务数 | |||
repos.yes=是 | |||
repos.no=否 | |||
@@ -745,7 +745,7 @@ func RegisterRoutes(m *macaron.Macaron) { | |||
m.Group("/file_notebook", func() { | |||
m.Get("", repo.GetFileNoteBookInfo) | |||
m.Post("/create", reqToken(), reqWeChat(), bind(api.CreateFileNotebookJobOption{}), repo.CreateFileNoteBook) | |||
m.Post("/status", reqToken(), bind(api.CreateFileNotebookJobOption{}), repo.FileNoteBookStatus) | |||
}) | |||
m.Group("/repos", func() { | |||
@@ -1024,6 +1024,8 @@ func RegisterRoutes(m *macaron.Macaron) { | |||
m.Get("/query_model_byName", repo.QueryModelByName) | |||
m.Get("/query_model_for_predict", repo.QueryModelListForPredict) | |||
m.Get("/query_modelfile_for_predict", repo.QueryModelFileForPredict) | |||
m.Get("/query_train_job", repo.QueryTrainJobList) | |||
m.Get("/query_train_job_version", repo.QueryTrainJobVersionList) | |||
m.Get("/query_train_model", repo.QueryTrainModelList) | |||
m.Post("/create_model_convert", repo.CreateModelConvert) | |||
m.Post("/convert_stop", repo.StopModelConvert) | |||
@@ -110,6 +110,9 @@ func GeneralCloudBrainJobStop(ctx *context.APIContext) { | |||
// CreateFileNoteBook creates a notebook debug task for a single file,
// delegating to cloudbrainTask.FileNotebookCreate with the bound
// request options.
func CreateFileNoteBook(ctx *context.APIContext, option api.CreateFileNotebookJobOption) {
	cloudbrainTask.FileNotebookCreate(ctx.Context, option)
}
// FileNoteBookStatus reports the status of a file notebook task,
// delegating to cloudbrainTask.FileNotebookStatus with the bound
// request options.
func FileNoteBookStatus(ctx *context.APIContext, option api.CreateFileNotebookJobOption) {
	cloudbrainTask.FileNotebookStatus(ctx.Context, option)
}
func GetFileNoteBookInfo(ctx *context.APIContext) { | |||
//image description spec description waiting count | |||
@@ -4,8 +4,10 @@ import ( | |||
"net/http" | |||
"code.gitea.io/gitea/modules/context" | |||
"code.gitea.io/gitea/modules/convert" | |||
"code.gitea.io/gitea/modules/log" | |||
"code.gitea.io/gitea/modules/storage" | |||
api "code.gitea.io/gitea/modules/structs" | |||
routerRepo "code.gitea.io/gitea/routers/repo" | |||
) | |||
@@ -54,6 +56,21 @@ func QueryModelListForPredict(ctx *context.APIContext) { | |||
routerRepo.QueryModelListForPredict(ctx.Context) | |||
} | |||
func QueryTrainJobList(ctx *context.APIContext) { | |||
result, err := routerRepo.QueryTrainJobListApi(ctx.Context) | |||
if err != nil { | |||
log.Info("query error." + err.Error()) | |||
ctx.JSON(http.StatusOK, nil) | |||
} else { | |||
re := make([]*api.Cloudbrain, 0) | |||
for _, task := range result { | |||
conRe := convert.ToCloudBrain(task) | |||
re = append(re, conRe) | |||
} | |||
ctx.JSON(http.StatusOK, re) | |||
} | |||
} | |||
func QueryTrainModelList(ctx *context.APIContext) { | |||
result, err := routerRepo.QueryTrainModelFileById(ctx.Context) | |||
if err != nil { | |||
@@ -63,6 +80,21 @@ func QueryTrainModelList(ctx *context.APIContext) { | |||
ctx.JSON(http.StatusOK, re) | |||
} | |||
func QueryTrainJobVersionList(ctx *context.APIContext) { | |||
result, err := routerRepo.QueryTrainJobVersionListApi(ctx.Context) | |||
if err != nil { | |||
log.Info("query error." + err.Error()) | |||
ctx.JSON(http.StatusOK, nil) | |||
} else { | |||
re := make([]*api.Cloudbrain, 0) | |||
for _, task := range result { | |||
conRe := convert.ToCloudBrain(task) | |||
re = append(re, conRe) | |||
} | |||
ctx.JSON(http.StatusOK, re) | |||
} | |||
} | |||
func convertFileFormat(result []storage.FileInfo) []FileInfo { | |||
re := make([]FileInfo, 0) | |||
if result != nil { | |||
@@ -601,7 +601,7 @@ func getSummaryFileName(ctx *context.Context, beginTime time.Time, endTime time. | |||
func allProjectsPeroidHeader(ctx *context.Context) map[string]string { | |||
return map[string]string{"A1": ctx.Tr("admin.repos.id"), "B1": ctx.Tr("admin.repos.projectName"), "C1": ctx.Tr("repo.owner"), "D1": ctx.Tr("admin.repos.isPrivate"), "E1": ctx.Tr("admin.repos.openi"), "F1": ctx.Tr("admin.repos.visit"), "G1": ctx.Tr("admin.repos.download"), "H1": ctx.Tr("admin.repos.pr"), "I1": ctx.Tr("admin.repos.commit"), | |||
"J1": ctx.Tr("admin.repos.watches"), "K1": ctx.Tr("admin.repos.stars"), "L1": ctx.Tr("admin.repos.forks"), "M1": ctx.Tr("admin.repos.issues"), "N1": ctx.Tr("admin.repos.closedIssues"), "O1": ctx.Tr("admin.repos.contributor"), "P1": ctx.Tr("admin.repos.isFork"), "Q1": ctx.Tr("admin.repos.isMirror"), "R1": ctx.Tr("admin.repos.create")} | |||
"J1": ctx.Tr("admin.repos.watches"), "K1": ctx.Tr("admin.repos.stars"), "L1": ctx.Tr("admin.repos.forks"), "M1": ctx.Tr("admin.repos.issues"), "N1": ctx.Tr("admin.repos.closedIssues"), "O1": ctx.Tr("admin.repos.contributor"), "P1": ctx.Tr("admin.repos.numDataset"), "Q1": ctx.Tr("admin.repos.numCloudbrain"), "R1": ctx.Tr("admin.repos.numModel"), "S1": ctx.Tr("admin.repos.numModelConvert"), "T1": ctx.Tr("admin.repos.isFork"), "U1": ctx.Tr("admin.repos.isMirror"), "V1": ctx.Tr("admin.repos.create")} | |||
} | |||
@@ -619,11 +619,13 @@ func allProjectsPeriodSummaryValues(row int, rs *ProjectSummaryBaseData, ctx *co | |||
} | |||
func allProjectsPeroidValues(row int, rs *models.RepoStatistic, ctx *context.Context) map[string]string { | |||
return map[string]string{getCellName("A", row): strconv.FormatInt(rs.RepoID, 10), getCellName("B", row): rs.DisplayName(), getCellName("C", row): rs.OwnerName, getCellName("D", row): getBoolDisplay(rs.IsPrivate, ctx), getCellName("E", row): strconv.FormatFloat(rs.RadarTotal, 'f', 2, 64), | |||
getCellName("F", row): strconv.FormatInt(rs.NumVisits, 10), getCellName("G", row): strconv.FormatInt(rs.NumDownloads, 10), getCellName("H", row): strconv.FormatInt(rs.NumPulls, 10), getCellName("I", row): strconv.FormatInt(rs.NumCommits, 10), | |||
getCellName("J", row): strconv.FormatInt(rs.NumWatches, 10), getCellName("K", row): strconv.FormatInt(rs.NumStars, 10), getCellName("L", row): strconv.FormatInt(rs.NumForks, 10), getCellName("M", row): strconv.FormatInt(rs.NumIssues, 10), | |||
getCellName("N", row): strconv.FormatInt(rs.NumClosedIssues, 10), getCellName("O", row): strconv.FormatInt(rs.NumContributor, 10), getCellName("P", row): getBoolDisplay(rs.IsFork, ctx), getCellName("Q", row): getBoolDisplay(rs.IsMirror, ctx), getCellName("R", row): time.Unix(int64(rs.RepoCreatedUnix), 0).Format(CREATE_TIME_FORMAT), | |||
getCellName("N", row): strconv.FormatInt(rs.NumClosedIssues, 10), getCellName("O", row): strconv.FormatInt(rs.NumContributor, 10), getCellName("P", row): strconv.FormatInt(rs.NumDatasetFile, 10), getCellName("Q", row): strconv.FormatInt(rs.NumCloudbrain, 10), getCellName("R", row): strconv.FormatInt(rs.NumModels, 10), getCellName("S", row): strconv.FormatInt(rs.NumModelConvert, 10), getCellName("T", row): getBoolDisplay(rs.IsFork, ctx), getCellName("U", row): getBoolDisplay(rs.IsMirror, ctx), getCellName("V", row): time.Unix(int64(rs.RepoCreatedUnix), 0).Format(CREATE_TIME_FORMAT), | |||
} | |||
} | |||
func allProjectsOpenIHeader() map[string]string { | |||
@@ -804,11 +806,11 @@ func generateOpenICountSql(latestDate string) string { | |||
} | |||
func generateTypeAllSql(beginTime time.Time, endTime time.Time, latestDate string, q string, orderBy string, page int, pageSize int) string { | |||
sql := "SELECT A.repo_id,name,alias,owner_name,is_private,is_mirror,is_fork,repo_created_unix,radar_total,num_watches,num_visits,num_downloads,num_pulls,num_commits,num_stars,num_forks,num_issues,num_closed_issues,num_contributor FROM " + | |||
sql := "SELECT A.repo_id,name,alias,owner_name,is_private,is_mirror,is_fork,repo_created_unix,radar_total,num_watches,num_visits,num_downloads,num_pulls,num_commits,num_stars,num_forks,num_issues,num_closed_issues,num_contributor,num_models,num_model_convert,num_cloudbrain,num_dataset_file FROM " + | |||
"(SELECT repo_id,sum(num_visits) as num_visits " + | |||
" FROM repo_statistic where created_unix >=" + strconv.FormatInt(beginTime.Unix(), 10) + | |||
" and created_unix<" + strconv.FormatInt(endTime.Unix(), 10) + " group by repo_id) A," + | |||
"(SELECT repo_id,name,alias,owner_name,is_private,is_mirror,is_fork,repo_created_unix,radar_total,num_watches,num_downloads,num_pulls,num_commits,num_stars,num_forks,num_issues,num_closed_issues,num_contributor from public.repo_statistic where date='" + latestDate + "') B" + | |||
"(SELECT repo_id,name,alias,owner_name,is_private,is_mirror,is_fork,repo_created_unix,radar_total,num_watches,num_downloads,num_pulls,num_commits,num_stars,num_forks,num_issues,num_closed_issues,num_contributor,num_models,num_model_convert,num_cloudbrain,num_dataset_file from public.repo_statistic where date='" + latestDate + "') B" + | |||
" where A.repo_id=B.repo_id" | |||
if q != "" { | |||
@@ -828,8 +830,8 @@ func generateTypeAllOpenISql(latestDate string, page int, pageSize int) string { | |||
func generatePageSql(beginTime time.Time, endTime time.Time, latestDate string, q string, orderBy string, page int, pageSize int) string { | |||
sql := "SELECT A.repo_id,name,alias,owner_name,is_private,is_mirror,is_fork,repo_created_unix,radar_total,num_watches,num_visits,num_downloads,num_pulls,num_commits,num_stars,num_forks,num_issues,num_closed_issues,num_contributor FROM " + | |||
"(SELECT repo_id,sum(num_watches_added) as num_watches,sum(num_visits) as num_visits, sum(num_downloads_added) as num_downloads,sum(num_pulls_added) as num_pulls,sum(num_commits_added) as num_commits,sum(num_stars_added) as num_stars,sum(num_forks_added) num_forks,sum(num_issues_added) as num_issues,sum(num_closed_issues_added) as num_closed_issues,sum(num_contributor_added) as num_contributor " + | |||
sql := "SELECT A.repo_id,name,alias,owner_name,is_private,is_mirror,is_fork,repo_created_unix,radar_total,num_watches,num_visits,num_downloads,num_pulls,num_commits,num_stars,num_forks,num_issues,num_closed_issues,num_contributor,num_models,num_model_convert,num_cloudbrain,num_dataset_file FROM " + | |||
"(SELECT repo_id,sum(num_watches_added) as num_watches,sum(num_visits) as num_visits, sum(num_downloads_added) as num_downloads,sum(num_pulls_added) as num_pulls,sum(num_commits_added) as num_commits,sum(num_stars_added) as num_stars,sum(num_forks_added) num_forks,sum(num_issues_added) as num_issues,sum(num_closed_issues_added) as num_closed_issues,sum(num_contributor_added) as num_contributor,sum(num_models_added) as num_models,sum(num_model_convert_added) as num_model_convert,sum(num_dataset_file_added) as num_dataset_file, sum(num_cloudbrain_added) as num_cloudbrain " + | |||
" FROM repo_statistic where created_unix >=" + strconv.FormatInt(beginTime.Unix(), 10) + | |||
" and created_unix<" + strconv.FormatInt(endTime.Unix(), 10) + " group by repo_id) A," + | |||
"(SELECT repo_id,name,alias,owner_name,is_private,is_mirror,is_fork,repo_created_unix,radar_total from public.repo_statistic where date='" + latestDate + "') B" + | |||
@@ -2,7 +2,6 @@ package repo | |||
import ( | |||
"archive/zip" | |||
"code.gitea.io/gitea/services/repository" | |||
"encoding/json" | |||
"errors" | |||
"fmt" | |||
@@ -12,6 +11,8 @@ import ( | |||
"regexp" | |||
"strings" | |||
"code.gitea.io/gitea/services/repository" | |||
"code.gitea.io/gitea/models" | |||
"code.gitea.io/gitea/modules/context" | |||
"code.gitea.io/gitea/modules/log" | |||
@@ -710,36 +711,42 @@ func downloadFromCloudBrainTwo(path string, task *models.AiModelManage, ctx *con | |||
} | |||
func QueryTrainJobVersionList(ctx *context.Context) { | |||
VersionListTasks, err := QueryTrainJobVersionListApi(ctx) | |||
if err != nil { | |||
ctx.JSON(200, nil) | |||
} else { | |||
ctx.JSON(200, VersionListTasks) | |||
} | |||
} | |||
func QueryTrainJobVersionListApi(ctx *context.Context) ([]*models.Cloudbrain, error) { | |||
log.Info("query train job version list. start.") | |||
JobID := ctx.Query("jobId") | |||
if JobID == "" { | |||
JobID = ctx.Query("JobId") | |||
} | |||
VersionListTasks, count, err := models.QueryModelTrainJobVersionList(JobID) | |||
log.Info("query return count=" + fmt.Sprint(count)) | |||
return VersionListTasks, err | |||
} | |||
func QueryTrainJobList(ctx *context.Context) { | |||
VersionListTasks, err := QueryTrainJobListApi(ctx) | |||
if err != nil { | |||
ctx.ServerError("QueryTrainJobList:", err) | |||
ctx.JSON(200, nil) | |||
} else { | |||
ctx.JSON(200, VersionListTasks) | |||
} | |||
} | |||
func QueryTrainJobList(ctx *context.Context) { | |||
log.Info("query train job list. start.") | |||
func QueryTrainJobListApi(ctx *context.Context) ([]*models.Cloudbrain, error) { | |||
repoId := ctx.QueryInt64("repoId") | |||
VersionListTasks, count, err := models.QueryModelTrainJobList(repoId) | |||
log.Info("query return count=" + fmt.Sprint(count)) | |||
if err != nil { | |||
ctx.ServerError("QueryTrainJobList:", err) | |||
} else { | |||
ctx.JSON(200, VersionListTasks) | |||
} | |||
return VersionListTasks, err | |||
} | |||
func QueryTrainModelFileById(ctx *context.Context) ([]storage.FileInfo, error) { | |||
@@ -911,10 +911,13 @@ func cloudBrainShow(ctx *context.Context, tpName base.TplName, jobType models.Jo | |||
func CloudBrainDebug(ctx *context.Context) { | |||
task := ctx.Cloudbrain | |||
debugUrl := setting.DebugServerHost + "jpylab_" + task.JobID + "_" + task.SubTaskName | |||
if task.BootFile != "" { | |||
ctx.Redirect(getFileUrl(debugUrl, task.BootFile)) | |||
if ctx.QueryTrim("file") != "" { | |||
ctx.Redirect(getFileUrl(debugUrl, ctx.QueryTrim("file"))) | |||
} else { | |||
if task.BootFile != "" { | |||
go cloudbrainTask.UploadNotebookFiles(task) | |||
} | |||
ctx.Redirect(debugUrl) | |||
} | |||
@@ -1638,6 +1641,21 @@ func CloudBrainDownloadModel(ctx *context.Context) { | |||
ctx.Resp.Header().Set("Cache-Control", "max-age=0") | |||
http.Redirect(ctx.Resp, ctx.Req.Request, url, http.StatusMovedPermanently) | |||
} | |||
func CloudBrainDownloadMultiModel(ctx *context.Context) { | |||
parentDir := ctx.Query("parentDir") | |||
jobName := ctx.Query("jobName") | |||
filePath := "jobs/" + jobName + "/model/" + parentDir | |||
allFile, err := storage.GetAllObjectByBucketAndPrefixMinio(setting.Attachment.Minio.Bucket, filePath) | |||
if err == nil { | |||
returnFileName := jobName + ".zip" | |||
MinioDownloadManyFile(filePath, ctx, returnFileName, allFile) | |||
} else { | |||
log.Info("error,msg=" + err.Error()) | |||
ctx.ServerError("no file to download.", err) | |||
} | |||
} | |||
func CloudBrainDownloadInferenceResult(ctx *context.Context) { | |||
parentDir := ctx.Query("parentDir") | |||
fileName := ctx.Query("fileName") | |||
@@ -439,9 +439,13 @@ func NotebookDebug2(ctx *context.Context) { | |||
ctx.RenderWithErr(err.Error(), tplModelArtsNotebookIndex, nil) | |||
return | |||
} | |||
if task.BootFile != "" { | |||
ctx.Redirect(getFileUrl(result.Url, task.BootFile) + "?token=" + result.Token) | |||
if ctx.QueryTrim("file") != "" { | |||
ctx.Redirect(getFileUrl(result.Url, ctx.QueryTrim("file")) + "?token=" + result.Token) | |||
} else { | |||
if task.BootFile != "" { | |||
go cloudbrainTask.UploadNotebookFiles(task) | |||
} | |||
ctx.Redirect(result.Url + "?token=" + result.Token) | |||
} | |||
@@ -463,7 +467,7 @@ func getFileUrl(url string, filename string) string { | |||
} | |||
} | |||
return url + middle + path.Base(filename) | |||
return url + middle + filename | |||
} | |||
func NotebookRestart(ctx *context.Context) { | |||
@@ -630,7 +634,7 @@ func NotebookStop(ctx *context.Context) { | |||
if task.Status != string(models.ModelArtsRunning) { | |||
log.Error("the job(%s) is not running", task.JobName, ctx.Data["MsgID"]) | |||
resultCode = "-1" | |||
errorMsg = "the job is not running" | |||
errorMsg = ctx.Tr("cloudbrain.Already_stopped") | |||
break | |||
} | |||
@@ -2653,6 +2657,44 @@ func InferenceJobShow(ctx *context.Context) { | |||
ctx.HTML(http.StatusOK, tplModelArtsInferenceJobShow) | |||
} | |||
func MultiModelDownload(ctx *context.Context) { | |||
var ( | |||
err error | |||
) | |||
jobID := ctx.Params(":jobid") | |||
versionName := ctx.Query("version_name") | |||
parentDir := ctx.Query("parent_dir") | |||
task, err := models.GetCloudbrainByJobIDAndVersionName(jobID, versionName) | |||
if err != nil { | |||
log.Error("GetCloudbrainByJobIDAndVersionName(%s) failed:%v", task.JobName, err.Error()) | |||
return | |||
} | |||
if task.ComputeResource == models.NPUResource { | |||
path := strings.TrimPrefix(path.Join(setting.TrainJobModelPath, task.JobName, setting.OutPutPath, versionName, parentDir), "/") | |||
allFile, err := storage.GetAllObjectByBucketAndPrefix(setting.Bucket, path) | |||
if err == nil { | |||
returnFileName := task.DisplayJobName + ".zip" | |||
ObsDownloadManyFile(path, ctx, returnFileName, allFile) | |||
} else { | |||
log.Info("error,msg=" + err.Error()) | |||
ctx.ServerError("no file to download.", err) | |||
} | |||
} else if task.ComputeResource == models.GPUResource { | |||
filePath := setting.CBCodePathPrefix + task.JobName + cloudbrain.ModelMountPath + "/" + parentDir | |||
allFile, err := storage.GetAllObjectByBucketAndPrefixMinio(setting.Attachment.Minio.Bucket, filePath) | |||
if err == nil { | |||
returnFileName := task.DisplayJobName + ".zip" | |||
MinioDownloadManyFile(filePath, ctx, returnFileName, allFile) | |||
} else { | |||
log.Info("error,msg=" + err.Error()) | |||
ctx.ServerError("no file to download.", err) | |||
} | |||
} | |||
} | |||
func ModelDownload(ctx *context.Context) { | |||
var ( | |||
err error | |||
@@ -75,7 +75,7 @@ func RepoStatisticDaily(date string) { | |||
if repo.NumIssues != 0 { | |||
issueFixedRate = float32(repo.NumClosedIssues) / float32(repo.NumIssues) | |||
} else { | |||
issueFixedRate = 1.0 | |||
issueFixedRate = float32(setting.RadarMap.ProjectHealth0IssueCloseRatio) | |||
} | |||
var numVersions int64 | |||
@@ -124,7 +124,7 @@ func RepoStatisticDaily(date string) { | |||
NumDevMonths: numDevMonths, | |||
RepoSize: repo.Size, | |||
DatasetSize: datasetSize, | |||
NumModels: 0, | |||
NumModels: repo.ModelCnt, | |||
NumWikiViews: numWikiViews, | |||
NumCommits: numCommits, | |||
NumIssues: int64(repo.NumIssues), | |||
@@ -135,6 +135,9 @@ func RepoStatisticDaily(date string) { | |||
NumCommitsGrowth: numCommitsGrowth, | |||
NumCommitLinesGrowth: numCommitLinesGrowth, | |||
NumContributorsGrowth: numContributorsGrowth, | |||
NumCloudbrain: repo.AiTaskCnt, | |||
NumDatasetFile: repo.DatasetCnt, | |||
NumModelConvert: models.QueryModelConvertCountByRepoID(repo.ID), | |||
} | |||
dayBeforeDate := t.AddDate(0, 0, -1).Format("2006-01-02") | |||
@@ -155,6 +158,10 @@ func RepoStatisticDaily(date string) { | |||
repoStat.NumIssuesAdded = repoStat.NumIssues - repoStatisticBefore.NumIssues | |||
repoStat.NumPullsAdded = repoStat.NumPulls - repoStatisticBefore.NumPulls | |||
repoStat.NumContributorAdded = repoStat.NumContributor - repoStatisticBefore.NumContributor | |||
repoStat.NumModelsAdded = repoStat.NumModels - repoStatisticBefore.NumModels | |||
repoStat.NumCloudbrainAdded = repoStat.NumCloudbrain - repoStatisticBefore.NumCloudbrain | |||
repoStat.NumModelConvertAdded = repoStat.NumModelConvert - repoStatisticBefore.NumModelConvert | |||
repoStat.NumDatasetFileAdded = repoStat.NumDatasetFile - repoStatisticBefore.NumDatasetFile | |||
} | |||
} | |||
day4MonthsAgo := t.AddDate(0, -4, 0) | |||
@@ -1186,6 +1186,7 @@ func RegisterRoutes(m *macaron.Macaron) { | |||
m.Get("/rate", reqRepoCloudBrainReader, repo.GetRate) | |||
m.Get("/models", reqRepoCloudBrainReader, repo.CloudBrainShowModels) | |||
m.Get("/download_model", cloudbrain.AdminOrJobCreaterRight, repo.CloudBrainDownloadModel) | |||
m.Get("/download_multi_model", cloudbrain.AdminOrJobCreaterRight, repo.CloudBrainDownloadMultiModel) | |||
}) | |||
m.Get("/create", reqWechatBind, reqRepoCloudBrainWriter, context.PointAccount(), repo.CloudBrainNew) | |||
m.Post("/create", reqWechatBind, reqRepoCloudBrainWriter, bindIgnErr(auth.CreateCloudBrainForm{}), repo.CloudBrainCreate) | |||
@@ -1209,6 +1210,7 @@ func RegisterRoutes(m *macaron.Macaron) { | |||
m.Post("/del", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo.CloudBrainTrainJobDel) | |||
//m.Get("/models", reqRepoCloudBrainReader, repo.CloudBrainShowModels) | |||
m.Get("/download_model", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo.CloudBrainDownloadModel) | |||
m.Get("/download_multi_model", cloudbrain.AdminOrJobCreaterRight, repo.CloudBrainDownloadMultiModel) | |||
//m.Get("/get_log", cloudbrain.AdminOrJobCreaterRightForTrain, repo.GetLogFromModelDir) | |||
//m.Post("/create_version", reqWechatBind, cloudbrain.AdminOrJobCreaterRightForTrain, bindIgnErr(auth.CreateModelArtsTrainJobForm{}), repo.TrainJobCreateVersion) | |||
m.Get("/create_version", reqWechatBind, cloudbrain.AdminOrJobCreaterRightForTrain, repo.CloudBrainTrainJobVersionNew) | |||
@@ -1221,7 +1223,7 @@ func RegisterRoutes(m *macaron.Macaron) { | |||
m.Group("/:jobid", func() { | |||
m.Get("", reqRepoCloudBrainReader, repo.InferenceCloudBrainJobShow) | |||
m.Get("/result_download", cloudbrain.AdminOrJobCreaterRightForTrain, repo.CloudBrainDownloadInferenceResult) | |||
m.Get("/download_multi_model", cloudbrain.AdminOrJobCreaterRight, repo.CloudBrainDownloadMultiModel) | |||
m.Get("/downloadall", repo.DownloadInferenceResultFile) | |||
}) | |||
m.Get("/create", reqWechatBind, reqRepoCloudBrainWriter, context.PointAccount(), repo.InferenceCloudBrainJobNew) | |||
@@ -4,6 +4,9 @@ import ( | |||
"fmt" | |||
"net/http" | |||
"path" | |||
"strings" | |||
"code.gitea.io/gitea/modules/notebook" | |||
"code.gitea.io/gitea/modules/modelarts" | |||
"code.gitea.io/gitea/modules/modelarts_cd" | |||
@@ -29,6 +32,9 @@ import ( | |||
) | |||
const NoteBookExtension = ".ipynb" | |||
const CPUType = 0 | |||
const GPUType = 1 | |||
const NPUType = 2 | |||
func FileNotebookCreate(ctx *context.Context, option api.CreateFileNotebookJobOption) { | |||
@@ -66,7 +72,7 @@ func FileNotebookCreate(ctx *context.Context, option api.CreateFileNotebookJobOp | |||
} | |||
//create repo if not exist | |||
repo, err := models.GetRepositoryByName(ctx.User.ID, setting.FileNoteBook.ProjectName) | |||
repo, _ := models.GetRepositoryByName(ctx.User.ID, setting.FileNoteBook.ProjectName) | |||
if repo == nil { | |||
repo, err = repo_service.CreateRepository(ctx.User, ctx.User, models.CreateRepoOptions{ | |||
Name: setting.FileNoteBook.ProjectName, | |||
@@ -80,19 +86,222 @@ func FileNotebookCreate(ctx *context.Context, option api.CreateFileNotebookJobOp | |||
AutoInit: true, | |||
DefaultBranch: "master", | |||
}) | |||
if err != nil { | |||
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.failed_to_create_notebook_repo", setting.FileNoteBook.ProjectName))) | |||
return | |||
} | |||
} else { | |||
noteBook, _ := models.GetWaitOrRunFileNotebookByRepo(repo.ID, getCloudbrainType(option.Type)) | |||
if noteBook != nil { | |||
if isRepoConfilcts(option, noteBook) { | |||
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.notebook_repo_conflict"))) | |||
return | |||
} | |||
if isNotebookSpecMath(option, noteBook) { | |||
if !isRepoMatch(option, noteBook) { | |||
err = downloadCode(sourceRepo, getCodePath(noteBook.JobName, sourceRepo), option.BranchName) | |||
if err != nil { | |||
log.Error("download code failed", err) | |||
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("cloudbrain.load_code_failed"))) | |||
return | |||
} | |||
} | |||
if !isRepoFileMatch(option, noteBook) { | |||
noteBook.BootFile += ";" + getBootFile(option.File, option.OwnerName, option.ProjectName) | |||
noteBook.BranchName += ";" + option.BranchName | |||
noteBook.Description += ";" + getDescription(option) | |||
err := models.UpdateJob(noteBook) | |||
if err != nil { | |||
log.Error("GenerateNotebook2 failed, %v", err, ctx.Data["MsgID"]) | |||
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(err.Error())) | |||
return | |||
} | |||
} | |||
ctx.JSON(http.StatusOK, models.BaseMessageApi{ | |||
Code: 0, | |||
Message: noteBook.JobID, | |||
}) | |||
return | |||
} | |||
} | |||
} | |||
if option.Type <= GPUType { | |||
cloudBrainFileNoteBookCreate(ctx, option, repo, sourceRepo) | |||
} else { | |||
modelartsFileNoteBookCreate(ctx, option, repo, sourceRepo) | |||
} | |||
} | |||
func FileNotebookStatus(ctx *context.Context, option api.CreateFileNotebookJobOption) { | |||
if ctx.Written() { | |||
return | |||
} | |||
if path.Ext(option.File) != NoteBookExtension { | |||
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.notebook_select_wrong"))) | |||
return | |||
} | |||
isNotebookFileExist, _ := isNoteBookFileExist(ctx, option) | |||
if !isNotebookFileExist { | |||
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.notebook_file_not_exist"))) | |||
return | |||
} | |||
task, err := models.GetCloudbrainByJobID(option.JobId) | |||
if err != nil { | |||
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.failed_to_create_notebook_repo", setting.FileNoteBook.ProjectName))) | |||
log.Error("job not found:"+option.JobId, err) | |||
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("Job id may not be right. can not find job.")) | |||
return | |||
} | |||
if option.Type <= 1 { | |||
cloudBrainFileNoteBookCreate(ctx, option, repo, sourceRepo) | |||
if task.BootFile == "" || task.Status != string(models.ModelArtsRunning) { | |||
log.Warn("Boot file is empty or status is running. ") | |||
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("Boot file is empty or status is running.")) | |||
return | |||
} | |||
if !isRepoFileMatch(option, task) { | |||
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("can not math repo file.")) | |||
return | |||
} | |||
debugBaseUrl, token, err := getBaseUrlAndToken(task) | |||
if err != nil { | |||
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(err.Error())) | |||
return | |||
} | |||
if uploadNotebookFileIfCannotBroswer(debugBaseUrl, getBootFile(option.File, option.OwnerName, option.ProjectName), task, token) { | |||
ctx.JSON(http.StatusOK, models.BaseOKMessageApi) | |||
} else { | |||
modelartsFileNoteBookCreate(ctx, option, repo, sourceRepo) | |||
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("upload failed.")) | |||
} | |||
} | |||
func getBaseUrlAndToken(task *models.Cloudbrain) (string, string, error) { | |||
var debugBaseUrl string | |||
var token string | |||
if task.Type == models.TypeCloudBrainOne { | |||
debugBaseUrl = setting.DebugServerHost + "jpylab_" + task.JobID + "_" + task.SubTaskName + "/lab" | |||
} else { | |||
var result *models.GetNotebook2Result | |||
var err error | |||
if task.Type == models.TypeCloudBrainTwo { | |||
result, err = modelarts.GetNotebook2(task.JobID) | |||
} else if task.Type == models.TypeCDCenter { | |||
result, err = modelarts_cd.GetNotebook(task.JobID) | |||
} | |||
if err != nil || result == nil || result.Status != string(models.ModelArtsRunning) || result.Url == "" { | |||
log.Error("notebook job not found:"+task.JobID, err) | |||
return "", "", fmt.Errorf("can not get job or job is invalid.") | |||
} | |||
debugBaseUrl = result.Url | |||
token = result.Token | |||
} | |||
return debugBaseUrl, token, nil | |||
} | |||
func uploadNotebookFileIfCannotBroswer(debugBaseUrl string, bootFile string, task *models.Cloudbrain, token string) bool { | |||
c := ¬ebook.NotebookContent{ | |||
Url: debugBaseUrl, | |||
Path: bootFile, | |||
PathType: "file", | |||
Token: token, | |||
} | |||
if c.IsNotebookFileCanBrowser() { | |||
return true | |||
} else { | |||
c.SetCookiesAndCsrf() | |||
c.UploadNoteBookFile(task) | |||
return c.IsNotebookFileCanBrowser() | |||
} | |||
} | |||
func isNotebookSpecMath(option api.CreateFileNotebookJobOption, book *models.Cloudbrain) bool { | |||
if option.Type == NPUType || option.Type == CPUType { | |||
return true | |||
} | |||
spec, err := models.GetCloudbrainSpecByID(book.ID) | |||
if err != nil { | |||
log.Warn("can not get spec ", err) | |||
return false | |||
} | |||
return spec.AccCardsNum > 0 | |||
} | |||
func isRepoConfilcts(option api.CreateFileNotebookJobOption, book *models.Cloudbrain) bool { | |||
bootFiles := strings.Split(book.BootFile, ";") | |||
branches := strings.Split(book.BranchName, ";") | |||
for i, bootFile := range bootFiles { | |||
splits := strings.Split(bootFile, "/") | |||
if len(splits) >= 3 { | |||
if splits[0] == option.OwnerName && splits[1] == option.ProjectName && branches[i] != option.BranchName { | |||
return true | |||
} | |||
} | |||
} | |||
return false | |||
} | |||
func isRepoMatch(option api.CreateFileNotebookJobOption, book *models.Cloudbrain) bool { | |||
bootFiles := strings.Split(book.BootFile, ";") | |||
for _, bootFile := range bootFiles { | |||
splits := strings.Split(bootFile, "/") | |||
if len(splits) >= 3 { | |||
if splits[0] == option.OwnerName && splits[1] == option.ProjectName { | |||
return true | |||
} | |||
} | |||
} | |||
return false | |||
} | |||
func isRepoFileMatch(option api.CreateFileNotebookJobOption, book *models.Cloudbrain) bool { | |||
bootFiles := strings.Split(book.BootFile, ";") | |||
branches := strings.Split(book.BranchName, ";") | |||
for i, bootFile := range bootFiles { | |||
if branches[i] == option.BranchName && getBootFile(option.File, option.OwnerName, option.ProjectName) == bootFile { | |||
return true | |||
} | |||
} | |||
return false | |||
} | |||
func UploadNotebookFiles(task *models.Cloudbrain) { | |||
if task.Status == string(models.JobRunning) && task.BootFile != "" { | |||
debugBaseUrl, token, err := getBaseUrlAndToken(task) | |||
if err != nil { | |||
log.Error("can not get base url:", err) | |||
return | |||
} | |||
bootFiles := strings.Split(task.BootFile, ";") | |||
for _, bootFile := range bootFiles { | |||
uploadNotebookFileIfCannotBroswer(debugBaseUrl, bootFile, task, token) | |||
} | |||
} | |||
} | |||
func cloudBrainFileNoteBookCreate(ctx *context.Context, option api.CreateFileNotebookJobOption, repo *models.Repository, sourceRepo *models.Repository) { | |||
displayJobName := cloudbrainService.GetDisplayJobName(ctx.User.Name) | |||
@@ -131,17 +340,18 @@ func cloudBrainFileNoteBookCreate(ctx *context.Context, option api.CreateFileNot | |||
} else { | |||
if count >= 1 { | |||
log.Error("the user already has running or waiting task", ctx.Data["MsgID"]) | |||
ctx.JSON(http.StatusOK,models.BaseMessageApi{ | |||
Code: 2, | |||
ctx.JSON(http.StatusOK, models.BaseMessageApi{ | |||
Code: 2, | |||
Message: ctx.Tr("repo.cloudbrain.morethanonejob"), | |||
}) | |||
return | |||
} | |||
} | |||
errStr := uploadCodeFile(sourceRepo, getCodePath(jobName), option.BranchName, option.File, jobName) | |||
if errStr != "" { | |||
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.notebook_file_not_exist"))) | |||
err = downloadCode(sourceRepo, getCodePath(jobName, sourceRepo), option.BranchName) | |||
if err != nil { | |||
log.Error("download code failed", err) | |||
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("cloudbrain.load_code_failed"))) | |||
return | |||
} | |||
command := cloudbrain.GetCloudbrainDebugCommand() | |||
@@ -185,7 +395,7 @@ func cloudBrainFileNoteBookCreate(ctx *context.Context, option api.CreateFileNot | |||
JobType: jobType, | |||
Description: getDescription(option), | |||
BranchName: option.BranchName, | |||
BootFile: option.File, | |||
BootFile: getBootFile(option.File, option.OwnerName, option.ProjectName), | |||
Params: "{\"parameter\":[]}", | |||
CommitID: "", | |||
BenchmarkTypeID: 0, | |||
@@ -206,8 +416,18 @@ func cloudBrainFileNoteBookCreate(ctx *context.Context, option api.CreateFileNot | |||
} | |||
func getCodePath(jobName string) string { | |||
return setting.JobPath + jobName + cloudbrain.CodeMountPath | |||
func getCloudbrainType(optionType int) int { | |||
if optionType < 1 { | |||
return models.TypeCloudBrainOne | |||
} | |||
if setting.ModelartsCD.Enabled { | |||
return models.TypeCDCenter | |||
} | |||
return models.TypeCloudBrainTwo | |||
} | |||
func getCodePath(jobName string, repo *models.Repository) string { | |||
return setting.JobPath + jobName + cloudbrain.CodeMountPath + "/" + repo.OwnerName + "/" + repo.Name | |||
} | |||
func getDescription(option api.CreateFileNotebookJobOption) string { | |||
@@ -237,8 +457,8 @@ func modelartsFileNoteBookCreate(ctx *context.Context, option api.CreateFileNote | |||
} else { | |||
if count >= 1 { | |||
log.Error("the user already has running or waiting task", ctx.Data["MsgID"]) | |||
ctx.JSON(http.StatusOK,models.BaseMessageApi{ | |||
Code: 2, | |||
ctx.JSON(http.StatusOK, models.BaseMessageApi{ | |||
Code: 2, | |||
Message: ctx.Tr("repo.cloudbrain.morethanonejob"), | |||
}) | |||
return | |||
@@ -260,7 +480,7 @@ func modelartsFileNoteBookCreate(ctx *context.Context, option api.CreateFileNote | |||
} | |||
} | |||
err = downloadCode(sourceRepo, getCodePath(jobName), option.BranchName) | |||
err = downloadCode(sourceRepo, getCodePath(jobName, sourceRepo), option.BranchName) | |||
if err != nil { | |||
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("cloudbrain.load_code_failed"))) | |||
return | |||
@@ -297,8 +517,9 @@ func modelartsFileNoteBookCreate(ctx *context.Context, option api.CreateFileNote | |||
Description: getDescription(option), | |||
ImageId: setting.FileNoteBook.ImageIdNPU, | |||
Spec: spec, | |||
BootFile: "", | |||
BootFile: getBootFile(option.File, option.OwnerName, option.ProjectName), | |||
AutoStopDurationMs: modelarts.AutoStopDurationMs / 4, | |||
BranchName: option.BranchName, | |||
} | |||
if setting.ModelartsCD.Enabled { | |||
@@ -347,17 +568,8 @@ func isNoteBookFileExist(ctx *context.Context, option api.CreateFileNotebookJobO | |||
return true, nil | |||
} | |||
// (This span was merge residue: the deleted uploadCodeFile helper was
// left fused, unterminated, with the new getBootFile; reconstructed so
// only the surviving function remains.)

// getBootFile builds the notebook-relative boot file path
// "<owner>/<project>/<filePath>".
func getBootFile(filePath string, ownerName string, projectName string) string {
	return ownerName + "/" + projectName + "/" + filePath
}
func fileExists(gitRepo *git.Repository, path string, branch string) (bool, error) { | |||
@@ -29,7 +29,7 @@ export const getCb1Notebook = (path,jobid) => { | |||
}); | |||
}; | |||
// Query the status of a CloudBrain II (ModelArts) notebook debug task
export const getCb2Notebook = (path,jobid) => { | |||
return service({ | |||
url: `/api/v1/${path}/modelarts/notebook/${jobid}`, | |||
@@ -37,7 +37,16 @@ export const getCb2Notebook = (path,jobid) => { | |||
params: {}, | |||
}); | |||
}; | |||
// Ask the server whether the notebook file is ready inside the debug
// environment. Payload fields: type, file, branch_name, owner_name,
// project_name, job_id.
export const getFileInfoNotebook = (data) =>
  service({
    url: "/api/v1/file_notebook/status",
    method: "post",
    data,
    params: {},
  });
export const stopNotebook = (url) => { | |||
return service({ | |||
url: url, | |||
@@ -178,9 +178,9 @@ | |||
</div> | |||
</template> | |||
<script> | |||
import { getFileNotebook,createNotebook,getCb1Notebook,getCb2Notebook,stopNotebook } from "~/apis/modules/notobook"; | |||
import { getFileNotebook,createNotebook,getCb1Notebook,getCb2Notebook,getFileInfoNotebook,stopNotebook } from "~/apis/modules/notobook"; | |||
import { Message } from "element-ui"; | |||
let timerCb1,timerCb2 | |||
let timerCb1,timerCb2,timerCb3 | |||
let {AppSubUrl} = window.config | |||
const finalState = [ | |||
"STOPPED", | |||
@@ -253,27 +253,28 @@ export default { | |||
if(this.activeLoadFirst){ | |||
this.loading = true | |||
} | |||
getFileNotebook().then((res)=>{ | |||
if(res.data.code==0){ | |||
this.notebookInfo = res.data | |||
}else{ | |||
Message.error(res.data.message) | |||
} | |||
this.loading = false | |||
this.activeLoadFirst = false | |||
}).catch((err)=>{ | |||
Message.error(err) | |||
this.loading = false | |||
this.activeLoadFirst = false | |||
}) | |||
getFileNotebook().then((res)=>{ | |||
if(res.data.code==0){ | |||
this.notebookInfo = res.data | |||
}else{ | |||
Message.error(res.data.message) | |||
} | |||
this.loading = false | |||
this.activeLoadFirst = false | |||
}).catch((err)=>{ | |||
Message.error(err) | |||
this.loading = false | |||
this.activeLoadFirst = false | |||
}) | |||
}, | |||
getCb1NotebookInfo(path,id,index){ | |||
getCb1NotebookInfo(path,id,index,data){ | |||
getCb1Notebook(path,id).then((res)=>{ | |||
if(res.status===200){ | |||
if(res.data.JobStatus==="RUNNING"){ | |||
this.btnStatus[index]=2 | |||
this.deubgUrlGpu = `${AppSubUrl}/${this.fileInfo.sign_name}/${this.notebookInfo.projectName}/cloudbrain/${id}/debug` | |||
this.deubgUrlGpuStop = `${AppSubUrl}/${this.fileInfo.sign_name}/${this.notebookInfo.projectName}/cloudbrain/${id}/stop` | |||
let fileData = {job_id:id,...data} | |||
timerCb3 = setInterval(() => { | |||
setTimeout(this.getFileInfoReadyNotebook(fileData,index), 0) | |||
}, 5000) | |||
clearInterval(timerCb1) | |||
} | |||
if(finalState.includes(res.data.JobStatus)){ | |||
@@ -281,23 +282,53 @@ export default { | |||
clearInterval(timerCb1) | |||
} | |||
} | |||
}) | |||
}).catch((err)=>{ | |||
this.btnStatus[index]=0 | |||
clearInterval(timerCb1) | |||
Message.error(err) | |||
}) | |||
}, | |||
getCb2NotebookInfo(path,id){ | |||
getCb2NotebookInfo(path,id,data){ | |||
getCb2Notebook(path,id).then((res)=>{ | |||
if(res.status===200){ | |||
if(res.data.JobStatus==="RUNNING"){ | |||
this.btnStatus[2]=2 | |||
this.deubgUrlNpu = `${AppSubUrl}/${this.fileInfo.sign_name}/${this.notebookInfo.projectName}/modelarts/notebook/${id}/debug` | |||
this.deubgUrlNpuStop = `${AppSubUrl}/${this.fileInfo.sign_name}/${this.notebookInfo.projectName}/modelarts/notebook/${id}/stop` | |||
clearInterval(timerCb2) | |||
if(res.data.JobStatus==="RUNNING"){ | |||
let fileData = {job_id:id,...data} | |||
timerCb3 = setInterval(() => { | |||
setTimeout(this.getFileInfoReadyNotebook(fileData,2), 0) | |||
}, 5000) | |||
clearInterval(timerCb2) | |||
} | |||
if(finalState.includes(res.data.JobStatus)){ | |||
this.btnStatus[2] = 0 | |||
clearInterval(timerCb2) | |||
} | |||
} | |||
}) | |||
}).catch((err)=>{ | |||
this.btnStatus[index]=0 | |||
clearInterval(timerCb2) | |||
Message.error(err) | |||
}) | |||
}, | |||
getFileInfoReadyNotebook(data,index){ | |||
getFileInfoNotebook(data).then((res)=>{ | |||
console.log(res) | |||
if(res.data.code===0){ | |||
if(index===2){ | |||
this.btnStatus[2]=2 | |||
this.deubgUrlNpu = `${AppSubUrl}/${this.fileInfo.sign_name}/${this.notebookInfo.projectName}/modelarts/notebook/${data.job_id}/debug?file=${this.fileInfo.owner_name}/${this.fileInfo.project_name}/${this.fileInfo.file}` | |||
this.deubgUrlNpuStop = `${AppSubUrl}/${this.fileInfo.sign_name}/${this.notebookInfo.projectName}/modelarts/notebook/${data.job_id}/stop` | |||
}else{ | |||
this.btnStatus[index]=2 | |||
this.deubgUrlGpu = `${AppSubUrl}/${this.fileInfo.sign_name}/${this.notebookInfo.projectName}/cloudbrain/${data.job_id}/debug?file=${this.fileInfo.owner_name}/${this.fileInfo.project_name}/${this.fileInfo.file}` | |||
this.deubgUrlGpuStop = `${AppSubUrl}/${this.fileInfo.sign_name}/${this.notebookInfo.projectName}/cloudbrain/${data.job_id}/stop` | |||
} | |||
clearInterval(timerCb3) | |||
} | |||
}).catch((err)=>{ | |||
this.btnStatus[index]=0 | |||
clearInterval(timerCb3) | |||
Message.error(err) | |||
}) | |||
}, | |||
stopDebug(index){ | |||
this.btnStatus[index]=3 | |||
@@ -323,11 +354,11 @@ export default { | |||
if(res.data.code===0 && res.status===200){ | |||
if(index===2){ | |||
timerCb2 = setInterval(() => { | |||
setTimeout(this.getCb2NotebookInfo(repoPath,res.data.message), 0) | |||
setTimeout(this.getCb2NotebookInfo(repoPath,res.data.message,data), 0) | |||
}, 10000) | |||
}else{ | |||
timerCb1 = setInterval(() => { | |||
setTimeout(this.getCb1NotebookInfo(repoPath,res.data.message,index), 0) | |||
setTimeout(this.getCb1NotebookInfo(repoPath,res.data.message,index,data), 0) | |||
}, 10000) | |||
} | |||
this.alertCb = false | |||