diff --git a/models/cloudbrain.go b/models/cloudbrain.go index bcaea544a..6797fe332 100755 --- a/models/cloudbrain.go +++ b/models/cloudbrain.go @@ -1490,6 +1490,8 @@ type GrampusTasks struct { ReplicaNum int `json:"replicaNum"` Datasets []GrampusDataset `json:"datasets"` Models []GrampusDataset `json:"models"` + Code GrampusDataset `json:"code"` + BootFile string `json:"bootFile"` } type GrampusDataset struct { @@ -2281,8 +2283,7 @@ func CloudbrainAllStatic(opts *CloudbrainsOptions) ([]*CloudbrainInfo, int64, er } sess.OrderBy("cloudbrain.created_unix DESC") cloudbrains := make([]*CloudbrainInfo, 0, setting.UI.IssuePagingNum) - if err := sess.Cols("status", "type", "job_type", "train_job_duration", "duration", "compute_resource", - "created_unix", "start_time", "end_time").Table(&Cloudbrain{}).Unscoped().Where(cond). + if err := sess.Table(&Cloudbrain{}).Unscoped().Where(cond). Find(&cloudbrains); err != nil { return nil, 0, fmt.Errorf("Find: %v", err) } diff --git a/models/cloudbrain_static.go b/models/cloudbrain_static.go index 58a93d476..48df111a0 100644 --- a/models/cloudbrain_static.go +++ b/models/cloudbrain_static.go @@ -34,6 +34,7 @@ type TaskDetail struct { CardDuration string `json:"CardDuration"` AiCenter string `json:"AiCenter"` FlavorName string `json:"FlavorName"` + WorkServerNum int64 `json:"WorkServerNum"` Spec *Specification `json:"Spec"` } @@ -44,16 +45,45 @@ func GetTodayCreatorCount(beginTime time.Time, endTime time.Time) (int64, error) return x.SQL(countSql).Count() } func GetTodayCloudbrainCount(beginTime time.Time, endTime time.Time) (int64, error) { - countSql := "SELECT count FROM " + + countSql := "SELECT count(*) FROM " + "public.cloudbrain where created_unix >=" + strconv.FormatInt(beginTime.Unix(), 10) + " and created_unix<=" + strconv.FormatInt(endTime.Unix(), 10) return x.SQL(countSql).Count() } +func GetTodayRunningCount(beginTime time.Time, endTime time.Time) (int64, error) { + countSql := "SELECT count(*) FROM " + + 
"public.cloudbrain where created_unix >=" + strconv.FormatInt(beginTime.Unix(), 10) + + " and created_unix<=" + strconv.FormatInt(endTime.Unix(), 10) + " and (status='" + string(JobRunning) + "'" + + " or status='" + string(ModelArtsTrainJobInit) + "')" + return x.SQL(countSql).Count() +} +func GetTodayWaitingCount(beginTime time.Time, endTime time.Time) (int64, error) { + countSql := "SELECT count(*) FROM " + + "public.cloudbrain where created_unix >=" + strconv.FormatInt(beginTime.Unix(), 10) + + " and created_unix<=" + strconv.FormatInt(endTime.Unix(), 10) + " and status='" + string(JobWaiting) + "'" + return x.SQL(countSql).Count() +} + func GetCreatorCount() (int64, error) { countSql := "SELECT count(distinct user_id) FROM public.cloudbrain" return x.SQL(countSql).Count() } +func GetCloudbrainTypeCount() ([]map[string]string, error) { + countSql := "SELECT type,count(*) num FROM public.cloudbrain group by type order by num desc" + return x.QueryString(countSql) +} + +func GetCloudbrainStatusCount() ([]map[string]string, error) { + countSql := "SELECT status,count(*) num FROM public.cloudbrain group by status order by num desc" + return x.QueryString(countSql) +} + +func GetCloudbrainTpyeDurationSum() ([]map[string]string, error) { + countSql := "SELECT type,sum(duration) FROM public.cloudbrain group by type order by sum(duration) desc" + return x.QueryString(countSql) +} + func GetRecordBeginTime() ([]*CloudbrainInfo, error) { sess := x.NewSession() defer sess.Close() diff --git a/models/repo.go b/models/repo.go index 6e5ee3ff6..2c4fda39b 100755 --- a/models/repo.go +++ b/models/repo.go @@ -679,7 +679,7 @@ func (repo *Repository) getAssignees(e Engine) (_ []*User, err error) { userIDs[i] = accesses[i].UserID } - if err = e.In("id", userIDs).Find(&users); err != nil { + if err = e.In("id", userIDs).OrderBy("name asc").Find(&users); err != nil { return nil, err } } diff --git a/modules/grampus/grampus.go b/modules/grampus/grampus.go index d72a7b10e..7bacb46d3 
100755 --- a/modules/grampus/grampus.go +++ b/modules/grampus/grampus.go @@ -1,6 +1,7 @@ package grampus import ( + "code.gitea.io/gitea/modules/cloudbrain" "encoding/json" "strings" @@ -73,6 +74,7 @@ type GenerateTrainJobReq struct { PreTrainModelPath string PreTrainModelUrl string Spec *models.Specification + CodeName string } func getEndPoint() string { @@ -102,6 +104,7 @@ func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (err error centerID, centerName := getCentersParamter(ctx, req) var datasetGrampus, modelGrampus []models.GrampusDataset + var codeGrampus models.GrampusDataset if ProcessorTypeNPU == req.ProcessType { datasetGrampus = getDatasetGrampus(req.DatasetInfos) if len(req.ModelName) != 0 { @@ -114,6 +117,12 @@ func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (err error }, } } + codeGrampus = models.GrampusDataset{ + Name: req.CodeName, + Bucket: setting.Bucket, + EndPoint: getEndPoint(), + ObjectKey: req.CodeObsPath + cloudbrain.DefaultBranchName + ".zip", + } } jobResult, err := createJob(models.CreateGrampusJobRequest{ @@ -130,6 +139,8 @@ func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (err error ReplicaNum: 1, Datasets: datasetGrampus, Models: modelGrampus, + Code: codeGrampus, + BootFile: req.BootFile, }, }, }) diff --git a/modules/repository/create.go b/modules/repository/create.go index 0844c43c3..049fe3abf 100644 --- a/modules/repository/create.go +++ b/modules/repository/create.go @@ -8,6 +8,7 @@ import ( "fmt" "os" "strings" + "text/template" "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/git" @@ -34,7 +35,7 @@ func CreateRepository(doer, u *models.User, opts models.CreateRepoOptions) (_ *m Name: opts.Name, Alias: opts.Alias, LowerName: strings.ToLower(opts.Name), - Description: opts.Description, + Description: template.HTMLEscapeString(opts.Description), OriginalURL: opts.OriginalURL, OriginalServiceType: opts.GitServiceType, IsPrivate: opts.IsPrivate, diff --git 
a/modules/setting/setting.go b/modules/setting/setting.go index f65620a06..7332be5e9 100755 --- a/modules/setting/setting.go +++ b/modules/setting/setting.go @@ -394,7 +394,7 @@ var ( DefaultGitTreesPerPage int DefaultMaxBlobSize int64 }{ - EnableSwagger: true, + EnableSwagger: false, SwaggerURL: "", MaxResponseItems: 50, DefaultPagingNum: 30, diff --git a/options/locale/locale_en-US.ini b/options/locale/locale_en-US.ini index b81dfc857..282ff9418 100755 --- a/options/locale/locale_en-US.ini +++ b/options/locale/locale_en-US.ini @@ -1058,6 +1058,7 @@ modify_image=Modify Image image_exist=Image name has been used, please use a new one. image_committing=Image is submitting, please try again later. image_commit_fail=Failed to submit image, please try again later. +image_over_20g=Failed to submit image, the size of image can not be over 20GB. image_not_exist=Image does not exits. image_edit_fail=Failed to edit image, please try again later. image_delete_fail=Failed to delete image, please try again later. @@ -1373,7 +1374,9 @@ fork_guest_user = Sign in to fork this repository. copy_link = Copy copy_link_success = Link has been copied copy_link_error = Use ⌘C or Ctrl-C to copy +copy = Copy copied = Copied OK +copied_error = Copied error unwatch = Unwatch watch = Watch unstar = Unstar @@ -3207,11 +3210,11 @@ wrong_specification=You cannot use this specification, please choose another ite resource_use=Resource Occupancy job_name_rule = Please enter letters, numbers, _ and - up to 64 characters and cannot end with a dash (-). -train_dataset_path_rule = The dataset location is stored in the environment variable data_url, the pre-trained model is storaged in the environment ckpt_url, and the output path is stored in the environment variable train_url. -infer_dataset_path_rule = The dataset location is stored in the environment variable data_url, and the output path is stored in the environment variable result_url. 
+train_dataset_path_rule = The dataset location is stored in the run parameter data_url, the pre-trained model is stored in the run parameter ckpt_url, and the output path is stored in the run parameter train_url. +infer_dataset_path_rule = The dataset location is stored in the run parameter data_url, and the output path is stored in the run parameter result_url. view_sample = View sample -inference_output_path_rule = The inference output path is stored in the environment variable result_url. -model_file_path_rule=The model file location is stored in the environment variable ckpt_url +inference_output_path_rule = The inference output path is stored in the run parameter result_url. +model_file_path_rule=The model file location is stored in the run parameter ckpt_url model_file_postfix_rule = The supported format of the model file is [ckpt, pb, h5, json, pkl, pth, t7, pdparams, onnx, pbtxt, keras, mlmodel, cfg, pt] model_convert_postfix_rule = The supported format of the model file is [.pth, .pkl, .onnx, .mindir, .ckpt, .pb] delete_task = Delete task @@ -3231,6 +3234,7 @@ point_hr = Point/hr DEBUG = DEBUG SNN4IMAGENET = BENCHMARK BRAINSCORE = BENCHMARK +MODELSAFETY = BENCHMARK TRAIN = TRAIN INFERENCE = INFERENCE BENCHMARK = BENCHMARK @@ -3244,8 +3248,8 @@ Stopped_success_update_status_fail=Succeed in stopping th job, but failed to upd load_code_failed=Fail to load code, please check if the right branch is selected.
error.dataset_select = dataset select error:the count exceed the limit or has same name -new_train_gpu_tooltips = The code is storaged in %s, the dataset is storaged in %s, the pre-trained model is storaged in the environment %s, and please put your model into %s then you can download it online -new_train_npu_tooltips = The code is storaged in %s, the pre-trained model is storaged in the environment %s, and please put your model into %s then you can download it online +new_train_gpu_tooltips = The code is stored in %s, the dataset is stored in %s, the pre-trained model is stored in the run parameter %s, and please put your model into %s then you can download it online +new_train_npu_tooltips = The code is stored in %s, the pre-trained model is stored in the run parameter %s, and please put your model into %s then you can download it online new_infer_gpu_tooltips = The dataset is stored in %s, the model file is stored in %s, please store the inference output in %s for subsequent downloads.
[points] diff --git a/options/locale/locale_zh-CN.ini b/options/locale/locale_zh-CN.ini index b0bcf708b..979da91d5 100755 --- a/options/locale/locale_zh-CN.ini +++ b/options/locale/locale_zh-CN.ini @@ -1059,6 +1059,7 @@ modify_image=修改镜像 image_exist=镜像Tag已被使用,请修改镜像Tag。 image_committing=镜像正在提交中,请稍后再试。 image_commit_fail=提交镜像失败,请稍后再试。 +image_over_20g=提交镜像失败,镜像大小不能超过20GB。 image_not_exist=镜像不存在。 image_edit_fail=编辑镜像失败,请稍后再试。 image_delete_fail=删除镜像失败,请稍后再试。 @@ -1389,7 +1390,9 @@ fork_guest_user=登录并 派生 这个项目。 copy_link=复制链接 copy_link_success=已复制链接 copy_link_error=请按下 ⌘-C 或 Ctrl-C 复制 +copy=复制 copied=复制成功 +copied_error=复制失败 unwatch=取消关注 watch=关注 unstar=取消点赞 @@ -2022,7 +2025,7 @@ settings.githooks=管理 Git 钩子 settings.basic_settings=基本设置 settings.mirror_settings=镜像设置 settings.sync_mirror=同步 -settings.mirror_sync_in_progress=镜像同步正在进行中,请稍后后再试。 +settings.mirror_sync_in_progress=镜像同步正在进行中,请稍后再试。 settings.email_notifications.enable=启用邮件通知 settings.email_notifications.onmention=只在被提到时邮件通知 settings.email_notifications.disable=停用邮件通知 @@ -3225,11 +3228,11 @@ card_type = 卡类型 wrong_specification=您目前不能使用这个资源规格,请选择其他资源规格。 job_name_rule = 请输入字母、数字、_和-,最长64个字符,且不能以中划线(-)结尾。 -train_dataset_path_rule = 数据集位置存储在环境变量data_url中,预训练模型存放在环境变量ckpt_url中,训练输出路径存储在环境变量train_url中。 -infer_dataset_path_rule = 数据集位置存储在环境变量data_url中,推理输出路径存储在环境变量result_url中。 +train_dataset_path_rule = 数据集位置存储在运行参数 data_url 中,预训练模型存放在运行参数 ckpt_url 中,训练输出路径存储在运行参数 train_url 中。 +infer_dataset_path_rule = 数据集位置存储在运行参数 data_url 中,推理输出路径存储在运行参数 result_url 中。 view_sample = 查看样例 -inference_output_path_rule = 推理输出路径存储在环境变量result_url中。 -model_file_path_rule = 模型文件位置存储在环境变量ckpt_url中。 +inference_output_path_rule = 推理输出路径存储在运行参数 result_url 中。 +model_file_path_rule = 模型文件位置存储在运行参数 ckpt_url 中。 model_file_postfix_rule = 模型文件支持的格式为 [ckpt, pb, h5, json, pkl, pth, t7, pdparams, onnx, pbtxt, keras, mlmodel, cfg, pt] model_convert_postfix_rule = 模型文件支持的格式为 [.pth, .pkl, .onnx, .mindir, .ckpt, .pb] delete_task = 删除任务 @@ -3249,6 +3252,7 @@ point_hr 
= 积分/时 DEBUG = 调试任务 SNN4IMAGENET = 评测任务 BRAINSCORE = 评测任务 +MODELSAFETY = 评测任务 TRAIN = 训练任务 INFERENCE = 推理任务 BENCHMARK = 评测任务 @@ -3263,9 +3267,9 @@ load_code_failed=代码加载失败,请确认选择了正确的分支。 error.dataset_select = 数据集选择错误:数量超过限制或者有同名数据集 -new_train_gpu_tooltips =训练脚本存储在%s中,数据集存储在%s中,预训练模型存放在环境变量%s中,训练输出请存储在%s中以供后续下载。 -new_train_npu_tooltips =训练脚本存储在%s中,预训练模型存放在环境变量%s中,训练输出请存储在%s中以供后续下载。 -new_infer_gpu_tooltips = 数据集存储在%s中,模型文件存储在%s中,推理输出请存储在%s中以供后续下载。 +new_train_gpu_tooltips = 训练脚本存储在 %s 中,数据集存储在 %s 中,预训练模型存放在运行参数 %s 中,训练输出请存储在 %s 中以供后续下载。 +new_train_npu_tooltips = 训练脚本存储在 %s 中,预训练模型存放在运行参数 %s 中,训练输出请存储在 %s 中以供后续下载。 +new_infer_gpu_tooltips = 数据集存储在 %s 中,模型文件存储在 %s 中,推理输出请存储在 %s 中以供后续下载。 [points] points = 积分 diff --git a/routers/admin/cloudbrains.go b/routers/admin/cloudbrains.go index fcb878627..96db935fe 100755 --- a/routers/admin/cloudbrains.go +++ b/routers/admin/cloudbrains.go @@ -98,8 +98,6 @@ func CloudBrains(ctx *context.Context) { ciTasks[i].CanDebug = true ciTasks[i].CanDel = true ciTasks[i].Cloudbrain.ComputeResource = task.ComputeResource - ciTasks[i].Cloudbrain.AiCenter = repo.GetCloudbrainAiCenter(task.Cloudbrain, ctx) - ciTasks[i].Cloudbrain.Cluster = repo.GetCloudbrainCluster(task.Cloudbrain, ctx) } pager := context.NewPagination(int(count), setting.UI.IssuePagingNum, page, getTotalPage(count, setting.UI.IssuePagingNum)) @@ -179,6 +177,7 @@ func DownloadCloudBrains(ctx *context.Context) { log.Warn("Can not get cloud brain info", err) continue } + models.LoadSpecs4CloudbrainInfo(pageRecords) for _, record := range pageRecords { for k, v := range allValues(row, record, ctx) { @@ -208,8 +207,11 @@ func allValues(row int, rs *models.CloudbrainInfo, ctx *context.Context) map[str } func getCloudbrainCardType(rs *models.CloudbrainInfo) string { - _, cardType, _ := repo.GetCloudbrainCardNumAndType(rs.Cloudbrain) - return cardType + if rs.Cloudbrain.Spec != nil { + return rs.Cloudbrain.Spec.AccCardType + } else { + return "" + } } func getRepoPathName(rs 
*models.CloudbrainInfo) string { diff --git a/routers/admin/resources.go b/routers/admin/resources.go index 8a8c55f86..1d3870a14 100644 --- a/routers/admin/resources.go +++ b/routers/admin/resources.go @@ -221,6 +221,7 @@ func GetResourceSceneList(ctx *context.Context) { func AddResourceScene(ctx *context.Context, req models.ResourceSceneReq) { req.CreatorId = ctx.User.ID + req.ExclusiveOrg = strings.ReplaceAll(req.ExclusiveOrg, " ", "") err := resource.AddResourceScene(req) if err != nil { log.Error("AddResourceScene error. %v", err) @@ -238,6 +239,7 @@ func UpdateResourceScene(ctx *context.Context, req models.ResourceSceneReq) { var err error switch action { case "edit": + req.ExclusiveOrg = strings.ReplaceAll(req.ExclusiveOrg, " ", "") err = resource.UpdateResourceScene(req) case "delete": err = resource.DeleteResourceScene(id) diff --git a/routers/api/v1/api.go b/routers/api/v1/api.go index 6dd1cb32f..8e1d725ed 100755 --- a/routers/api/v1/api.go +++ b/routers/api/v1/api.go @@ -590,6 +590,7 @@ func RegisterRoutes(m *macaron.Macaron) { m.Get("/downloadAll", repo.DownloadCloudBrainBoard) m.Group("/cloudbrain", func() { m.Get("/overview", repo.GetAllCloudbrainsOverview) + m.Get("/overview_duration", repo.GetOverviewDuration) m.Get("/distribution", repo.GetAllCloudbrainsPeriodDistribution) m.Get("/trend", repo.GetAllCloudbrainsTrend) m.Get("/trend_detail_data", repo.GetAllCloudbrainsTrendDetail) diff --git a/routers/api/v1/repo/cloudbrain.go b/routers/api/v1/repo/cloudbrain.go index 439190121..7324b6466 100755 --- a/routers/api/v1/repo/cloudbrain.go +++ b/routers/api/v1/repo/cloudbrain.go @@ -577,7 +577,6 @@ func getLogFromModelDir(jobName string, startLine int, endLine int, resultPath s r := bufio.NewReader(reader) for i := 0; i < endLine; i++ { line, error := r.ReadString('\n') - log.Info("line=" + line) if error == io.EOF { log.Info("read file completed.") break diff --git a/routers/api/v1/repo/cloudbrain_dashboard.go b/routers/api/v1/repo/cloudbrain_dashboard.go 
index 9d237d8e3..54c0ddc20 100755 --- a/routers/api/v1/repo/cloudbrain_dashboard.go +++ b/routers/api/v1/repo/cloudbrain_dashboard.go @@ -19,6 +19,7 @@ type DateCloudbrainNum struct { CloudOneJobTypeRes map[string]int `json:"cloudOneJobTypeRes"` CloudTwoJobTypeRes map[string]int `json:"cloudTwoJobTypeRes"` IntelligentNetJobTypeRes map[string]int `json:"intelligentNetJobTypeRes"` + CDCenterJobTypeRes map[string]int `json:"cDCenterJobTypeRes"` CloudBrainPeriodNum map[int]int `json:"cloudBrainPeriodNum"` CloudBrainComputeResource map[string]int `json:"cloudBrainComputeResource"` } @@ -53,20 +54,90 @@ func GetAllCloudbrainsOverview(ctx *context.Context) { log.Error("Can not query todayCreatorCount.", err) return } + cloudbrainTypeCount, err := models.GetCloudbrainTypeCount() + log.Info("cloudbrainTypeCount:", cloudbrainTypeCount) + if err != nil { + log.Error("Can not query cloudbrainTypeCount.", err) + return + } + + cloudbrainTpyeDurationSum, err := models.GetCloudbrainTpyeDurationSum() + log.Info("cloudbrainTpyeDurationSum:", cloudbrainTpyeDurationSum) + if err != nil { + log.Error("Can not query cloudbrainTpyeDurationSum.", err) + return + } + + todayCloudbrainCount, err := models.GetTodayCloudbrainCount(beginTime, endTime) + log.Info("todayCloudbrainCount:", todayCloudbrainCount) + if err != nil { + log.Error("Can not query todayCloudbrainCount.", err) + return + } + + todayRunningCount, err := models.GetTodayRunningCount(beginTime, endTime) + log.Info("todayRunningCount:", todayRunningCount) + if err != nil { + log.Error("Can not query todayRunningCount.", err) + return + } + + todayWaitingCount, err := models.GetTodayWaitingCount(beginTime, endTime) + log.Info("todayWaittingCount:", todayWaitingCount) + if err != nil { + log.Error("Can not query todayWaitingCount.", err) + return + } + + todayCompletedCount := todayCloudbrainCount - todayRunningCount - todayWaitingCount + log.Info("todayCompletedCount:", todayCompletedCount) + creatorCount, err := 
models.GetCreatorCount() if err != nil { log.Error("Can not query creatorCount.", err) return } - todayStatusResult := make(map[string]int) - cloudBrainNum := make(map[int]int) - cloudBrainOneDuration := int64(0) - cloudBrainTwoDuration := int64(0) - intelligentNetDuration := int64(0) - todayNewJobCount := 0 + + ctx.JSON(http.StatusOK, map[string]interface{}{ + "recordBeginTime": recordBeginTime, + "updateTime": now.Unix(), + "todayCreatorCount": todayCreatorCount, + "creatorCount": creatorCount, + "todayRunningCount": todayRunningCount, + "todayCompletedCount": todayCompletedCount, + "todayWaitingCount": todayWaitingCount, + "todayNewJobCount": todayCloudbrainCount, + "cloudbrainTypeCount": cloudbrainTypeCount, + }) +} +func GetOverviewDuration(ctx *context.Context) { + recordCloudbrain, err := models.GetRecordBeginTime() + if err != nil { + log.Error("Can not get recordCloudbrain", err) + ctx.Error(http.StatusBadRequest, ctx.Tr("repo.record_begintime_get_err")) + return + } + recordBeginTime := recordCloudbrain[0].Cloudbrain.CreatedUnix + now := time.Now() + endTime := now page := 1 - pagesize := 1000 + pagesize := 10000 count := pagesize + worker_server_num := 1 + cardNum := 1 + durationAllSum := int64(0) + cardDuSum := int64(0) + + cloudBrainOneCardDuSum := int64(0) + cloudBrainTwoCardDuSum := int64(0) + c2NetCardDuSum := int64(0) + cDNetCardDuSum := int64(0) + + cloudBrainOneDuration := int64(0) + cloudBrainTwoDuration := int64(0) + c2NetDuration := int64(0) + cDCenterDuration := int64(0) + for count == pagesize && count != 0 { cloudbrains, _, err := models.CloudbrainAllStatic(&models.CloudbrainsOptions{ ListOptions: models.ListOptions{ @@ -81,72 +152,53 @@ func GetAllCloudbrainsOverview(ctx *context.Context) { ctx.ServerError("Get cloudbrains failed:", err) return } + models.LoadSpecs4CloudbrainInfo(cloudbrains) for _, cloudbrain := range cloudbrains { - if int64(cloudbrain.Cloudbrain.CreatedUnix) >= beginTime.Unix() && 
int64(cloudbrain.Cloudbrain.CreatedUnix) < endTime.Unix() { - todayNewJobCount += 1 - if _, ok := todayStatusResult[cloudbrain.Status]; !ok { - todayStatusResult[cloudbrain.Status] = 1 - } else { - todayStatusResult[cloudbrain.Status] += 1 - } + if cloudbrain.Cloudbrain.WorkServerNumber >= 1 { + worker_server_num = cloudbrain.Cloudbrain.WorkServerNumber + } else { + worker_server_num = 1 } - - if _, ok := cloudBrainNum[cloudbrain.Cloudbrain.Type]; !ok { - cloudBrainNum[cloudbrain.Cloudbrain.Type] = 1 + if cloudbrain.Cloudbrain.Spec == nil { + cardNum = 1 } else { - cloudBrainNum[cloudbrain.Cloudbrain.Type] += 1 + cardNum = cloudbrain.Cloudbrain.Spec.AccCardsNum } - + duration := cloudbrain.Duration + durationSum := cloudbrain.Duration * int64(worker_server_num) * int64(cardNum) if cloudbrain.Cloudbrain.Type == models.TypeCloudBrainOne { - cloudBrainOneDuration = cloudBrainOneDuration + cloudbrain.Cloudbrain.Duration - } - if cloudbrain.Cloudbrain.Type == models.TypeCloudBrainTwo { - cloudBrainTwoDuration = cloudBrainTwoDuration + cloudbrain.Cloudbrain.Duration + cloudBrainOneDuration += duration + cloudBrainOneCardDuSum += durationSum + } else if cloudbrain.Cloudbrain.Type == models.TypeCloudBrainTwo { + cloudBrainTwoDuration += duration + cloudBrainTwoCardDuSum += durationSum + } else if cloudbrain.Cloudbrain.Type == models.TypeC2Net { + c2NetDuration += duration + c2NetCardDuSum += durationSum + } else if cloudbrain.Cloudbrain.Type == models.TypeCDCenter { + cDCenterDuration += duration + cDNetCardDuSum += durationSum } - if cloudbrain.Cloudbrain.Type == models.TypeC2Net { - intelligentNetDuration = intelligentNetDuration + cloudbrain.Cloudbrain.Duration - } - - } - count = len(cloudbrains) - page += 1 - } - statusNameList := []string{string(models.ModelArtsTrainJobCompleted), string(models.JobFailed), string(models.ModelArtsTrainJobInit), - string(models.JobRunning), string(models.ModelArtsStartFailed), string(models.JobStopped), string(models.JobSucceeded), - 
string(models.JobWaiting), string(models.ModelArtsTrainJobKilled)} - for _, v := range statusNameList { - if _, ok := todayStatusResult[v]; !ok { - todayStatusResult[v] = 0 - } - } - - cloudBrainTypeList := []int{0, 1, 2} - for _, v := range cloudBrainTypeList { - if _, ok := cloudBrainNum[v]; !ok { - cloudBrainNum[v] = 0 + durationAllSum += duration + cardDuSum += durationSum + count = len(cloudbrains) + page += 1 } } - - todayRunningCount := todayStatusResult[string(models.JobRunning)] - todayCompletedCount := todayStatusResult[string(models.ModelArtsTrainJobCompleted)] + todayStatusResult[string(models.JobFailed)] + - todayStatusResult[string(models.ModelArtsStartFailed)] + todayStatusResult[string(models.JobStopped)] + todayStatusResult[string(models.JobSucceeded)] + todayStatusResult[string(models.ModelArtsTrainJobKilled)] - todayWaitingCount := todayStatusResult[string(models.ModelArtsTrainJobInit)] + todayStatusResult[string(models.JobWaiting)] - ctx.JSON(http.StatusOK, map[string]interface{}{ - "recordBeginTime": recordBeginTime, - "updateTime": now.Unix(), - "cloudBrainNum": cloudBrainNum, - "cloudBrainOneDuration": cloudBrainOneDuration, - "cloudBrainTwoDuration": cloudBrainTwoDuration, - "intelligentNetDuration": intelligentNetDuration, - "todayCreatorCount": todayCreatorCount, - "creatorCount": creatorCount, - "todayRunningCount": todayRunningCount, - "todayCompletedCount": todayCompletedCount, - "todayWaitingCount": todayWaitingCount, - "todayNewJobCount": todayNewJobCount, + "cloudBrainOneCardDuSum": cloudBrainOneCardDuSum, + "cloudBrainTwoCardDuSum": cloudBrainTwoCardDuSum, + "c2NetCardDuSum": c2NetCardDuSum, + "cDNetCardDuSum": cDNetCardDuSum, + "cardDuSum": cardDuSum, + + "cloudBrainOneDuration": cloudBrainOneDuration, + "cloudBrainTwoDuration": cloudBrainTwoDuration, + "c2NetDuration": c2NetDuration, + "cDCenterDuration": cDCenterDuration, + "durationSum": durationAllSum, }) } @@ -500,6 +552,7 @@ func GetAllCloudbrainsPeriodDistribution(ctx 
*context.Context) { cloudOneJobTypeRes := make(map[string]int) cloudTwoJobTypeRes := make(map[string]int) intelligentNetJobTypeRes := make(map[string]int) + cDCenterJobTypeRes := make(map[string]int) cloudBrainPeriodNum := make(map[int]int) cloudBrainComputeResource := make(map[string]int) beginTimeTemp := beginTime.Unix() @@ -508,9 +561,9 @@ func GetAllCloudbrainsPeriodDistribution(ctx *context.Context) { } page := 1 - pagesize := 1000 + pagesize := 10000 count := pagesize - //Each time a maximum of 1000 pieces of data are detected to the memory, batch processing + //Each time a maximum of 10000 pieces of data are detected to the memory, batch processing for count == pagesize && count != 0 { cloudbrains, _, err := models.CloudbrainAllStatic(&models.CloudbrainsOptions{ ListOptions: models.ListOptions{ @@ -548,6 +601,13 @@ func GetAllCloudbrainsPeriodDistribution(ctx *context.Context) { intelligentNetJobTypeRes[cloudbrain.JobType] += 1 } } + if cloudbrain.Cloudbrain.Type == models.TypeCDCenter { + if _, ok := cDCenterJobTypeRes[cloudbrain.JobType]; !ok { + cDCenterJobTypeRes[cloudbrain.JobType] = 1 + } else { + cDCenterJobTypeRes[cloudbrain.JobType] += 1 + } + } if _, ok := cloudBrainPeriodNum[cloudbrain.Cloudbrain.Type]; !ok { cloudBrainPeriodNum[cloudbrain.Cloudbrain.Type] = 1 @@ -577,8 +637,11 @@ func GetAllCloudbrainsPeriodDistribution(ctx *context.Context) { if _, ok := intelligentNetJobTypeRes[v]; !ok { intelligentNetJobTypeRes[v] = 0 } + if _, ok := cDCenterJobTypeRes[v]; !ok { + cDCenterJobTypeRes[v] = 0 + } } - cloudBrainTypeList := []int{0, 1, 2} + cloudBrainTypeList := []int{0, 1, 2, 3} for _, v := range cloudBrainTypeList { if _, ok := cloudBrainPeriodNum[v]; !ok { cloudBrainPeriodNum[v] = 0 @@ -592,77 +655,30 @@ func GetAllCloudbrainsPeriodDistribution(ctx *context.Context) { } } - cloudOneJobTypeRes["EVALUATION"] = cloudOneJobTypeRes[string(models.JobTypeBenchmark)] + cloudOneJobTypeRes[string(models.JobTypeSnn4imagenet)] + 
cloudOneJobTypeRes[string(models.JobTypeBrainScore)] - cloudTwoJobTypeRes["EVALUATION"] = cloudTwoJobTypeRes[string(models.JobTypeBenchmark)] + cloudTwoJobTypeRes[string(models.JobTypeSnn4imagenet)] + cloudTwoJobTypeRes[string(models.JobTypeBrainScore)] - intelligentNetJobTypeRes["EVALUATION"] = intelligentNetJobTypeRes[string(models.JobTypeBenchmark)] + intelligentNetJobTypeRes[string(models.JobTypeSnn4imagenet)] + intelligentNetJobTypeRes[string(models.JobTypeBrainScore)] + cloudOneJobTypeRes["EVALUATION"] = cloudBrainPeriodNum[0] - cloudOneJobTypeRes[string(models.JobTypeTrain)] - cloudOneJobTypeRes[string(models.JobTypeInference)] - cloudOneJobTypeRes[string(models.JobTypeDebug)] + cloudTwoJobTypeRes["EVALUATION"] = cloudBrainPeriodNum[1] - cloudTwoJobTypeRes[string(models.JobTypeTrain)] - cloudTwoJobTypeRes[string(models.JobTypeInference)] - cloudTwoJobTypeRes[string(models.JobTypeDebug)] + intelligentNetJobTypeRes["EVALUATION"] = cloudBrainPeriodNum[2] - intelligentNetJobTypeRes[string(models.JobTypeTrain)] - intelligentNetJobTypeRes[string(models.JobTypeInference)] - intelligentNetJobTypeRes[string(models.JobTypeDebug)] + cDCenterJobTypeRes["EVALUATION"] = cloudBrainPeriodNum[3] - cDCenterJobTypeRes[string(models.JobTypeTrain)] - cDCenterJobTypeRes[string(models.JobTypeInference)] - cDCenterJobTypeRes[string(models.JobTypeDebug)] ctx.JSON(http.StatusOK, map[string]interface{}{ "cloudOneJobTypeRes": cloudOneJobTypeRes, "cloudTwoJobTypeRes": cloudTwoJobTypeRes, "intelligentNetJobTypeRes": intelligentNetJobTypeRes, + "cDCenterJobTypeRes": cDCenterJobTypeRes, "cloudBrainPeriodNum": cloudBrainPeriodNum, "cloudBrainComputeResource": cloudBrainComputeResource, }) } func GetCloudbrainsStatusAnalysis(ctx *context.Context) { - recordCloudbrain, err := models.GetRecordBeginTime() + cloudbrainStatusCount, err := models.GetCloudbrainStatusCount() + log.Info("cloudbrainStatusCount:", cloudbrainStatusCount) if err != nil { - log.Error("Can not get recordCloudbrain", err) - 
ctx.Error(http.StatusBadRequest, ctx.Tr("repo.record_begintime_get_err")) + log.Error("Can not query cloudbrainStatusCount.", err) return } - recordBeginTime := recordCloudbrain[0].Cloudbrain.CreatedUnix - endTime := time.Now() - cloudBrainStatusResult := make(map[string]int) - cloudBrainStatusAnalysis := make(map[string]int) - totalCount := 0 - - page := 1 - pagesize := 1000 - count := pagesize - for count == pagesize && count != 0 { - cloudbrains, _, err := models.CloudbrainAllStatic(&models.CloudbrainsOptions{ - ListOptions: models.ListOptions{ - Page: page, - PageSize: pagesize, - }, - Type: models.TypeCloudBrainAll, - BeginTimeUnix: int64(recordBeginTime), - EndTimeUnix: endTime.Unix(), - }) - if err != nil { - ctx.ServerError("Get cloudbrains failed:", err) - return - } - for _, cloudbrain := range cloudbrains { - if _, ok := cloudBrainStatusResult[cloudbrain.Status]; !ok { - cloudBrainStatusResult[cloudbrain.Status] = 1 - } else { - cloudBrainStatusResult[cloudbrain.Status] += 1 - } - } - count = len(cloudbrains) - totalCount = totalCount + count - page += 1 - } - - var jobStatuses []string - jobStatuses = append(jobStatuses, string(models.ModelArtsTrainJobWaiting), string(models.ModelArtsTrainJobFailed), string(models.ModelArtsRunning), string(models.ModelArtsTrainJobCompleted), - string(models.ModelArtsStarting), string(models.ModelArtsRestarting), string(models.ModelArtsStartFailed), - string(models.ModelArtsStopping), string(models.ModelArtsStopped), string(models.JobSucceeded)) - jobStatusesCount := 0 - for _, v := range jobStatuses { - if _, ok := cloudBrainStatusResult[v]; !ok { - cloudBrainStatusAnalysis[v] = 0 - } else { - cloudBrainStatusAnalysis[v] = cloudBrainStatusResult[v] - } - jobStatusesCount = jobStatusesCount + cloudBrainStatusResult[v] - } - cloudBrainStatusAnalysis["OTHER"] = totalCount - jobStatusesCount ctx.JSON(http.StatusOK, map[string]interface{}{ - "cloudBrainStatusResult": cloudBrainStatusAnalysis, + "cloudbrainStatusCount": 
cloudbrainStatusCount, }) } @@ -738,7 +754,6 @@ func GetCloudbrainsDetailData(ctx *context.Context) { tasks := []models.TaskDetail{} for i, task := range ciTasks { ciTasks[i].Cloudbrain.ComputeResource = task.ComputeResource - var taskDetail models.TaskDetail taskDetail.ID = ciTasks[i].Cloudbrain.ID taskDetail.JobID = ciTasks[i].Cloudbrain.JobID @@ -758,11 +773,12 @@ func GetCloudbrainsDetailData(ctx *context.Context) { taskDetail.RepoName = ciTasks[i].Repo.OwnerName + "/" + ciTasks[i].Repo.Name taskDetail.RepoAlias = ciTasks[i].Repo.OwnerName + "/" + ciTasks[i].Repo.Alias } - taskDetail.CardNum, taskDetail.CardType, _ = repo.GetCloudbrainCardNumAndType(ciTasks[i].Cloudbrain) + if ciTasks[i].Cloudbrain.WorkServerNumber >= 1 { + taskDetail.WorkServerNum = int64(ciTasks[i].Cloudbrain.WorkServerNumber) + } else { + taskDetail.WorkServerNum = 1 + } taskDetail.CardDuration = repo.GetCloudbrainCardDuration(ciTasks[i].Cloudbrain) - taskDetail.AiCenter = repo.GetCloudbrainAiCenter(ciTasks[i].Cloudbrain, ctx) - taskDetail.FlavorName, _ = repo.GetCloudbrainFlavorName(ciTasks[i].Cloudbrain) - taskDetail.WaitTime = repo.GetCloudbrainWaitTime(ciTasks[i].Cloudbrain) if ciTasks[i].Cloudbrain.DeletedAt != nilTime || ciTasks[i].Repo == nil { @@ -787,17 +803,6 @@ func GetCloudbrainsDetailData(ctx *context.Context) { }) } -func getCloudbrainAiCenter(task models.Cloudbrain, ctx *context.Context) string { - if task.Type == models.TypeCloudBrainOne { - return ctx.Tr("repo.cloudbrain1") - } else if task.Type == models.TypeCloudBrainTwo { - return ctx.Tr("repo.cloudbrain2") - } else if task.Type == models.TypeC2Net { - return task.AiCenter - } - return "" -} - func GetCloudbrainsCreateHoursData(ctx *context.Context) { recordCloudbrain, err := models.GetRecordBeginTime() if err != nil { @@ -958,11 +963,12 @@ func GetRunningTop(ctx *context.Context) { }) } -func getCloudbrainCount(beginTime time.Time, endTime time.Time, cloudbrains []*models.CloudbrainInfo) (map[string]int, map[string]int, 
map[string]int, map[int]int, map[string]int) { +func getCloudbrainCount(beginTime time.Time, endTime time.Time, cloudbrains []*models.CloudbrainInfo) (map[string]int, map[string]int, map[string]int, map[string]int, map[int]int, map[string]int) { cloudOneJobTypeRes := make(map[string]int) cloudTwoJobTypeRes := make(map[string]int) intelligentNetJobTypeRes := make(map[string]int) + cDCenterJobTypeRes := make(map[string]int) cloudBrainPeriodNum := make(map[int]int) cloudBrainComputeResource := make(map[string]int) for _, cloudbrain := range cloudbrains { @@ -1005,7 +1011,7 @@ func getCloudbrainCount(beginTime time.Time, endTime time.Time, cloudbrains []*m } jobTypeList := []string{"DEBUG", "BENCHMARK", "INFERENCE", "TRAIN", "SNN4IMAGENET", "BRAINSCORE"} - cloudBrainTypeList := []int{0, 1, 2} + cloudBrainTypeList := []int{0, 1, 2, 3} for _, v := range jobTypeList { if _, ok := cloudOneJobTypeRes[v]; !ok { cloudOneJobTypeRes[v] = 0 @@ -1016,14 +1022,17 @@ func getCloudbrainCount(beginTime time.Time, endTime time.Time, cloudbrains []*m if _, ok := intelligentNetJobTypeRes[v]; !ok { intelligentNetJobTypeRes[v] = 0 } + if _, ok := cDCenterJobTypeRes[v]; !ok { + cDCenterJobTypeRes[v] = 0 + } } for _, v := range cloudBrainTypeList { if _, ok := cloudBrainPeriodNum[v]; !ok { cloudBrainPeriodNum[v] = 0 } } - cloudBrainPeriodNum[-1] = cloudBrainPeriodNum[0] + cloudBrainPeriodNum[1] + cloudBrainPeriodNum[2] - return cloudOneJobTypeRes, cloudTwoJobTypeRes, intelligentNetJobTypeRes, cloudBrainPeriodNum, cloudBrainComputeResource + cloudBrainPeriodNum[-1] = cloudBrainPeriodNum[0] + cloudBrainPeriodNum[1] + cloudBrainPeriodNum[2] + cloudBrainPeriodNum[3] + return cloudOneJobTypeRes, cloudTwoJobTypeRes, intelligentNetJobTypeRes, cDCenterJobTypeRes, cloudBrainPeriodNum, cloudBrainComputeResource } func getDayCloudbrainNum(beginTime time.Time, endTime time.Time) ([]DateCloudbrainNum, error) { @@ -1040,12 +1049,13 @@ func getDayCloudbrainNum(beginTime time.Time, endTime time.Time) 
([]DateCloudbra } dayCloudbrainNum := make([]DateCloudbrainNum, 0) for endTimeTemp.Before(endTime) || endTimeTemp.Equal(endTime) { - cloudOneJobTypeRes, cloudTwoJobTypeRes, intelligentNetJobTypeRes, cloudBrainPeriodNum, cloudBrainComputeResource := getCloudbrainCount(beginTime, endTimeTemp, cloudbrains) + cloudOneJobTypeRes, cloudTwoJobTypeRes, intelligentNetJobTypeRes, cDCenterJobTypeRes, cloudBrainPeriodNum, cloudBrainComputeResource := getCloudbrainCount(beginTime, endTimeTemp, cloudbrains) dayCloudbrainNum = append(dayCloudbrainNum, DateCloudbrainNum{ Date: beginTime.Format("2006/01/02"), CloudOneJobTypeRes: cloudOneJobTypeRes, CloudTwoJobTypeRes: cloudTwoJobTypeRes, IntelligentNetJobTypeRes: intelligentNetJobTypeRes, + CDCenterJobTypeRes: cDCenterJobTypeRes, CloudBrainPeriodNum: cloudBrainPeriodNum, CloudBrainComputeResource: cloudBrainComputeResource, }) @@ -1075,12 +1085,13 @@ func getMonthCloudbrainNum(beginTime time.Time, endTime time.Time) ([]DateCloudb return nil, err } for endTimeTemp.Before(endTime) || endTimeTemp.Equal(endTime) { - cloudOneJobTypeRes, cloudTwoJobTypeRes, intelligentNetJobTypeRes, cloudBrainPeriodNum, cloudBrainComputeResource := getCloudbrainCount(beginTime, endTimeTemp, cloudbrains) + cloudOneJobTypeRes, cloudTwoJobTypeRes, intelligentNetJobTypeRes, cDCenterJobTypeRes, cloudBrainPeriodNum, cloudBrainComputeResource := getCloudbrainCount(beginTime, endTimeTemp, cloudbrains) monthCloudbrainNum = append(monthCloudbrainNum, DateCloudbrainNum{ Date: beginTime.Format("2006/01"), CloudOneJobTypeRes: cloudOneJobTypeRes, CloudTwoJobTypeRes: cloudTwoJobTypeRes, IntelligentNetJobTypeRes: intelligentNetJobTypeRes, + CDCenterJobTypeRes: cDCenterJobTypeRes, CloudBrainPeriodNum: cloudBrainPeriodNum, CloudBrainComputeResource: cloudBrainComputeResource, }) @@ -1113,7 +1124,7 @@ func getDayCloudbrainInfo(beginTime time.Time, endTime time.Time) ([]DateCloudbr dayCloudbrainInfo := make([]DateCloudbrainInfo, 0) count := 0 for 
beginTime.Before(endTimeTemp) || beginTime.Equal(endTimeTemp) { - _, _, _, cloudBrainPeriodNum, cloudBrainComputeResource := getCloudbrainCount(endTimeTemp, endTime, cloudbrains) + _, _, _, _, cloudBrainPeriodNum, cloudBrainComputeResource := getCloudbrainCount(endTimeTemp, endTime, cloudbrains) dayCloudbrainInfo = append(dayCloudbrainInfo, DateCloudbrainInfo{ Date: endTimeTemp.Format("2006/01/02"), CloudBrainPeriodNum: cloudBrainPeriodNum, @@ -1144,7 +1155,7 @@ func getMonthCloudbrainInfo(beginTime time.Time, endTime time.Time) ([]DateCloud dayCloudbrainInfo := make([]DateCloudbrainInfo, 0) count := 0 for beginTime.Before(endTimeTemp) || beginTime.Equal(endTimeTemp) || (endTimeTemp.Before(beginTime) && beginTime.Before(endTime)) { - _, _, _, cloudBrainPeriodNum, cloudBrainComputeResource := getCloudbrainCount(endTimeTemp, endTime, cloudbrains) + _, _, _, _, cloudBrainPeriodNum, cloudBrainComputeResource := getCloudbrainCount(endTimeTemp, endTime, cloudbrains) dayCloudbrainInfo = append(dayCloudbrainInfo, DateCloudbrainInfo{ Date: endTimeTemp.Format("2006/01"), CloudBrainPeriodNum: cloudBrainPeriodNum, @@ -1205,6 +1216,7 @@ func DownloadCloudBrainBoard(ctx *context.Context) { log.Warn("Can not get cloud brain info", err) continue } + models.LoadSpecs4CloudbrainInfo(pageRecords) for _, record := range pageRecords { for k, v := range allCloudbrainValues(row, record, ctx) { @@ -1235,10 +1247,9 @@ func allCloudbrainHeader(ctx *context.Context) map[string]string { "H1": ctx.Tr("cloudbrain.card_duration"), "I1": ctx.Tr("repo.modelarts.train_job.start_time"), "J1": ctx.Tr("repo.modelarts.train_job.end_time"), "K1": ctx.Tr("repo.modelarts.computing_resources"), "L1": ctx.Tr("cloudbrain.card_type"), - "M1": ctx.Tr("repo.grampus.train_job.ai_center"), "N1": ctx.Tr("cloudbrain.resource_specification"), - "O1": ctx.Tr("repo.cloudbrain_creator"), "P1": ctx.Tr("repo.repo_name"), "Q1": ctx.Tr("repo.cloudbrain_task_name"), - "R1": ctx.Tr("repo.modelarts.deletetime")} - + "M1": 
ctx.Tr("repo.modelarts.train_job.amount_of_compute_node"), "N1": ctx.Tr("repo.grampus.train_job.ai_center"), + "O1": ctx.Tr("cloudbrain.resource_specification"), "P1": ctx.Tr("repo.cloudbrain_creator"), "Q1": ctx.Tr("repo.repo_name"), + "R1": ctx.Tr("repo.cloudbrain_task_name"), "S1": ctx.Tr("repo.modelarts.deletetime")} } func allCloudbrainValues(row int, rs *models.CloudbrainInfo, ctx *context.Context) map[string]string { return map[string]string{getCellName("A", row): rs.DisplayJobName, getCellName("B", row): getCloudbrainType(rs, ctx), getCellName("C", row): rs.Status, getCellName("D", row): rs.JobType, @@ -1246,9 +1257,16 @@ func allCloudbrainValues(row int, rs *models.CloudbrainInfo, ctx *context.Contex getCellName("G", row): rs.TrainJobDuration, getCellName("H", row): repo.GetCloudbrainCardDuration(rs.Cloudbrain), getCellName("I", row): getBrainStartTime(rs), getCellName("J", row): getBrainEndTime(rs), getCellName("K", row): rs.ComputeResource, getCellName("L", row): getCloudbrainCardType(rs), - getCellName("M", row): repo.GetCloudbrainAiCenter(rs.Cloudbrain, ctx), getCellName("N", row): getCloudbrainFlavorName(rs), - getCellName("O", row): rs.Name, getCellName("P", row): getBrainRepo(rs), - getCellName("Q", row): rs.JobName, getCellName("R", row): getBrainDeleteTime(rs), + getCellName("M", row): getWorkServerNum(rs), getCellName("N", row): repo.GetCloudbrainAiCenter(rs.Cloudbrain, ctx), + getCellName("O", row): getCloudbrainFlavorName(rs), getCellName("P", row): rs.Name, + getCellName("Q", row): getBrainRepo(rs), getCellName("R", row): rs.JobName, getCellName("S", row): getBrainDeleteTime(rs), + } +} +func getWorkServerNum(rs *models.CloudbrainInfo) string { + if rs.Cloudbrain.WorkServerNumber >= 1 { + return fmt.Sprint(rs.Cloudbrain.WorkServerNumber) + } else { + return "1" } } func getBrainRepo(rs *models.CloudbrainInfo) string { @@ -1287,11 +1305,14 @@ func getCloudbrainType(rs *models.CloudbrainInfo, ctx *context.Context) string { } } func 
getCloudbrainCardType(rs *models.CloudbrainInfo) string { - _, cardType, _ := repo.GetCloudbrainCardNumAndType(rs.Cloudbrain) - return cardType + if rs.Cloudbrain.Spec != nil { + return rs.Cloudbrain.Spec.AccCardType + } else { + return "" + } } func getCloudbrainFlavorName(rs *models.CloudbrainInfo) string { - flavorName, _ := repo.GetCloudbrainFlavorName(rs.Cloudbrain) + flavorName := repo.GetCloudbrainFlavorName(rs.Cloudbrain) return flavorName } diff --git a/routers/api/v1/repo/fork.go b/routers/api/v1/repo/fork.go index a753f192d..9416035e7 100644 --- a/routers/api/v1/repo/fork.go +++ b/routers/api/v1/repo/fork.go @@ -7,6 +7,7 @@ package repo import ( "fmt" + "html/template" "net/http" "code.gitea.io/gitea/models" @@ -118,7 +119,7 @@ func CreateFork(ctx *context.APIContext, form api.CreateForkOption) { forker = org } - fork, err := repo_service.ForkRepository(ctx.User, forker, repo, repo.Name, repo.Description, repo.Alias) + fork, err := repo_service.ForkRepository(ctx.User, forker, repo, repo.Name, template.HTMLEscapeString(repo.Description), repo.Alias) if err != nil { ctx.Error(http.StatusInternalServerError, "ForkRepository", err) return diff --git a/routers/repo/ai_model_manage.go b/routers/repo/ai_model_manage.go index 957028fc6..5b358b83b 100644 --- a/routers/repo/ai_model_manage.go +++ b/routers/repo/ai_model_manage.go @@ -8,6 +8,7 @@ import ( "net/http" "net/url" "path" + "regexp" "strings" "code.gitea.io/gitea/models" @@ -85,6 +86,7 @@ func saveModelByParameters(jobId string, versionName string, name string, versio accuracy["Precision"] = "" accuracyJson, _ := json.Marshal(accuracy) log.Info("accuracyJson=" + string(accuracyJson)) + aiTask.ContainerIp = "" aiTaskJson, _ := json.Marshal(aiTask) model := &models.AiModelManage{ @@ -635,6 +637,7 @@ func ShowSingleModel(ctx *context.Context) { userNameMap := queryUserName(userIds) for _, model := range models { + removeIpInfo(model) value := userNameMap[model.UserId] if value != nil { model.UserName = 
value.Name @@ -644,6 +647,13 @@ func ShowSingleModel(ctx *context.Context) { ctx.JSON(http.StatusOK, models) } +func removeIpInfo(model *models.AiModelManage) { + reg, _ := regexp.Compile(`[[:digit:]]{1,3}\.[[:digit:]]{1,3}\.[[:digit:]]{1,3}\.[[:digit:]]{1,3}`) + taskInfo := model.TrainTaskInfo + taskInfo = reg.ReplaceAllString(taskInfo, "") + model.TrainTaskInfo = taskInfo +} + func queryUserName(intSlice []int64) map[int64]*models.User { keys := make(map[int64]string) uniqueElements := []int64{} @@ -677,6 +687,7 @@ func ShowOneVersionOtherModel(ctx *context.Context) { userNameMap := queryUserName(userIds) for _, model := range aimodels { + removeIpInfo(model) value := userNameMap[model.UserId] if value != nil { model.UserName = value.Name @@ -793,6 +804,7 @@ func ShowModelPageInfo(ctx *context.Context) { userNameMap := queryUserName(userIds) for _, model := range modelResult { + removeIpInfo(model) value := userNameMap[model.UserId] if value != nil { model.UserName = value.Name @@ -866,6 +878,7 @@ func QueryModelListForPredict(ctx *context.Context) { nameMap := make(map[string][]*models.AiModelManage) for _, model := range modelResult { + removeIpInfo(model) if _, value := nameMap[model.Name]; !value { models := make([]*models.AiModelManage, 0) models = append(models, model) diff --git a/routers/repo/cloudbrain.go b/routers/repo/cloudbrain.go index 73aa9b791..dd44be6b2 100755 --- a/routers/repo/cloudbrain.go +++ b/routers/repo/cloudbrain.go @@ -760,8 +760,8 @@ func cloudBrainShow(ctx *context.Context, tpName base.TplName, jobType models.Jo ctx.Data["ExitDiagnostics"] = taskRes.TaskStatuses[0].ExitDiagnostics oldStatus := task.Status task.Status = taskRes.TaskStatuses[0].State + task.ContainerIp = "" task.ContainerID = taskRes.TaskStatuses[0].ContainerID - task.ContainerIp = taskRes.TaskStatuses[0].ContainerIP models.ParseAndSetDurationFromCloudBrainOne(jobRes, task) if task.DeletedAt.IsZero() { //normal record @@ -1147,6 +1147,8 @@ func CloudBrainCommitImage(ctx 
*context.Context, form auth.CommitImageCloudBrain } else if models.IsErrorImageCommitting(err) { ctx.JSON(200, models.BaseErrorMessage(ctx.Tr("repo.image_committing"))) + } else if isOver20GError(err) { + ctx.JSON(200, models.BaseErrorMessage(ctx.Tr("repo.image_over_20g"))) } else { ctx.JSON(200, models.BaseErrorMessage(ctx.Tr("repo.image_commit_fail"))) } @@ -1156,6 +1158,10 @@ func CloudBrainCommitImage(ctx *context.Context, form auth.CommitImageCloudBrain ctx.JSON(200, models.BaseOKMessage) } +func isOver20GError(err error) bool { + return strings.Contains(err.Error(), "over max image size 20GB") +} + func checkTopics(Topics string) ([]string, string) { var topics = make([]string, 0) var topicsStr = strings.TrimSpace(Topics) @@ -2813,16 +2819,11 @@ func GetBenchmarkTypes(ctx *context.Context) *models.BenchmarkTypes { } func GetCloudbrainAiCenter(task models.Cloudbrain, ctx *context.Context) string { - if task.Type == models.TypeCloudBrainOne { - return ctx.Tr("repo.cloudbrain1") - } else if task.Type == models.TypeCloudBrainTwo { - return ctx.Tr("repo.cloudbrain2") - } else if task.Type == models.TypeCDCenter { - return ctx.Tr("repo.cdCenter") - } else if task.Type == models.TypeC2Net { - return getCutStringAiCenterByAiCenter(task.AiCenter) + if task.Spec != nil { + return task.Spec.AiCenterName + } else { + return "" } - return "" } func getCutStringAiCenterByAiCenter(aiCenter string) string { if aiCenter == "" { @@ -2841,8 +2842,24 @@ func GetCloudbrainCluster(task models.Cloudbrain, ctx *context.Context) string { return "" } func GetCloudbrainCardDuration(task models.Cloudbrain) string { - cardNum, _, _ := GetCloudbrainCardNumAndType(task) - cardDuration := models.ConvertDurationToStr(int64(cardNum) * task.Duration) + cardNum := int(0) + spec, err := resource.GetCloudbrainSpec(task.ID) + if err != nil { + log.Info("error:" + err.Error()) + return "" + } + if spec != nil { + cardNum = spec.AccCardsNum + } else { + cardNum = 1 + } + var workServerNumber int64 + 
if task.WorkServerNumber >= 1 { + workServerNumber = int64(task.WorkServerNumber) + } else { + workServerNumber = 1 + } + cardDuration := models.ConvertDurationToStr(workServerNumber * int64(cardNum) * task.Duration) return cardDuration } func GetCloudbrainWaitTime(task models.Cloudbrain) string { @@ -2869,114 +2886,12 @@ func GetCloudbrainWaitTime(task models.Cloudbrain) string { } return waitTime } - -func GetCloudbrainCardNumAndType(task models.Cloudbrain) (int, string, error) { - if !models.SpecsMapInitFlag { - models.InitCloudbrainOneResourceSpecMap() - } - if !models.GpuInfosMapInitFlag { - models.InitCloudbrainOneGpuInfoMap() - } - flavorName, err := GetCloudbrainFlavorName(task) - if err != nil { - return 0, "", nil - } - return getCardNumAndTypeByFlavorname(flavorName) -} - -func getCardNumAndTypeByFlavorname(FlavorName string) (int, string, error) { - if FlavorName == "" { - return 0, "", nil - } else { - var beginIndex = strings.Index(FlavorName, ":") - var lastIndex = strings.LastIndex(FlavorName, ":") - var endIndex = strings.Index(FlavorName, "*") - if endIndex >= (beginIndex+1) && lastIndex >= (endIndex+1) { - cardNum, err := strconv.Atoi(strings.TrimSpace(FlavorName[beginIndex+1 : endIndex])) - if err != nil { - log.Error("strconv.Atoi failed: %v", err) - return 0, "", err - } - cardType := strings.TrimSpace(FlavorName[endIndex+1 : lastIndex]) - return cardNum, cardType, err - } - return 0, "", nil - } -} - -func GetCloudbrainFlavorName(task models.Cloudbrain) (string, error) { - if task.Type == models.TypeCloudBrainOne { - resourceSpec, gpuInfo, err := getCloudBrainOneResourceSpec(task) - if err != nil { - log.Info("getCloudBrainOneResourceSpec err:", err) - return "", err - } else { - if resourceSpec == nil || gpuInfo == nil { - err := errors.New("resourceSpec or gpuInfo is nil") - return "", err - } else { - CloudbrainOneFlavorName := "GPU:" + strconv.Itoa(resourceSpec.GpuNum) + "*Nvidia-" + gpuInfo.Value + - " | CPU:" + 
strconv.Itoa(resourceSpec.CpuNum) + "核" + strconv.Itoa(resourceSpec.MemMiB) + "MB" - return CloudbrainOneFlavorName, nil - } - } - } else if (task.Type == models.TypeCloudBrainTwo || task.Type == models.TypeC2Net || task.Type == models.TypeCDCenter) && task.FlavorName != "" { - replaceFlavorName := strings.ReplaceAll(task.FlavorName, ":", ":") - return replaceFlavorName, nil - } else if (task.Type == models.TypeCloudBrainTwo || task.Type == models.TypeCDCenter) && task.FlavorName == "" && task.FlavorCode != "" { - cloudbrainTwoFlavorName := getFlavorNameByFlavorCode(task.FlavorCode) - return cloudbrainTwoFlavorName, nil - } else if task.Type == models.TypeCloudBrainTwo && task.JobType == string(models.JobTypeDebug) && task.FlavorName == "" && task.FlavorCode == "" { - tasks, err := models.GetModelartsReDebugTaskByJobId(task.JobID) - if err != nil { - return "", err - } - if len(tasks) >= 1 { - return getFlavorNameByFlavorCode(tasks[0].FlavorCode), nil - } - return "", nil - } - return "", nil -} - -func getCloudBrainOneResourceSpec(task models.Cloudbrain) (*models.ResourceSpec, *models.GpuInfo, error) { - gpuQueueDefault := "openidebug" - if task.GpuQueue != "" { - gpuQueueDefault = task.GpuQueue - } - if task.ResourceSpecId >= 0 { - if task.JobType == string(models.JobTypeTrain) { - if models.CloudbrainTrainResourceSpecsMap[task.ResourceSpecId] != nil { - return models.CloudbrainTrainResourceSpecsMap[task.ResourceSpecId], models.CloudbrainTrainGpuInfosMap[gpuQueueDefault], nil - } else { - return models.CloudbrainSpecialResourceSpecsMap[task.ResourceSpecId], models.CloudbrainSpecialGpuInfosMap[gpuQueueDefault], nil - } - } else if task.JobType == string(models.JobTypeDebug) { - if models.CloudbrainDebugResourceSpecsMap[task.ResourceSpecId] != nil { - return models.CloudbrainDebugResourceSpecsMap[task.ResourceSpecId], models.CloudbrainDebugGpuInfosMap[gpuQueueDefault], nil - } else { - return models.CloudbrainSpecialResourceSpecsMap[task.ResourceSpecId], 
models.CloudbrainSpecialGpuInfosMap[gpuQueueDefault], nil - } - } else if task.JobType == string(models.JobTypeInference) { - return models.CloudbrainInferenceResourceSpecsMap[task.ResourceSpecId], models.CloudbrainInferenceGpuInfosMap[gpuQueueDefault], nil - } else if task.JobType == string(models.JobTypeBenchmark) || task.JobType == string(models.JobTypeSnn4imagenet) || task.JobType == string(models.JobTypeBrainScore) { - return models.CloudbrainBenchmarkResourceSpecsMap[task.ResourceSpecId], models.CloudbrainBenchmarkGpuInfosMap[gpuQueueDefault], nil - } +func GetCloudbrainFlavorName(task models.Cloudbrain) string { + if task.Spec != nil { + flavorName := task.Spec.ComputeResource + ":" + fmt.Sprint(task.Spec.AccCardsNum) + "*" + task.Spec.AccCardType + + ",内存:" + strconv.FormatInt(int64(task.Spec.MemGiB), 10) + "GB,共享内存:" + strconv.FormatInt(int64(task.Spec.ShareMemGiB), 10) + "GB" + return flavorName } else { - err := errors.New("ResourceSpecId is null") - return nil, nil, err - } - return nil, nil, nil -} -func getFlavorNameByFlavorCode(flavorCode string) string { - index := strings.LastIndex(flavorCode, ".") - cardNum, err := strconv.Atoi(strings.TrimSpace(flavorCode[index+1 : len(flavorCode)])) - if err != nil { - log.Error("strconv.Atoi failed: %v", err) return "" } - cloudbrainTwoFlavorName := "Ascend:" + strings.TrimSpace(flavorCode[index+1:len(flavorCode)]) + - "*Ascend-910(" + strconv.Itoa(cardNum*32) + "GB)|ARM:" + strconv.Itoa(cardNum*24) + - "核" + strconv.Itoa(cardNum*256) + "GB" - return cloudbrainTwoFlavorName } diff --git a/routers/repo/editor.go b/routers/repo/editor.go index 40edc4767..b350343db 100644 --- a/routers/repo/editor.go +++ b/routers/repo/editor.go @@ -303,7 +303,7 @@ func editFilePost(ctx *context.Context, form auth.EditRepoFileForm, isNewFile bo } if form.CommitChoice == frmCommitChoiceNewBranch && ctx.Repo.Repository.UnitEnabled(models.UnitTypePullRequests) { - ctx.Redirect(ctx.Repo.RepoLink + "/compare/" + ctx.Repo.BranchName + 
"..." + form.NewBranchName) + ctx.Redirect(ctx.Repo.RepoLink + "/compare/" + util.PathEscapeSegments(ctx.Repo.BranchName) + "..." + util.PathEscapeSegments(form.NewBranchName)) } else { ctx.Redirect(ctx.Repo.RepoLink + "/src/branch/" + util.PathEscapeSegments(branchName) + "/" + util.PathEscapeSegments(form.TreePath)) } @@ -475,7 +475,7 @@ func DeleteFilePost(ctx *context.Context, form auth.DeleteRepoFileForm) { ctx.Flash.Success(ctx.Tr("repo.editor.file_delete_success", ctx.Repo.TreePath)) if form.CommitChoice == frmCommitChoiceNewBranch && ctx.Repo.Repository.UnitEnabled(models.UnitTypePullRequests) { - ctx.Redirect(ctx.Repo.RepoLink + "/compare/" + ctx.Repo.BranchName + "..." + form.NewBranchName) + ctx.Redirect(ctx.Repo.RepoLink + "/compare/" + util.PathEscapeSegments(ctx.Repo.BranchName) + "..." + util.PathEscapeSegments(form.NewBranchName)) } else { treePath := filepath.Dir(ctx.Repo.TreePath) if treePath == "." { @@ -686,7 +686,7 @@ func UploadFilePost(ctx *context.Context, form auth.UploadRepoFileForm) { } if form.CommitChoice == frmCommitChoiceNewBranch && ctx.Repo.Repository.UnitEnabled(models.UnitTypePullRequests) { - ctx.Redirect(ctx.Repo.RepoLink + "/compare/" + ctx.Repo.BranchName + "..." + form.NewBranchName) + ctx.Redirect(ctx.Repo.RepoLink + "/compare/" + util.PathEscapeSegments(ctx.Repo.BranchName) + "..." 
+ util.PathEscapeSegments(form.NewBranchName)) } else { ctx.Redirect(ctx.Repo.RepoLink + "/src/branch/" + util.PathEscapeSegments(branchName) + "/" + util.PathEscapeSegments(form.TreePath)) } diff --git a/routers/repo/grampus.go b/routers/repo/grampus.go index abf64281a..b78bdebd3 100755 --- a/routers/repo/grampus.go +++ b/routers/repo/grampus.go @@ -713,6 +713,7 @@ func grampusTrainJobNpuCreate(ctx *context.Context, form auth.CreateGrampusTrain DatasetNames: datasetNames, DatasetInfos: datasetInfos, Spec: spec, + CodeName: strings.ToLower(repo.Name), } if form.ModelName != "" { //使用预训练模型训练 req.ModelName = form.ModelName @@ -837,6 +838,7 @@ func GrampusTrainJobShow(ctx *context.Context) { ctx.NotFound(ctx.Req.URL.RequestURI(), nil) return } + task.ContainerIp = "" if task.DeletedAt.IsZero() { //normal record result, err := grampus.GetJob(task.JobID) @@ -976,8 +978,7 @@ func generateCommand(repoName, processorType, codeRemotePath, dataRemotePath, bo command += "pwd;cd " + workDir + fmt.Sprintf(grampus.CommandPrepareScript, setting.Grampus.SyncScriptProject, setting.Grampus.SyncScriptProject) //download code & dataset if processorType == grampus.ProcessorTypeNPU { - commandDownload := "./downloader_for_obs " + setting.Bucket + " " + codeRemotePath + " " + grampus.CodeArchiveName + ";" - command += commandDownload + //no need to download code & dataset by internet } else if processorType == grampus.ProcessorTypeGPU { commandDownload := "./downloader_for_minio " + setting.Grampus.Env + " " + codeRemotePath + " " + grampus.CodeArchiveName + " '" + dataRemotePath + "' '" + datasetName + "'" commandDownload = processPretrainModelParameter(pretrainModelPath, pretrainModelFileName, commandDownload) @@ -986,8 +987,7 @@ func generateCommand(repoName, processorType, codeRemotePath, dataRemotePath, bo //unzip code & dataset if processorType == grampus.ProcessorTypeNPU { - commandUnzip := "cd " + workDir + "code;unzip -q master.zip;" - command += commandUnzip + //no need to 
process } else if processorType == grampus.ProcessorTypeGPU { unZipDatasetCommand := generateDatasetUnzipCommand(datasetName) commandUnzip := "cd " + workDir + "code;unzip -q master.zip;echo \"start to unzip dataset\";cd " + workDir + "dataset;" + unZipDatasetCommand @@ -1024,7 +1024,7 @@ func generateCommand(repoName, processorType, codeRemotePath, dataRemotePath, bo var commandCode string if processorType == grampus.ProcessorTypeNPU { - commandCode = "/bin/bash /home/work/run_train_for_openi.sh " + workDir + "code/" + strings.ToLower(repoName) + "/" + bootFile + " /tmp/log/train.log" + paramCode + ";" + commandCode = "/bin/bash /home/work/run_train_for_openi.sh /home/work/openi.py /tmp/log/train.log" + paramCode + ";" } else if processorType == grampus.ProcessorTypeGPU { if pretrainModelFileName != "" { paramCode += " --ckpt_url" + "=" + workDir + "pretrainmodel/" + pretrainModelFileName diff --git a/routers/repo/modelarts.go b/routers/repo/modelarts.go index 5487231a2..591f0dd7c 100755 --- a/routers/repo/modelarts.go +++ b/routers/repo/modelarts.go @@ -985,7 +985,7 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm) //isSaveParam := form.IsSaveParam repo := ctx.Repo.Repository codeLocalPath := setting.JobPath + jobName + modelarts.CodePath - codeObsPath := "/" + setting.Bucket + modelarts.JobPath + jobName + modelarts.CodePath + codeObsPath := "/" + setting.Bucket + modelarts.JobPath + jobName + modelarts.CodePath + VersionOutputPath + "/" outputObsPath := "/" + setting.Bucket + modelarts.JobPath + jobName + modelarts.OutputPath + VersionOutputPath + "/" logObsPath := "/" + setting.Bucket + modelarts.JobPath + jobName + modelarts.LogPath + VersionOutputPath + "/" // dataPath := "/" + setting.Bucket + "/" + setting.BasePath + path.Join(uuid[0:1], uuid[1:2]) + "/" + uuid + uuid + "/" @@ -1108,8 +1108,8 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm) return } - // parentDir := VersionOutputPath + 
"/" - if err := uploadCodeToObs(codeLocalPath, jobName, ""); err != nil { + parentDir := VersionOutputPath + "/" + if err := uploadCodeToObs(codeLocalPath, jobName, parentDir); err != nil { // if err := uploadCodeToObs(codeLocalPath, jobName, parentDir); err != nil { log.Error("Failed to uploadCodeToObs: %s (%v)", repo.FullName(), err) trainJobNewDataPrepare(ctx) @@ -1795,7 +1795,7 @@ func TrainJobShow(ctx *context.Context) { datasetList = append(datasetList, GetCloudBrainDataSetInfo(task.Uuid, task.DatasetName, false)) VersionListTasks[i].CanDel = cloudbrain.CanDeleteJob(ctx, &task.Cloudbrain) VersionListTasks[i].CanModify = cloudbrain.CanModifyJob(ctx, &task.Cloudbrain) - + VersionListTasks[i].ContainerIp = "" //add spec s, err := resource.GetCloudbrainSpec(task.Cloudbrain.ID) if err != nil { diff --git a/routers/repo/repo.go b/routers/repo/repo.go index 03d2d832a..2c8c2f45b 100644 --- a/routers/repo/repo.go +++ b/routers/repo/repo.go @@ -12,6 +12,7 @@ import ( "path" "regexp" "strings" + "text/template" "unicode/utf8" "code.gitea.io/gitea/modules/validation" @@ -212,7 +213,7 @@ func CreatePost(ctx *context.Context, form auth.CreateRepoForm) { opts := models.GenerateRepoOptions{ Name: form.RepoName, Alias: form.Alias, - Description: form.Description, + Description: template.HTMLEscapeString(form.Description), Private: form.Private, GitContent: form.GitContent, Topics: form.Topics, diff --git a/routers/repo/setting.go b/routers/repo/setting.go index 11efdf275..5fcfb2287 100644 --- a/routers/repo/setting.go +++ b/routers/repo/setting.go @@ -8,6 +8,7 @@ package repo import ( "errors" "fmt" + "html/template" "io/ioutil" "net/url" "regexp" @@ -129,7 +130,7 @@ func SettingsPost(ctx *context.Context, form auth.RepoSettingForm) { // In case it's just a case change. 
repo.Name = newRepoName repo.LowerName = strings.ToLower(newRepoName) - repo.Description = form.Description + repo.Description = template.HTMLEscapeString(form.Description) repo.Website = form.Website repo.IsTemplate = form.Template repo.Alias = newAlias diff --git a/routers/repo/setting_protected_branch.go b/routers/repo/setting_protected_branch.go index ab0fd77ee..f1ea17528 100644 --- a/routers/repo/setting_protected_branch.go +++ b/routers/repo/setting_protected_branch.go @@ -5,6 +5,7 @@ package repo import ( + "code.gitea.io/gitea/modules/util" "fmt" "strings" "time" @@ -192,7 +193,7 @@ func SettingsProtectedBranchPost(ctx *context.Context, f auth.ProtectBranchForm) } if f.RequiredApprovals < 0 { ctx.Flash.Error(ctx.Tr("repo.settings.protected_branch_required_approvals_min")) - ctx.Redirect(fmt.Sprintf("%s/settings/branches/%s", ctx.Repo.RepoLink, branch)) + ctx.Redirect(fmt.Sprintf("%s/settings/branches/%s", ctx.Repo.RepoLink, util.PathEscapeSegments(branch))) } var whitelistUsers, whitelistTeams, mergeWhitelistUsers, mergeWhitelistTeams, approvalsWhitelistUsers, approvalsWhitelistTeams []int64 @@ -263,7 +264,7 @@ func SettingsProtectedBranchPost(ctx *context.Context, f auth.ProtectBranchForm) return } ctx.Flash.Success(ctx.Tr("repo.settings.update_protect_branch_success", branch)) - ctx.Redirect(fmt.Sprintf("%s/settings/branches/%s", ctx.Repo.RepoLink, branch)) + ctx.Redirect(fmt.Sprintf("%s/settings/branches/%s", ctx.Repo.RepoLink, util.PathEscapeSegments(branch))) } else { if protectBranch != nil { if err := ctx.Repo.Repository.DeleteProtectedBranch(protectBranch.ID); err != nil { diff --git a/routers/user/home.go b/routers/user/home.go index 78e6c00e9..991c80328 100755 --- a/routers/user/home.go +++ b/routers/user/home.go @@ -23,7 +23,6 @@ import ( "code.gitea.io/gitea/modules/modelarts" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/util" - "code.gitea.io/gitea/routers/repo" issue_service "code.gitea.io/gitea/services/issue" 
pull_service "code.gitea.io/gitea/services/pull" @@ -841,9 +840,6 @@ func Cloudbrains(ctx *context.Context) { ciTasks[i].CanDebug = true ciTasks[i].CanDel = true ciTasks[i].Cloudbrain.ComputeResource = task.ComputeResource - ciTasks[i].Cloudbrain.AiCenter = repo.GetCloudbrainAiCenter(task.Cloudbrain, ctx) - ciTasks[i].Cloudbrain.Cluster = repo.GetCloudbrainCluster(task.Cloudbrain, ctx) - } pager := context.NewPagination(int(count), setting.UI.IssuePagingNum, page, getTotalPage(count, setting.UI.IssuePagingNum)) diff --git a/templates/admin/cloudbrain/list.tmpl b/templates/admin/cloudbrain/list.tmpl index 14951dd69..4c500b5e6 100755 --- a/templates/admin/cloudbrain/list.tmpl +++ b/templates/admin/cloudbrain/list.tmpl @@ -89,7 +89,7 @@