diff --git a/models/user.go b/models/user.go index a423a843b..dcbb9be3d 100755 --- a/models/user.go +++ b/models/user.go @@ -1768,7 +1768,6 @@ func (opts *SearchUserOptions) toConds() builder.Cond { if !opts.IsActive.IsNone() { cond = cond.And(builder.Eq{"is_active": opts.IsActive.IsTrue()}) } - return cond } @@ -1780,12 +1779,15 @@ func SearchUsers(opts *SearchUserOptions) (users []*User, _ int64, _ error) { if err != nil { return nil, 0, fmt.Errorf("Count: %v", err) } - + orderby := opts.OrderBy.String() if len(opts.OrderBy) == 0 { - opts.OrderBy = SearchOrderByAlphabetically + orderby = SearchOrderByAlphabetically.String() + lowerKeyword := strings.ToLower(opts.Keyword) + if len(opts.Keyword) > 0 { + orderby = " CASE when lower_name='" + lowerKeyword + "' then 0 when strpos(lower_name,'" + lowerKeyword + "')>0 then 1 else 2 END ASC" + } } - - sess := x.Where(cond).OrderBy(opts.OrderBy.String()) + sess := x.Where(cond).OrderBy(orderby) if opts.Page != 0 { sess = opts.setSessionPagination(sess) } diff --git a/models/user_business_analysis.go b/models/user_business_analysis.go index a36bd4736..0c67a569a 100644 --- a/models/user_business_analysis.go +++ b/models/user_business_analysis.go @@ -412,7 +412,16 @@ func QueryUserStaticDataAll(opts *UserBusinessAnalysisQueryOptions) ([]*UserBusi func QueryDataForUserDefineFromDb(opts *UserBusinessAnalysisQueryOptions, key string) ([]*UserBusinessAnalysis, int64) { statictisSess := xStatistic.NewSession() defer statictisSess.Close() - cond := "data_date='" + key + "'" + + var cond = builder.NewCond() + cond = cond.And( + builder.Eq{"data_date": key}, + ) + if len(opts.UserName) > 0 { + cond = cond.And( + builder.Like{"name", opts.UserName}, + ) + } allCount, err := statictisSess.Where(cond).Count(new(UserBusinessAnalysis)) if err == nil { if allCount > 0 { diff --git a/modules/storage/minio_ext.go b/modules/storage/minio_ext.go index 4c0cbac55..05b692335 100755 --- a/modules/storage/minio_ext.go +++ 
b/modules/storage/minio_ext.go @@ -179,30 +179,82 @@ func GetOneLevelAllObjectUnderDirMinio(bucket string, prefixRootPath string, rel output, err := core.ListObjects(bucket, Prefix, "", "", 1000) fileInfos := make([]FileInfo, 0) prefixLen := len(Prefix) + fileMap := make(map[string]bool, 0) if err == nil { for _, val := range output.Contents { + log.Info("val key=" + val.Key) var isDir bool var fileName string if val.Key == Prefix { continue } - if strings.HasSuffix(val.Key, "/") { + fileName = val.Key[prefixLen:] + log.Info("fileName =" + fileName) + files := strings.Split(fileName, "/") + if fileMap[files[0]] { + continue + } else { + fileMap[files[0]] = true + } + ParenDir := relativePath + fileName = files[0] + if len(files) > 1 { isDir = true - fileName = val.Key[prefixLen : len(val.Key)-1] - relativePath += val.Key[prefixLen:] + ParenDir += fileName + "/" } else { isDir = false - fileName = val.Key[prefixLen:] } + fileInfo := FileInfo{ ModTime: val.LastModified.Local().Format("2006-01-02 15:04:05"), FileName: fileName, Size: val.Size, IsDir: isDir, - ParenDir: relativePath, + ParenDir: ParenDir, } fileInfos = append(fileInfos, fileInfo) + + // log.Info("val key=" + val.Key) + // var isDir bool + // var fileName string + // if val.Key == Prefix { + // continue + // } + + // fileName = val.Key[prefixLen:] + // log.Info("fileName =" + fileName) + // files := strings.Split(fileName, "/") + // if fileMap[files[0]] { + // continue + // } else { + // fileMap[files[0]] = true + // } + // ParenDir := relativePath + // fileName = files[0] + // if len(files) > 1 { + // isDir = true + // ParenDir += fileName + "/" + // } else { + // isDir = false + // } + + // // if strings.HasSuffix(val.Key, "/") { + // // isDir = true + // // fileName = val.Key[prefixLen : len(val.Key)-1] + // // relativePath += val.Key[prefixLen:] + // // } else { + // // isDir = false + // // fileName = val.Key[prefixLen:] + // // } + // fileInfo := FileInfo{ + // ModTime: 
val.LastModified.Local().Format("2006-01-02 15:04:05"), + // FileName: fileName, + // Size: val.Size, + // IsDir: isDir, + // ParenDir: relativePath, + // } + // fileInfos = append(fileInfos, fileInfo) } return fileInfos, err } else { diff --git a/modules/storage/obs.go b/modules/storage/obs.go index 29b7998f7..2cb3af927 100755 --- a/modules/storage/obs.go +++ b/modules/storage/obs.go @@ -395,29 +395,6 @@ func GetOneLevelAllObjectUnderDir(bucket string, prefixRootPath string, relative } else { isDir = false } - - // if strings.Contains(val.Key[prefixLen:len(val.Key)-1], "/") { - - // files := strings.Split(fileName, "/") - // fileName = files[0] - // isDir = true - // if fileMap[files[0]] { - // continue - // } else { - // fileMap[files[0]] = true - // } - // } else { - // if strings.HasSuffix(val.Key, "/") { - // isDir = true - // fileName = val.Key[prefixLen : len(val.Key)-1] - // relativePath += val.Key[prefixLen:] - // } else { - // isDir = false - // fileName = val.Key[prefixLen:] - // } - // fileMap[fileName] = true - // } - fileInfo := FileInfo{ ModTime: val.LastModified.Local().Format("2006-01-02 15:04:05"), FileName: fileName, diff --git a/options/locale/locale_en-US.ini b/options/locale/locale_en-US.ini index 259679b9b..d1e61d242 100755 --- a/options/locale/locale_en-US.ini +++ b/options/locale/locale_en-US.ini @@ -252,10 +252,10 @@ page_dev_env_desc2_title=Model Management and Sharing page_dev_env_desc2_desc=Associate the model with the code version, you can adjust the model in different ways based on the historical version of the code and save the results. The trained model can be open and shared, so that more people can use the model to test and give feedback. page_dev_env_desc3_title=Once Configuration, Multiple Reuse page_dev_env_desc3_desc=Provide execution environment sharing, Once Configuration, Multiple Reuse. Lower the threshold of model development, and avoid spending repetitive time configuring complex environments. 
-page_dev_yunlao=PengCheng Cloudbrain Open Source Collaboration -page_dev_yunlao_desc1=The platform has been connected with Pengcheng Cloudbrain and can use the rich computing resources of Pengcheng Cloudbrain to complete AI development tasks. -page_dev_yunlao_desc2=Pengcheng Cloudbrain's existing AI computing power is 100p FLOPS@FP16 (billions of half precision floating-point calculations per second), the main hardware infrastructure is composed of GPU server equipped with NVIDIA Tesla V100 and Atlas 900 AI cluster equipped with Kunpeng and Ascend processors. -page_dev_yunlao_desc3=Developers can freely choose the corresponding computing resources according to their needs, and can test the adaptability, performance, stability of the model in different hardware environments. +page_dev_yunlao=OpenI AI Collaboration Platform +page_dev_yunlao_desc1=OpenI AI collaboration platform has been connected with Pengcheng CloudBrain and China computing network (c2net) in phase I, and can use the rich computing resources of Pengcheng CloudBrain and China computing network to complete AI development tasks. +page_dev_yunlao_desc2=Pengcheng CloudBrain's existing AI computing power is 100p FLOPS@FP16 (billions of half precision floating-point calculations per second), the main hardware infrastructure is composed of GPU servers equipped with NVIDIA Tesla V100 and A100, and Atlas 900 AI clusters equipped with Kunpeng and Ascend processors; China computing network (c2net) phase I can realize the high-speed network interconnection between different AI computing centers, realize the reasonable scheduling of computing power and the flexible allocation of resources. At present, it has been connected to 11 intelligent computing centers, with a total scale of 1924p. +page_dev_yunlao_desc3=OpenI AI collaboration platform has been connected to Pengcheng Cloud Computing Institute, Chengdu Intelligent Computing Center, Zhongyuan Intelligent Computing Center, Hefei brain and other nodes. 
Developers can freely choose the corresponding computing resources according to their use needs, and can test the adaptability, performance, stability, etc. of the model in different hardware environments. page_dev_yunlao_desc4=If your model requires more computing resources, you can also apply for it separately. page_dev_yunlao_apply=Apply Separately diff --git a/options/locale/locale_zh-CN.ini b/options/locale/locale_zh-CN.ini index e98a15f98..a39118c0a 100755 --- a/options/locale/locale_zh-CN.ini +++ b/options/locale/locale_zh-CN.ini @@ -254,11 +254,11 @@ page_dev_env_desc2_title=模型管理与共享 page_dev_env_desc2_desc=将模型与代码版本建立关联,可以基于代码历史版本,使用不同的方式调整模型,并将结果保存下来;训练好的模型可以开放共享,让更多人的使用模型测试并提出反馈 page_dev_env_desc3_title=一次配置,多次使用 page_dev_env_desc3_desc=提供执行环境共享,一次配置,多次使用,降低模型开发门槛,避免花费重复的时间配置复杂的环境 -page_dev_yunlao=鹏城云脑开源协同 -page_dev_yunlao_desc1=平台已经与鹏城云脑打通,可以利用鹏城云脑的丰富算力资源,完成AI开发任务 -page_dev_yunlao_desc2=鹏城云脑现有AI算力100P FLOPS@FP16(每秒十亿亿次半精度浮点计算),主要硬件基础设施由搭载英伟达Tesla V100 的GPU服务器和搭载鲲鹏、昇腾处理器的Atlas 900 AI集群构成 -page_dev_yunlao_desc3=开发者可以根据使用需求,自由选择相应计算资源,可以测试模型在不同硬件环境下的适配能力、性能、稳定性等 -page_dev_yunlao_desc4=如果您的模型需要更多的计算资源,也可以单独申请 +page_dev_yunlao=启智AI协作平台 +page_dev_yunlao_desc1=启智AI协作平台已经与鹏城云脑、中国算力网(C2Net)一期打通,可以利用鹏城云脑和中国算力网的丰富算力资源,完成AI开发任务。 +page_dev_yunlao_desc2=鹏城云脑现有AI算力100P FLOPS@FP16(每秒十亿亿次半精度浮点计算),主要硬件基础设施由搭载英伟达Tesla V100 和A100 的GPU服务器,以及搭载鲲鹏、昇腾处理器的Atlas 900 AI集群构成;中国算力网(C2Net)一期可实现不同人工智能计算中心之间高速网络互联,实现算力合理调度和资源弹性分配,目前已接入11家智算中心,算力总规模1924P。 +page_dev_yunlao_desc3=启智AI协作平台已接入其中的鹏城云计算所、成都智算中心、中原智算中心、合肥类脑等节点,开发者可以根据使用需求,自由选择相应计算资源,可以测试模型在不同硬件环境下的适配能力、性能、稳定性等。 +page_dev_yunlao_desc4=如果您的模型需要更多的计算资源,也可以单独申请。 page_dev_yunlao_apply=单独申请 search=搜索 diff --git a/routers/repo/cloudbrain.go b/routers/repo/cloudbrain.go index cd098d8c3..76bf9b076 100755 --- a/routers/repo/cloudbrain.go +++ b/routers/repo/cloudbrain.go @@ -911,7 +911,7 @@ func cloudBrainShow(ctx *context.Context, tpName base.TplName, jobType models.Jo } } - + ctx.Data["datasetDownload"] = 
GetCloudBrainDataSetInfo(task.Uuid, false) ctx.Data["task"] = task labelName := strings.Fields(task.LabelName) ctx.Data["LabelName"] = labelName diff --git a/routers/repo/grampus.go b/routers/repo/grampus.go index 36a27b088..a05fae40c 100755 --- a/routers/repo/grampus.go +++ b/routers/repo/grampus.go @@ -696,7 +696,7 @@ func GrampusTrainJobShow(ctx *context.Context) { taskList := make([]*models.Cloudbrain, 0) taskList = append(taskList, task) ctx.Data["version_list_task"] = taskList - + ctx.Data["datasetDownload"] = GetCloudBrainDataSetInfo(task.Uuid, false) ctx.Data["canDownload"] = cloudbrain.CanModifyJob(ctx, task) ctx.Data["displayJobName"] = task.DisplayJobName diff --git a/routers/repo/modelarts.go b/routers/repo/modelarts.go index 130d9f5ab..8f43690ed 100755 --- a/routers/repo/modelarts.go +++ b/routers/repo/modelarts.go @@ -304,34 +304,7 @@ func NotebookShow(ctx *context.Context) { datasetDownload := make([]models.DatasetDownload, 0) if ctx.IsSigned { if task.Uuid != "" && task.UserID == ctx.User.ID { - uuidList := strings.Split(task.Uuid, ";") - for _, uuidStr := range uuidList { - attachment, err := models.GetAttachmentByUUID(uuidStr) - if err != nil { - log.Error("GetAttachmentByUUID failed:%v", err.Error()) - return - } - dataset, err := models.GetDatasetByID(attachment.DatasetID) - if err != nil { - log.Error("GetDatasetByID failed:%v", err.Error()) - return - } - repo, err := models.GetRepositoryByID(dataset.RepoID) - if err != nil { - log.Error("GetRepositoryByID failed:%v", err.Error()) - return - } - datasetDownload = append(datasetDownload, models.DatasetDownload{ - DatasetName: attachment.Name, - DatasetDownloadLink: attachment.S3DownloadURL(), - RepositoryLink: repo.Link() + "/datasets", - }) - - } - // datasetName, err := GetDatasetNameByUUID(task.Uuid) - // if err == nil { - // task.DatasetName = datasetName - // } + datasetDownload = GetCloudBrainDataSetInfo(task.Uuid, true) } } user, err := models.GetUserByID(task.UserID) @@ -377,6 +350,39 
@@ func NotebookShow(ctx *context.Context) { ctx.HTML(200, tplModelArtsNotebookShow) } +func GetCloudBrainDataSetInfo(uuid string, isNeedDown bool) []models.DatasetDownload { + datasetDownload := make([]models.DatasetDownload, 0) + + uuidList := strings.Split(uuid, ";") + for _, uuidStr := range uuidList { + attachment, err := models.GetAttachmentByUUID(uuidStr) + if err != nil { + log.Error("GetAttachmentByUUID failed:%v", err.Error()) + return datasetDownload + } + dataset, err := models.GetDatasetByID(attachment.DatasetID) + if err != nil { + log.Error("GetDatasetByID failed:%v", err.Error()) + return datasetDownload + } + repo, err := models.GetRepositoryByID(dataset.RepoID) + if err != nil { + log.Error("GetRepositoryByID failed:%v", err.Error()) + return datasetDownload + } + url := "" + if isNeedDown { + url = attachment.S3DownloadURL() + } + datasetDownload = append(datasetDownload, models.DatasetDownload{ + DatasetName: attachment.Name, + DatasetDownloadLink: url, + RepositoryLink: repo.Link() + "/datasets", + }) + } + return datasetDownload +} + func setShowSpecBySpecialPoolConfig(ctx *context.Context, findSpec bool, task *models.Cloudbrain) { modelarts.InitSpecialPool() if modelarts.SpecialPools != nil && !findSpec { @@ -1764,7 +1770,7 @@ func TrainJobShow(ctx *context.Context) { return } ctx.Data["canNewJob"] = canNewJob - + datasetList := make([][]models.DatasetDownload, 0) //将运行参数转化为epoch_size = 3, device_target = Ascend的格式 for i, task := range VersionListTasks { @@ -1787,7 +1793,7 @@ func TrainJobShow(ctx *context.Context) { } else { VersionListTasks[i].Parameters = "" } - + datasetList = append(datasetList, GetCloudBrainDataSetInfo(task.Uuid, false)) VersionListTasks[i].CanDel = cloudbrain.CanDeleteJob(ctx, &task.Cloudbrain) VersionListTasks[i].CanModify = cloudbrain.CanModifyJob(ctx, &task.Cloudbrain) } @@ -1799,6 +1805,7 @@ func TrainJobShow(ctx *context.Context) { ctx.Data["displayJobName"] = VersionListTasks[0].DisplayJobName 
ctx.Data["version_list_task"] = VersionListTasks ctx.Data["version_list_count"] = VersionListCount + ctx.Data["datasetList"] = datasetList ctx.Data["canDownload"] = cloudbrain.CanModifyJob(ctx, &VersionListTasks[0].Cloudbrain) ctx.HTML(http.StatusOK, tplModelArtsTrainJobShow) } @@ -2524,7 +2531,7 @@ func InferenceJobShow(ctx *context.Context) { ctx.Data["displayJobName"] = task.DisplayJobName ctx.Data["task"] = task ctx.Data["canDownload"] = cloudbrain.CanModifyJob(ctx, task) - + ctx.Data["datasetDownload"] = GetCloudBrainDataSetInfo(task.Uuid, false) tempUids := []int64{} tempUids = append(tempUids, task.UserID) JobCreater, err := models.GetUserNamesByIDs(tempUids) diff --git a/routers/search.go b/routers/search.go index 628350424..72bf97bf4 100644 --- a/routers/search.go +++ b/routers/search.go @@ -314,7 +314,7 @@ func searchRepo(ctx *context.Context, TableName string, Key string, Page int, Pa res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending, "num_stars", false)...).From(from).Size(Size).Highlight(queryHighlight("alias", "description", "topics")).Do(ctx.Req.Context()) if err == nil { esresult := makeRepoResult(res, Key, OnlyReturnNum, language) - setForkRepoOrder(esresult) + setForkRepoOrder(esresult, SortBy) resultObj.Total = resultObj.PrivateTotal + esresult.Total isNeedSort := false if len(resultObj.Result) > 0 { @@ -347,7 +347,10 @@ func searchRepo(ctx *context.Context, TableName string, Key string, Page int, Pa } } -func setForkRepoOrder(esresult *SearchRes) { +func setForkRepoOrder(esresult *SearchRes, SortBy string) { + if SortBy == "default" || SortBy == "" { + return + } forkidMap := make(map[string]int, 0) for index, re := range esresult.Result { if re["fork_id"] != nil { diff --git a/templates/home.tmpl b/templates/home.tmpl index b90b20a28..9f52eb19e 100755 --- a/templates/home.tmpl +++ b/templates/home.tmpl @@ -90,7 +90,7 @@

智算网络

-

人工智能算力网络推进联盟已接入10家智算中心,算力总规模1542P

+

人工智能算力网络推进联盟已接入11家智算中心,算力总规模1924P

diff --git a/templates/repo/cloudbrain/inference/show.tmpl b/templates/repo/cloudbrain/inference/show.tmpl index 055e403bd..f4e57e685 100644 --- a/templates/repo/cloudbrain/inference/show.tmpl +++ b/templates/repo/cloudbrain/inference/show.tmpl @@ -468,7 +468,9 @@
- {{.DatasetName}} + {{range $m ,$n := $.datasetDownload}} + {{.DatasetName}} + {{end}}
diff --git a/templates/repo/cloudbrain/show.tmpl b/templates/repo/cloudbrain/show.tmpl index 0c53f7fce..d111fe123 100755 --- a/templates/repo/cloudbrain/show.tmpl +++ b/templates/repo/cloudbrain/show.tmpl @@ -412,7 +412,9 @@
- {{.DatasetName}} + {{range $m ,$n := $.datasetDownload}} + {{.DatasetName}} + {{end}}
diff --git a/templates/repo/cloudbrain/trainjob/show.tmpl b/templates/repo/cloudbrain/trainjob/show.tmpl index e4d8ff346..c22c557a4 100644 --- a/templates/repo/cloudbrain/trainjob/show.tmpl +++ b/templates/repo/cloudbrain/trainjob/show.tmpl @@ -428,7 +428,9 @@
- {{.DatasetName}} + {{range $m ,$n := $.datasetDownload}} + {{.DatasetName}} + {{end}}
diff --git a/templates/repo/grampus/trainjob/show.tmpl b/templates/repo/grampus/trainjob/show.tmpl index 5d4321736..b669d112f 100755 --- a/templates/repo/grampus/trainjob/show.tmpl +++ b/templates/repo/grampus/trainjob/show.tmpl @@ -428,7 +428,9 @@
- {{.DatasetName}} + {{range $m ,$n := $.datasetDownload}} + {{.DatasetName}} + {{end}}
diff --git a/templates/repo/modelarts/inferencejob/show.tmpl b/templates/repo/modelarts/inferencejob/show.tmpl index 14bb5cf24..7be35a581 100644 --- a/templates/repo/modelarts/inferencejob/show.tmpl +++ b/templates/repo/modelarts/inferencejob/show.tmpl @@ -409,7 +409,9 @@ td, th {
- {{.DatasetName}} + {{range $m ,$n := $.datasetDownload}} + {{.DatasetName}} + {{end}}
diff --git a/templates/repo/modelarts/notebook/show.tmpl b/templates/repo/modelarts/notebook/show.tmpl index 2b2541900..ae9ab9cd8 100755 --- a/templates/repo/modelarts/notebook/show.tmpl +++ b/templates/repo/modelarts/notebook/show.tmpl @@ -448,7 +448,7 @@ {{range $.datasetDownload}} - {{.DatasetName}} + {{.DatasetName}} {{.DatasetDownloadLink}} 复制链接 diff --git a/templates/repo/modelarts/trainjob/show.tmpl b/templates/repo/modelarts/trainjob/show.tmpl index e61fafcdd..ea556e854 100755 --- a/templates/repo/modelarts/trainjob/show.tmpl +++ b/templates/repo/modelarts/trainjob/show.tmpl @@ -460,7 +460,13 @@
- {{.DatasetName}} + {{range $m ,$n := $.datasetList}} + {{if eq $k $m}} + {{range $f ,$g := $n}} + {{.DatasetName}} + {{end}} + {{end}} + {{end}}
diff --git a/templates/repo/modelmanage/showinfo.tmpl b/templates/repo/modelmanage/showinfo.tmpl index a4577fe20..25ec72dea 100644 --- a/templates/repo/modelmanage/showinfo.tmpl +++ b/templates/repo/modelmanage/showinfo.tmpl @@ -125,6 +125,14 @@ + 训练任务 + + + + + + + {{$.i18n.Tr "repo.modelarts.code_version"}} @@ -197,6 +205,7 @@ {{template "base/footer" .}}