diff --git a/models/attachment.go b/models/attachment.go
index 7c95a73dd..40fcd7f08 100755
--- a/models/attachment.go
+++ b/models/attachment.go
@@ -10,6 +10,7 @@ import (
 	"io"
 	"path"
 	"strings"
+	"time"
 
 	"code.gitea.io/gitea/modules/log"
 	"code.gitea.io/gitea/modules/obs"
@@ -104,6 +105,14 @@ func (a *Attachment) IncreaseDownloadCount() error {
 	return nil
 }
 
+func (a *Attachment) UpdateDatasetUpdateUnix() error {
+	// Update the dataset's updated_unix timestamp.
+	if _, err := x.Exec("UPDATE `dataset` SET updated_unix="+fmt.Sprint(time.Now().Unix())+" WHERE id=?", a.DatasetID); err != nil {
+		return fmt.Errorf("UpdateDatasetUpdateUnix: %v", err)
+	}
+	return nil
+}
+
 // APIFormat converts models.Attachment to api.Attachment
 func (a *Attachment) APIFormat() *api.Attachment {
 	return &api.Attachment{
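UpdateDatasetUpdateUnix concatenates the timestamp into the SQL string; that is safe here because time.Now().Unix() is an integer, but binding both values keeps the statement fully parameterized. A minimal sketch of that variant, assuming the same package-level xorm engine `x`:

```go
// Sketch only: a fully parameterized form of the update above.
// Assumes the same xorm engine `x` and the Attachment.DatasetID field.
func (a *Attachment) updateDatasetUpdateUnixParam() error {
	if _, err := x.Exec("UPDATE `dataset` SET updated_unix=? WHERE id=?",
		time.Now().Unix(), a.DatasetID); err != nil {
		return fmt.Errorf("UpdateDatasetUpdateUnix: %v", err)
	}
	return nil
}
```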
diff --git a/models/user_business_analysis.go b/models/user_business_analysis.go
index ca8c5071f..34d84555c 100644
--- a/models/user_business_analysis.go
+++ b/models/user_business_analysis.go
@@ -831,7 +831,12 @@ func getUserIndex(dateRecord UserBusinessAnalysis, ParaWeight map[string]float64
 	result += float64(dateRecord.StarRepoCount) * getParaWeightValue("StarRepoCount", ParaWeight, 0.1)
 	result += float64(dateRecord.LoginCount) * getParaWeightValue("LoginCount", ParaWeight, 0.1)
 	result += float64(dateRecord.WatchedCount) * getParaWeightValue("WatchedCount", ParaWeight, 0.3)
-	result += float64(dateRecord.CommitCodeSize) * getParaWeightValue("CommitCodeSize", ParaWeight, 0.1)
+	codeLine := float64(dateRecord.CommitCodeSize) / 1000
+	limitCodeLine := getParaWeightValue("LimitCommitCodeSize", ParaWeight, 100)
+	if codeLine >= limitCodeLine {
+		codeLine = limitCodeLine
+	}
+	result += codeLine * getParaWeightValue("CommitCodeSize", ParaWeight, 0.1)
 	result += float64(dateRecord.SolveIssueCount) * getParaWeightValue("SolveIssueCount", ParaWeight, 0.2)
 	result += float64(dateRecord.EncyclopediasCount) * getParaWeightValue("EncyclopediasCount", ParaWeight, 0.1)
 	result += float64(dateRecord.CreateRepoCount) * getParaWeightValue("CreateRepoCount", ParaWeight, 0.05)
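The new clamp scales CommitCodeSize down to thousands of lines and caps its contribution (default cap 100, overridable through the LimitCommitCodeSize weight), so a single bulk commit can no longer dominate the user index. The same logic as a self-contained sketch:

```go
package main

import "fmt"

// codeLineScore mirrors the clamped contribution above: scale to
// thousands of lines, cap at limit, then apply the weight.
func codeLineScore(commitCodeSize int, limit, weight float64) float64 {
	codeLine := float64(commitCodeSize) / 1000
	if codeLine >= limit {
		codeLine = limit
	}
	return codeLine * weight
}

func main() {
	fmt.Println(codeLineScore(50000, 100, 0.1))   // 5 (uncapped)
	fmt.Println(codeLineScore(5000000, 100, 0.1)) // 10 (capped at 100k lines)
}
```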
diff --git a/modules/storage/obs.go b/modules/storage/obs.go
index 08a354359..2b52ab603 100755
--- a/modules/storage/obs.go
+++ b/modules/storage/obs.go
@@ -59,21 +59,51 @@ func ObsHasObject(path string) (bool, error) {
 	return hasObject, nil
 }
 
+func listAllParts(uuid, uploadID, key string) (output *obs.ListPartsOutput, err error) {
+	output = &obs.ListPartsOutput{}
+	partNumberMarker := 0
+	for {
+		temp, err := ObsCli.ListParts(&obs.ListPartsInput{
+			Bucket:           setting.Bucket,
+			Key:              key,
+			UploadId:         uploadID,
+			MaxParts:         MAX_LIST_PARTS,
+			PartNumberMarker: partNumberMarker,
+		})
+		if err != nil {
+			log.Error("ListParts failed:", err.Error())
+			return output, err
+		}
+
+		partNumberMarker = temp.NextPartNumberMarker
+		log.Info("uuid:%s, MaxParts:%d, PartNumberMarker:%d, NextPartNumberMarker:%d, len:%d", uuid, temp.MaxParts, temp.PartNumberMarker, temp.NextPartNumberMarker, len(temp.Parts))
+
+		for _, partInfo := range temp.Parts {
+			output.Parts = append(output.Parts, obs.Part{
+				PartNumber: partInfo.PartNumber,
+				ETag:       partInfo.ETag,
+			})
+		}
+
+		if len(temp.Parts) < temp.MaxParts {
+			break
+		}
+	}
+
+	return output, nil
+}
+
 func GetObsPartInfos(uuid, uploadID, fileName string) (string, error) {
 	key := strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")
 
-	output, err := ObsCli.ListParts(&obs.ListPartsInput{
-		Bucket:   setting.Bucket,
-		Key:      key,
-		UploadId: uploadID,
-	})
+	allParts, err := listAllParts(uuid, uploadID, key)
 	if err != nil {
-		log.Error("ListParts failed:", err.Error())
+		log.Error("listAllParts failed: %v", err)
 		return "", err
 	}
 
 	var chunks string
-	for _, partInfo := range output.Parts {
+	for _, partInfo := range allParts.Parts {
 		chunks += strconv.Itoa(partInfo.PartNumber) + "-" + partInfo.ETag + ","
 	}
 
@@ -100,39 +134,14 @@ func CompleteObsMultiPartUpload(uuid, uploadID, fileName string) error {
 	input.Key = strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")
 	input.UploadId = uploadID
 
-	partNumberMarker := 0
-	for {
-		output, err := ObsCli.ListParts(&obs.ListPartsInput{
-			Bucket:           setting.Bucket,
-			Key:              input.Key,
-			UploadId:         uploadID,
-			MaxParts:         MAX_LIST_PARTS,
-			PartNumberMarker: partNumberMarker,
-		})
-		if err != nil {
-			log.Error("ListParts failed:", err.Error())
-			return err
-		}
-
-		partNumberMarker = output.NextPartNumberMarker
-		log.Info("uuid:%s, MaxParts:%d, PartNumberMarker:%d, NextPartNumberMarker:%d, len:%d", uuid, output.MaxParts, output.PartNumberMarker, output.NextPartNumberMarker, len(output.Parts))
-
-		for _, partInfo := range output.Parts {
-			input.Parts = append(input.Parts, obs.Part{
-				PartNumber: partInfo.PartNumber,
-				ETag:       partInfo.ETag,
-			})
-		}
-
-		if len(output.Parts) < output.MaxParts {
-			break
-		} else {
-			continue
-		}
-
-		break
+	allParts, err := listAllParts(uuid, uploadID, input.Key)
+	if err != nil {
+		log.Error("listAllParts failed: %v", err)
+		return err
 	}
 
+	input.Parts = allParts.Parts
+
 	output, err := ObsCli.CompleteMultipartUpload(input)
 	if err != nil {
 		log.Error("CompleteMultipartUpload failed:", err.Error())
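listAllParts pages through the multipart listing with PartNumberMarker until a page comes back shorter than MaxParts, so uploads with more than MAX_LIST_PARTS parts are no longer truncated. The pattern, reduced to a runnable sketch with a hypothetical page fetcher standing in for ObsCli.ListParts:

```go
package main

import "fmt"

// fetchPage is a stand-in for a paged list API: it returns up to max
// items starting at marker, plus the marker for the next call.
func fetchPage(all []int, marker, max int) (items []int, next int) {
	end := marker + max
	if end > len(all) {
		end = len(all)
	}
	return all[marker:end], end
}

func main() {
	all := make([]int, 250) // e.g. 250 uploaded parts
	var collected []int
	marker, maxParts := 0, 100
	for {
		items, next := fetchPage(all, marker, maxParts)
		collected = append(collected, items...)
		marker = next
		if len(items) < maxParts { // short page: nothing left
			break
		}
	}
	fmt.Println(len(collected)) // 250
}
```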
diff --git a/options/locale/locale_en-US.ini b/options/locale/locale_en-US.ini
index fb38fe067..618b46b3b 100755
--- a/options/locale/locale_en-US.ini
+++ b/options/locale/locale_en-US.ini
@@ -266,6 +266,17 @@ search_related=related
 search_maybe=maybe
 search_ge=
 
+wecome_AI_plt = Welcome to OpenI AI Collaboration Platform!
+explore_AI = Explore better AI, come here to find more interesting
+datasets = Datasets
+repositories = Repositories
+use_plt__fuction = To use the AI collaboration functions provided by this platform, such as: hosting code, sharing data, debugging algorithms or training models, start with
+provide_resoure = Computing resources of CPU/GPU/NPU are provided freely for various types of AI tasks.
+create_pro = Create new project
+activity = Activity
+no_events = There are no events related to you yet
+or_t = or
+
 [explore]
 repos = Repositories
 select_repos = Select the project
@@ -2844,7 +2854,7 @@ mirror_sync_create = synced new reference %[2]s to %[2]s at %[3]s from mirror
 approve_pull_request = `approved %s#%[2]s`
 reject_pull_request = `suggested changes for %s#%[2]s`
-upload_dataset=`upload dataset %s`
+upload_dataset=`upload dataset %s`
 task_gpudebugjob=`created CPU/GPU type debugging task%s`
 task_npudebugjob=`created NPU type debugging task %s`
 task_nputrainjob=`created NPU training task%s`
diff --git a/options/locale/locale_zh-CN.ini b/options/locale/locale_zh-CN.ini
index ea40f9aa8..392e3ae1c 100755
--- a/options/locale/locale_zh-CN.ini
+++ b/options/locale/locale_zh-CN.ini
@@ -268,6 +268,18 @@ search_related=相关
 search_maybe=约为
 search_ge=个
 
+wecome_AI_plt=欢迎来到启智AI协作平台!
+explore_AI = 探索更好的AI,来这里发现更有意思的
+datasets = 数据集
+repositories = 项目
+use_plt__fuction = 使用本平台提供的AI协作功能,如:托管代码、共享数据、调试算法或训练模型,请先
+provide_resoure = 平台目前免费提供CPU、GPU、NPU的算力资源,可进行多种类型的AI任务。
+create_pro = 创建项目
+activity = 活动
+no_events = 还没有与您相关的活动
+or_t = 或
+
+
 [explore]
 repos=项目
 select_repos=精选项目
@@ -2852,7 +2864,7 @@ mirror_sync_create=从镜像同步了新的引用 %[2]s
 mirror_sync_delete=从镜像同步并从 %[3]s 删除了引用 %[2]s
 approve_pull_request=`同意了 %s#%[2]s`
 reject_pull_request=`建议变更 %s#%[2]s`
-upload_dataset=`上传了数据集文件 %s`
+upload_dataset=`上传了数据集文件 %s`
 task_gpudebugjob=`创建了CPU/GPU类型调试任务 %s`
 task_npudebugjob=`创建了NPU类型调试任务 %s`
 task_nputrainjob=`创建了NPU类型训练任务 %s`
diff --git a/public/home/home.js b/public/home/home.js
index 478c70f21..d8e423def 100755
--- a/public/home/home.js
+++ b/public/home/home.js
@@ -99,6 +99,11 @@ socket.onmessage = function (e) {
             console.log("receive action type=" + record.OpType + " name=" + actionName + " but user is null.");
             continue;
         }
+        if(record.OpType == "24"){
+            if(record.Content.indexOf("true") != -1){
+                continue;
+            }
+        }
         var recordPrefix = getMsg(record);
         if(record.OpType == "6" || record.OpType == "10" || record.OpType == "12" || record.OpType == "13"){
             html += recordPrefix + actionName;
@@ -162,7 +167,7 @@ socket.onmessage = function (e) {
 function getTaskLink(record){
     var re = getRepoLink(record);
     if(record.OpType == 24){
-        re = re + "/datasets?type=" + record.Content;
+        re = re + "/datasets";
     }else if(record.OpType == 25){
         re = re + "/cloudbrain/" + record.Content;
     }else if(record.OpType == 26){
diff --git a/public/home/search.js b/public/home/search.js
index e23d27549..c55d1807c 100644
--- a/public/home/search.js
+++ b/public/home/search.js
@@ -101,16 +101,20 @@ function initPageInfo(){
 function searchItem(type,sortType){
     console.log("enter item 2.");
-    currentSearchKeyword = document.getElementById("keyword_input").value;
-    if(!isEmpty(currentSearchKeyword)){
-        initPageInfo();
-        currentSearchTableName = itemType[type];
-        currentSearchSortBy = sortBy[sortType];
-        currentSearchAscending = sortAscending[sortType];
-        OnlySearchLabel =false;
-        page(currentPage);
+    if(OnlySearchLabel){
+        doSearchLabel(currentSearchTableName,currentSearchKeyword,sortBy[sortType],sortAscending[sortType])
     }else{
-        emptySearch();
+        currentSearchKeyword = document.getElementById("keyword_input").value;
+        if(!isEmpty(currentSearchKeyword)){
+            initPageInfo();
+            currentSearchTableName = itemType[type];
+            currentSearchSortBy = sortBy[sortType];
+            currentSearchAscending = sortAscending[sortType];
+            OnlySearchLabel =false;
+            page(currentPage);
+        }else{
+            emptySearch();
+        }
     }
 }
 
@@ -806,17 +810,21 @@ var repoAndOrgEN={
 
 function page(current){
     currentPage=current;
+    startIndex = currentPage -1;
+    if(startIndex < 1){
+        startIndex = 1;
+    }
+    endIndex = currentPage + 2;
+    if(endIndex >= totalPage){
+        endIndex = totalPage;
+    }
     doSearch(currentSearchTableName,currentSearchKeyword,current,pageSize,false,currentSearchSortBy,OnlySearchLabel);
-
 }
 
 function nextPage(){
     currentPage = currentPage+1;
     console.log("currentPage=" + currentPage);
-    if(currentPage >= endIndex){
-        startIndex=startIndex+1;
-        endIndex = endIndex +1;
-    }
+
     page(currentPage);
 }
 
@@ -824,10 +832,6 @@ function page(current){
     console.log("currentPage=" + currentPage);
     if(currentPage > 1){
         currentPage = currentPage-1;
-        if(currentPage <= startIndex && startIndex > 1){
-            startIndex = startIndex -1;
-            endIndex = endIndex - 1;
-        }
         console.log("currentPage=" + (currentPage));
         page(currentPage);
     }
@@ -862,7 +866,7 @@ function getYPosition(e){
         showTip(getLabel(isZh,"search_input_large_0"),"warning",left+5,top);
     }
     else if(goNum<=totalPage){
-        page(goNum);
+        page(parseInt(goNum,10));
     }
     else{
         showTip(getLabel(isZh,"search_input_maxed"),"warning",left+5,top);
@@ -908,6 +912,11 @@ function getYPosition(e){
         }
     }
 
+    if (endIndex < totalPage-1){
+        html += "...";
+        html += "" + totalPage + "";
+    }
+
     if(currentPage >=totalPage){
         html += "";
         html += "" + getLabel(isZh,"search_last_page") + "";
ctx.Tr("user.static.codemergecount"), "E1": ctx.Tr("user.static.commitcount"), "F1": ctx.Tr("user.static.issuecount"), "G1": ctx.Tr("user.static.commentcount"), @@ -54,11 +54,11 @@ func queryUserDataPage(ctx *context.Context, tableName string, queryObj interfac "N1": ctx.Tr("user.static.encyclopediascount"), "O1": ctx.Tr("user.static.createrepocount"), "P1": ctx.Tr("user.static.openiindex"), - "Q1": ctx.Tr("user.static.registdate"), - "R1": ctx.Tr("user.static.CloudBrainTaskNum"), - "S1": ctx.Tr("user.static.CloudBrainRunTime"), - "T1": ctx.Tr("user.static.CommitDatasetNum"), - "U1": ctx.Tr("user.static.CommitModelCount"), + "Q1": ctx.Tr("user.static.CloudBrainTaskNum"), + "R1": ctx.Tr("user.static.CloudBrainRunTime"), + "S1": ctx.Tr("user.static.CommitDatasetNum"), + "T1": ctx.Tr("user.static.CommitModelCount"), + "U1": ctx.Tr("user.static.registdate"), "V1": ctx.Tr("user.static.countdate"), } for k, v := range dataHeader { @@ -77,8 +77,8 @@ func queryUserDataPage(ctx *context.Context, tableName string, queryObj interfac rows := fmt.Sprint(row) xlsx.SetCellValue(sheetName, "A"+rows, userRecord.ID) xlsx.SetCellValue(sheetName, "B"+rows, userRecord.Name) - xlsx.SetCellValue(sheetName, "C"+rows, userRecord.CodeMergeCount) - xlsx.SetCellValue(sheetName, "D"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex)) + xlsx.SetCellValue(sheetName, "C"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex)) + xlsx.SetCellValue(sheetName, "D"+rows, userRecord.CodeMergeCount) xlsx.SetCellValue(sheetName, "E"+rows, userRecord.CommitCount) xlsx.SetCellValue(sheetName, "F"+rows, userRecord.IssueCount) xlsx.SetCellValue(sheetName, "G"+rows, userRecord.CommentCount) @@ -91,14 +91,12 @@ func queryUserDataPage(ctx *context.Context, tableName string, queryObj interfac xlsx.SetCellValue(sheetName, "N"+rows, userRecord.EncyclopediasCount) xlsx.SetCellValue(sheetName, "O"+rows, userRecord.CreateRepoCount) xlsx.SetCellValue(sheetName, "P"+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex)) - + xlsx.SetCellValue(sheetName, "Q"+rows, userRecord.CloudBrainTaskNum) + xlsx.SetCellValue(sheetName, "R"+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600)) + xlsx.SetCellValue(sheetName, "S"+rows, userRecord.CommitDatasetNum) + xlsx.SetCellValue(sheetName, "T"+rows, userRecord.CommitModelCount) formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05") - xlsx.SetCellValue(sheetName, "Q"+rows, formatTime[0:len(formatTime)-3]) - - xlsx.SetCellValue(sheetName, "R"+rows, userRecord.CloudBrainTaskNum) - xlsx.SetCellValue(sheetName, "S"+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600)) - xlsx.SetCellValue(sheetName, "T"+rows, userRecord.CommitDatasetNum) - xlsx.SetCellValue(sheetName, "U"+rows, userRecord.CommitModelCount) + xlsx.SetCellValue(sheetName, "U"+rows, formatTime[0:len(formatTime)-3]) formatTime = userRecord.DataDate xlsx.SetCellValue(sheetName, "V"+rows, formatTime) } @@ -241,8 +239,8 @@ func QueryUserStaticDataPage(ctx *context.Context) { dataHeader := map[string]string{ "A1": ctx.Tr("user.static.id"), "B1": ctx.Tr("user.static.name"), - "C1": ctx.Tr("user.static.codemergecount"), - "D1": ctx.Tr("user.static.UserIndex"), + "C1": ctx.Tr("user.static.UserIndex"), + "D1": ctx.Tr("user.static.codemergecount"), "E1": ctx.Tr("user.static.commitcount"), "F1": ctx.Tr("user.static.issuecount"), "G1": ctx.Tr("user.static.commentcount"), @@ -255,11 +253,11 @@ func QueryUserStaticDataPage(ctx *context.Context) { "N1": ctx.Tr("user.static.encyclopediascount"), "O1": 
ctx.Tr("user.static.createrepocount"), "P1": ctx.Tr("user.static.openiindex"), - "Q1": ctx.Tr("user.static.registdate"), - "R1": ctx.Tr("user.static.CloudBrainTaskNum"), - "S1": ctx.Tr("user.static.CloudBrainRunTime"), - "T1": ctx.Tr("user.static.CommitDatasetNum"), - "U1": ctx.Tr("user.static.CommitModelCount"), + "Q1": ctx.Tr("user.static.CloudBrainTaskNum"), + "R1": ctx.Tr("user.static.CloudBrainRunTime"), + "S1": ctx.Tr("user.static.CommitDatasetNum"), + "T1": ctx.Tr("user.static.CommitModelCount"), + "U1": ctx.Tr("user.static.registdate"), "V1": ctx.Tr("user.static.countdate"), } for k, v := range dataHeader { @@ -272,8 +270,8 @@ func QueryUserStaticDataPage(ctx *context.Context) { xlsx.SetCellValue(sheetName, "A"+rows, userRecord.ID) xlsx.SetCellValue(sheetName, "B"+rows, userRecord.Name) - xlsx.SetCellValue(sheetName, "C"+rows, userRecord.CodeMergeCount) - xlsx.SetCellValue(sheetName, "D"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex)) + xlsx.SetCellValue(sheetName, "C"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex)) + xlsx.SetCellValue(sheetName, "D"+rows, userRecord.CodeMergeCount) xlsx.SetCellValue(sheetName, "E"+rows, userRecord.CommitCount) xlsx.SetCellValue(sheetName, "F"+rows, userRecord.IssueCount) xlsx.SetCellValue(sheetName, "G"+rows, userRecord.CommentCount) @@ -286,13 +284,12 @@ func QueryUserStaticDataPage(ctx *context.Context) { xlsx.SetCellValue(sheetName, "N"+rows, userRecord.EncyclopediasCount) xlsx.SetCellValue(sheetName, "O"+rows, userRecord.CreateRepoCount) xlsx.SetCellValue(sheetName, "P"+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex)) - + xlsx.SetCellValue(sheetName, "Q"+rows, userRecord.CloudBrainTaskNum) + xlsx.SetCellValue(sheetName, "R"+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600)) + xlsx.SetCellValue(sheetName, "S"+rows, userRecord.CommitDatasetNum) + xlsx.SetCellValue(sheetName, "T"+rows, userRecord.CommitModelCount) formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05") - xlsx.SetCellValue(sheetName, "Q"+rows, formatTime[0:len(formatTime)-3]) - xlsx.SetCellValue(sheetName, "R"+rows, userRecord.CloudBrainTaskNum) - xlsx.SetCellValue(sheetName, "S"+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600)) - xlsx.SetCellValue(sheetName, "T"+rows, userRecord.CommitDatasetNum) - xlsx.SetCellValue(sheetName, "U"+rows, userRecord.CommitModelCount) + xlsx.SetCellValue(sheetName, "U"+rows, formatTime[0:len(formatTime)-3]) formatTime = userRecord.DataDate xlsx.SetCellValue(sheetName, "V"+rows, formatTime) } diff --git a/routers/routes/routes.go b/routers/routes/routes.go index 9df429e8b..e6bda142b 100755 --- a/routers/routes/routes.go +++ b/routers/routes/routes.go @@ -608,12 +608,11 @@ func RegisterRoutes(m *macaron.Macaron) { m.Put("/obs_proxy_multipart", repo.PutOBSProxyUpload) m.Get("/obs_proxy_download", repo.GetOBSProxyDownload) m.Get("/get_multipart_url", repo.GetMultipartUploadUrl) - m.Post("/complete_multipart", repo.CompleteMultipart) - m.Post("/update_chunk", repo.UpdateMultipart) }, reqSignIn) m.Group("/attachments", func() { m.Post("/decompress_done_notify", repo.UpdateAttachmentDecompressState) + m.Post("/complete_multipart", repo.CompleteMultipart) }) m.Group("/attachments", func() { diff --git a/routers/search.go b/routers/search.go index 1cf78666e..fe1643c80 100644 --- a/routers/search.go +++ b/routers/search.go @@ -183,7 +183,7 @@ func searchRepoByLabel(ctx *context.Context, Key string, Page int, PageSize int) topicsQuery := elastic.NewMatchQuery("topics", Key) boolQ.Should(topicsQuery) - res, 
diff --git a/routers/repo/modelarts.go b/routers/repo/modelarts.go
index 318726e8e..b713f385f 100755
--- a/routers/repo/modelarts.go
+++ b/routers/repo/modelarts.go
@@ -247,7 +247,9 @@ func Notebook2Create(ctx *context.Context, form auth.CreateModelArtsNotebookForm
 func NotebookShow(ctx *context.Context) {
 	ctx.Data["PageIsCloudBrain"] = true
 	debugListType := ctx.Query("debugListType")
-
+	if debugListType == "" {
+		debugListType = "all"
+	}
 	var ID = ctx.Params(":id")
 	task, err := models.GetCloudbrainByIDWithDeleted(ID)
 	if err != nil {
@@ -1027,10 +1029,8 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm)
 	gitRepo, _ := git.OpenRepository(repo.RepoPath())
 	commitID, _ := gitRepo.GetBranchCommitID(branch_name)
 
-	if err := git.Clone(repo.RepoPath(), codeLocalPath, git.CloneRepoOptions{
-		Branch: branch_name,
-	}); err != nil {
-		log.Error("Create task failed, server timed out: %s (%v)", repo.FullName(), err)
+	if err := downloadCode(repo, codeLocalPath, branch_name); err != nil {
+		log.Error("downloadCode failed, server timed out: %s (%v)", repo.FullName(), err)
 		trainJobErrorNewDataPrepare(ctx, form)
 		ctx.RenderWithErr("Create task failed, server timed out", tplModelArtsTrainJobNew, &form)
 		return
@@ -1245,9 +1245,7 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ
 	gitRepo, _ := git.OpenRepository(repo.RepoPath())
 	commitID, _ := gitRepo.GetBranchCommitID(branch_name)
 
-	if err := git.Clone(repo.RepoPath(), codeLocalPath, git.CloneRepoOptions{
-		Branch: branch_name,
-	}); err != nil {
+	if err := downloadCode(repo, codeLocalPath, branch_name); err != nil {
 		log.Error("Failed git clone repo to local(!: %s (%v)", repo.FullName(), err)
 		versionErrorDataPrepare(ctx, form)
 		ctx.RenderWithErr("Failed git clone repo to local!", tplModelArtsTrainJobVersionNew, &form)
@@ -1874,9 +1872,7 @@ func InferenceJobCreate(ctx *context.Context, form auth.CreateModelArtsInference
 	gitRepo, _ := git.OpenRepository(repo.RepoPath())
 	commitID, _ := gitRepo.GetBranchCommitID(branch_name)
 
-	if err := git.Clone(repo.RepoPath(), codeLocalPath, git.CloneRepoOptions{
-		Branch: branch_name,
-	}); err != nil {
+	if err := downloadCode(repo, codeLocalPath, branch_name); err != nil {
 		log.Error("Create task failed, server timed out: %s (%v)", repo.FullName(), err)
 		inferenceJobErrorNewDataPrepare(ctx, form)
 		ctx.RenderWithErr("Create task failed, server timed out", tplModelArtsInferenceJobNew, &form)
diff --git a/routers/repo/user_data_analysis.go b/routers/repo/user_data_analysis.go
index f8a036feb..995465b09 100755
--- a/routers/repo/user_data_analysis.go
+++ b/routers/repo/user_data_analysis.go
@@ -40,8 +40,8 @@ func queryUserDataPage(ctx *context.Context, tableName string, queryObj interfac
 	dataHeader := map[string]string{
 		"A1": ctx.Tr("user.static.id"),
 		"B1": ctx.Tr("user.static.name"),
-		"C1": ctx.Tr("user.static.codemergecount"),
-		"D1": ctx.Tr("user.static.UserIndex"),
+		"C1": ctx.Tr("user.static.UserIndex"),
+		"D1": ctx.Tr("user.static.codemergecount"),
 		"E1": ctx.Tr("user.static.commitcount"),
 		"F1": ctx.Tr("user.static.issuecount"),
 		"G1": ctx.Tr("user.static.commentcount"),
@@ -54,11 +54,11 @@ func queryUserDataPage(ctx *context.Context, tableName string, queryObj interfac
 		"N1": ctx.Tr("user.static.encyclopediascount"),
 		"O1": ctx.Tr("user.static.createrepocount"),
 		"P1": ctx.Tr("user.static.openiindex"),
-		"Q1": ctx.Tr("user.static.registdate"),
-		"R1": ctx.Tr("user.static.CloudBrainTaskNum"),
-		"S1": ctx.Tr("user.static.CloudBrainRunTime"),
-		"T1": ctx.Tr("user.static.CommitDatasetNum"),
-		"U1": ctx.Tr("user.static.CommitModelCount"),
+		"Q1": ctx.Tr("user.static.CloudBrainTaskNum"),
+		"R1": ctx.Tr("user.static.CloudBrainRunTime"),
+		"S1": ctx.Tr("user.static.CommitDatasetNum"),
+		"T1": ctx.Tr("user.static.CommitModelCount"),
+		"U1": ctx.Tr("user.static.registdate"),
 		"V1": ctx.Tr("user.static.countdate"),
 	}
 	for k, v := range dataHeader {
@@ -77,8 +77,8 @@ func queryUserDataPage(ctx *context.Context, tableName string, queryObj interfac
 		rows := fmt.Sprint(row)
 		xlsx.SetCellValue(sheetName, "A"+rows, userRecord.ID)
 		xlsx.SetCellValue(sheetName, "B"+rows, userRecord.Name)
-		xlsx.SetCellValue(sheetName, "C"+rows, userRecord.CodeMergeCount)
-		xlsx.SetCellValue(sheetName, "D"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex))
+		xlsx.SetCellValue(sheetName, "C"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex))
+		xlsx.SetCellValue(sheetName, "D"+rows, userRecord.CodeMergeCount)
 		xlsx.SetCellValue(sheetName, "E"+rows, userRecord.CommitCount)
 		xlsx.SetCellValue(sheetName, "F"+rows, userRecord.IssueCount)
 		xlsx.SetCellValue(sheetName, "G"+rows, userRecord.CommentCount)
@@ -91,14 +91,12 @@ func queryUserDataPage(ctx *context.Context, tableName string, queryObj interfac
 		xlsx.SetCellValue(sheetName, "N"+rows, userRecord.EncyclopediasCount)
 		xlsx.SetCellValue(sheetName, "O"+rows, userRecord.CreateRepoCount)
 		xlsx.SetCellValue(sheetName, "P"+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex))
-
+		xlsx.SetCellValue(sheetName, "Q"+rows, userRecord.CloudBrainTaskNum)
+		xlsx.SetCellValue(sheetName, "R"+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600))
+		xlsx.SetCellValue(sheetName, "S"+rows, userRecord.CommitDatasetNum)
+		xlsx.SetCellValue(sheetName, "T"+rows, userRecord.CommitModelCount)
 		formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05")
-		xlsx.SetCellValue(sheetName, "Q"+rows, formatTime[0:len(formatTime)-3])
-
-		xlsx.SetCellValue(sheetName, "R"+rows, userRecord.CloudBrainTaskNum)
-		xlsx.SetCellValue(sheetName, "S"+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600))
-		xlsx.SetCellValue(sheetName, "T"+rows, userRecord.CommitDatasetNum)
-		xlsx.SetCellValue(sheetName, "U"+rows, userRecord.CommitModelCount)
+		xlsx.SetCellValue(sheetName, "U"+rows, formatTime[0:len(formatTime)-3])
 		formatTime = userRecord.DataDate
 		xlsx.SetCellValue(sheetName, "V"+rows, formatTime)
 	}
@@ -241,8 +239,8 @@ func QueryUserStaticDataPage(ctx *context.Context) {
 	dataHeader := map[string]string{
 		"A1": ctx.Tr("user.static.id"),
 		"B1": ctx.Tr("user.static.name"),
-		"C1": ctx.Tr("user.static.codemergecount"),
-		"D1": ctx.Tr("user.static.UserIndex"),
+		"C1": ctx.Tr("user.static.UserIndex"),
+		"D1": ctx.Tr("user.static.codemergecount"),
 		"E1": ctx.Tr("user.static.commitcount"),
 		"F1": ctx.Tr("user.static.issuecount"),
 		"G1": ctx.Tr("user.static.commentcount"),
@@ -255,11 +253,11 @@ func QueryUserStaticDataPage(ctx *context.Context) {
 		"N1": ctx.Tr("user.static.encyclopediascount"),
 		"O1": ctx.Tr("user.static.createrepocount"),
 		"P1": ctx.Tr("user.static.openiindex"),
-		"Q1": ctx.Tr("user.static.registdate"),
-		"R1": ctx.Tr("user.static.CloudBrainTaskNum"),
-		"S1": ctx.Tr("user.static.CloudBrainRunTime"),
-		"T1": ctx.Tr("user.static.CommitDatasetNum"),
-		"U1": ctx.Tr("user.static.CommitModelCount"),
+		"Q1": ctx.Tr("user.static.CloudBrainTaskNum"),
+		"R1": ctx.Tr("user.static.CloudBrainRunTime"),
+		"S1": ctx.Tr("user.static.CommitDatasetNum"),
+		"T1": ctx.Tr("user.static.CommitModelCount"),
+		"U1": ctx.Tr("user.static.registdate"),
 		"V1": ctx.Tr("user.static.countdate"),
 	}
 	for k, v := range dataHeader {
@@ -272,8 +270,8 @@ func QueryUserStaticDataPage(ctx *context.Context) {
 		xlsx.SetCellValue(sheetName, "A"+rows, userRecord.ID)
 		xlsx.SetCellValue(sheetName, "B"+rows, userRecord.Name)
-		xlsx.SetCellValue(sheetName, "C"+rows, userRecord.CodeMergeCount)
-		xlsx.SetCellValue(sheetName, "D"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex))
+		xlsx.SetCellValue(sheetName, "C"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex))
+		xlsx.SetCellValue(sheetName, "D"+rows, userRecord.CodeMergeCount)
 		xlsx.SetCellValue(sheetName, "E"+rows, userRecord.CommitCount)
 		xlsx.SetCellValue(sheetName, "F"+rows, userRecord.IssueCount)
 		xlsx.SetCellValue(sheetName, "G"+rows, userRecord.CommentCount)
@@ -286,13 +284,12 @@ func QueryUserStaticDataPage(ctx *context.Context) {
 		xlsx.SetCellValue(sheetName, "N"+rows, userRecord.EncyclopediasCount)
 		xlsx.SetCellValue(sheetName, "O"+rows, userRecord.CreateRepoCount)
 		xlsx.SetCellValue(sheetName, "P"+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex))
-
+		xlsx.SetCellValue(sheetName, "Q"+rows, userRecord.CloudBrainTaskNum)
+		xlsx.SetCellValue(sheetName, "R"+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600))
+		xlsx.SetCellValue(sheetName, "S"+rows, userRecord.CommitDatasetNum)
+		xlsx.SetCellValue(sheetName, "T"+rows, userRecord.CommitModelCount)
 		formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05")
-		xlsx.SetCellValue(sheetName, "Q"+rows, formatTime[0:len(formatTime)-3])
-		xlsx.SetCellValue(sheetName, "R"+rows, userRecord.CloudBrainTaskNum)
-		xlsx.SetCellValue(sheetName, "S"+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600))
-		xlsx.SetCellValue(sheetName, "T"+rows, userRecord.CommitDatasetNum)
-		xlsx.SetCellValue(sheetName, "U"+rows, userRecord.CommitModelCount)
+		xlsx.SetCellValue(sheetName, "U"+rows, formatTime[0:len(formatTime)-3])
 		formatTime = userRecord.DataDate
 		xlsx.SetCellValue(sheetName, "V"+rows, formatTime)
 	}
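Reordering the Q through U columns meant editing the header map and the row writer in two functions, twenty literal cell letters in all. One way to make such moves single-line edits is to drive both loops from one ordered slice; a hypothetical sketch against the same excelize SetCellValue API:

```go
// Sketch: a single ordered column list drives both the header row and
// the data rows. The col type and accessors are hypothetical.
type col struct {
	headerKey string                                           // i18n key
	value     func(u *models.UserBusinessAnalysis) interface{} // accessor
}

func writeRows(xlsx *excelize.File, sheet string, tr func(string) string,
	cols []col, records []*models.UserBusinessAnalysis) {
	for i, c := range cols {
		letter := string(rune('A' + i)) // sufficient for <= 26 columns
		xlsx.SetCellValue(sheet, letter+"1", tr(c.headerKey))
		for row, u := range records {
			xlsx.SetCellValue(sheet, letter+fmt.Sprint(row+2), c.value(u))
		}
	}
}
```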
diff --git a/routers/routes/routes.go b/routers/routes/routes.go
index 9df429e8b..e6bda142b 100755
--- a/routers/routes/routes.go
+++ b/routers/routes/routes.go
@@ -608,12 +608,11 @@ func RegisterRoutes(m *macaron.Macaron) {
 			m.Put("/obs_proxy_multipart", repo.PutOBSProxyUpload)
 			m.Get("/obs_proxy_download", repo.GetOBSProxyDownload)
 			m.Get("/get_multipart_url", repo.GetMultipartUploadUrl)
-			m.Post("/complete_multipart", repo.CompleteMultipart)
-			m.Post("/update_chunk", repo.UpdateMultipart)
 		}, reqSignIn)
 
 		m.Group("/attachments", func() {
 			m.Post("/decompress_done_notify", repo.UpdateAttachmentDecompressState)
+			m.Post("/complete_multipart", repo.CompleteMultipart)
 		})
 
 		m.Group("/attachments", func() {
diff --git a/routers/search.go b/routers/search.go
index 1cf78666e..fe1643c80 100644
--- a/routers/search.go
+++ b/routers/search.go
@@ -183,7 +183,7 @@ func searchRepoByLabel(ctx *context.Context, Key string, Page int, PageSize int)
 	topicsQuery := elastic.NewMatchQuery("topics", Key)
 	boolQ.Should(topicsQuery)
 
-	res, err := client.Search("repository-es-index").Query(boolQ).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Highlight(queryHighlight("topics")).Do(ctx.Req.Context())
+	res, err := client.Search("repository-es-index").Query(boolQ).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Highlight(queryHighlight("topics")).Do(ctx.Req.Context())
 	if err == nil {
 		searchJson, _ := json.Marshal(res)
 		log.Info("searchJson=" + string(searchJson))
@@ -200,15 +200,18 @@
 	}
 }
 
-func getSort(SortBy string, ascending bool) elastic.Sorter {
-	var sort elastic.Sorter
-	sort = elastic.NewScoreSort()
-	if SortBy != "" {
-		if SortBy == "default" {
-			return sort
+func getSort(SortBy string, ascending bool, secondSortBy string, secondAscending bool) []elastic.Sorter {
+	sort := make([]elastic.Sorter, 0)
+	if SortBy == "default" || SortBy == "" {
+		sort = append(sort, elastic.NewScoreSort())
+		if secondSortBy != "" {
+			log.Info("SortBy=" + SortBy + " secondSortBy=" + secondSortBy)
+			sort = append(sort, elastic.NewFieldSort(secondSortBy).Order(secondAscending))
 		}
-		return elastic.NewFieldSort(SortBy).Order(ascending)
+	} else {
+		sort = append(sort, elastic.NewFieldSort(SortBy).Order(ascending))
 	}
+	log.Info("sort size=" + fmt.Sprint(len(sort)))
 	return sort
 }
 
@@ -308,7 +311,7 @@ func searchRepo(ctx *context.Context, TableName string, Key string, Page int, Pa
 	topicsQuery := elastic.NewMatchQuery("topics", Key).Boost(1).QueryName("f_third")
 	boolQ.Should(nameQuery, descriptionQuery, topicsQuery)
 
-	res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Highlight(queryHighlight("alias", "description", "topics")).Do(ctx.Req.Context())
+	res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending, "num_stars", false)...).From(from).Size(Size).Highlight(queryHighlight("alias", "description", "topics")).Do(ctx.Req.Context())
 	if err == nil {
 		searchJson, _ := json.Marshal(res)
 		log.Info("searchJson=" + string(searchJson))
@@ -330,7 +333,7 @@ func searchRepo(ctx *context.Context, TableName string, Key string, Page int, Pa
 	} else {
 		log.Info("query all content.")
 		//搜索的属性要指定{"timestamp":{"unmapped_type":"date"}}
-		res, err := client.Search(TableName).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Do(ctx.Req.Context())
+		res, err := client.Search(TableName).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Do(ctx.Req.Context())
 		if err == nil {
 			searchJson, _ := json.Marshal(res)
 			log.Info("searchJson=" + string(searchJson))
@@ -691,7 +694,7 @@ func searchUserOrOrg(ctx *context.Context, TableName string, Key string, Page in
 		boolQ.Must(UserOrOrgQuery)
 	}
 
-	res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending)).From((Page - 1) * PageSize).Size(PageSize).Highlight(queryHighlight("name", "full_name", "description")).Do(ctx.Req.Context())
+	res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From((Page - 1) * PageSize).Size(PageSize).Highlight(queryHighlight("name", "full_name", "description")).Do(ctx.Req.Context())
 	if err == nil {
 		searchJson, _ := json.Marshal(res)
 		log.Info("searchJson=" + string(searchJson))
@@ -849,7 +852,7 @@ func searchDataSet(ctx *context.Context, TableName string, Key string, Page int,
 	fileNameQuery := elastic.NewMatchQuery("file_name", Key).Boost(1).QueryName("f_third")
 	categoryQuery := elastic.NewMatchQuery("category", Key).Boost(1).QueryName("f_fourth")
 	boolQ.Should(nameQuery, descQuery, categoryQuery, fileNameQuery)
 
-	res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Highlight(queryHighlight("title", "description", "file_name", "category")).Do(ctx.Req.Context())
+	res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Highlight(queryHighlight("title", "description", "file_name", "category")).Do(ctx.Req.Context())
 	if err == nil {
 		searchJson, _ := json.Marshal(res)
 		log.Info("searchJson=" + string(searchJson))
@@ -864,7 +867,7 @@ func searchDataSet(ctx *context.Context, TableName string, Key string, Page int,
 	} else {
 		log.Info("query all datasets.")
 		//搜索的属性要指定{"timestamp":{"unmapped_type":"date"}}
-		res, err := client.Search(TableName).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Do(ctx.Req.Context())
+		res, err := client.Search(TableName).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Do(ctx.Req.Context())
 		if err == nil {
 			searchJson, _ := json.Marshal(res)
 			log.Info("searchJson=" + string(searchJson))
@@ -1057,7 +1060,7 @@ func searchIssueOrPr(ctx *context.Context, TableName string, Key string, Page in
 		boolQ.Must(isIssueQuery)
 	}
 
-	res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Highlight(queryHighlight("name", "content", "comment")).Do(ctx.Req.Context())
+	res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Highlight(queryHighlight("name", "content", "comment")).Do(ctx.Req.Context())
 	if err == nil {
 		searchJson, _ := json.Marshal(res)
 		log.Info("searchJson=" + string(searchJson))
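getSort now returns a slice so a relevance sort can carry a field tiebreaker: for the default order it emits a score sort plus the given second key, otherwise just the requested field. Illustrative call shapes (olivere/elastic constructors as above; the field names are only examples):

```go
// Default order: score first, then the tiebreaker field.
sorters := getSort("default", false, "num_stars", false)
// => [NewScoreSort(), NewFieldSort("num_stars").Order(false)]

// Explicit field order: the second key is not used.
sorters = getSort("updated_unix.keyword", false, "num_stars", false)
// => [NewFieldSort("updated_unix.keyword").Order(false)]

// SortBy is variadic, hence the `sorters...` spread at every call site.
res, err := client.Search(TableName).Query(boolQ).SortBy(sorters...).Do(ctx.Req.Context())
```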
diff --git a/services/socketwrap/clientManager.go b/services/socketwrap/clientManager.go
index 61f356a66..6ffa96933 100755
--- a/services/socketwrap/clientManager.go
+++ b/services/socketwrap/clientManager.go
@@ -10,7 +10,7 @@ import (
 	"github.com/elliotchance/orderedmap"
 )
 
-var opTypes = []int{1, 2, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 17, 22, 23, 25, 26, 27, 28, 29, 30, 31}
+var opTypes = []int{1, 2, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 17, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31}
 
 type ClientsManager struct {
 	Clients *orderedmap.OrderedMap
diff --git a/templates/repo/cloudbrain/show.tmpl b/templates/repo/cloudbrain/show.tmpl
index 7e16c36ff..5fabaa937 100755
--- a/templates/repo/cloudbrain/show.tmpl
+++ b/templates/repo/cloudbrain/show.tmpl
@@ -187,7 +187,7 @@ td, th {
 						{{.i18n.Tr "repo.cloudbrain"}}
 					/
-
+
 						{{$.i18n.Tr "repo.modelarts.notebook"}}
 					/
diff --git a/templates/repo/datasets/index.tmpl b/templates/repo/datasets/index.tmpl
index e8d9b65e3..ba4842f57 100755
--- a/templates/repo/datasets/index.tmpl
+++ b/templates/repo/datasets/index.tmpl
@@ -281,7 +281,7 @@
 							{{$.i18n.Tr "dataset.copy_url"}}
-							{{$.i18n.Tr "dataset.copy_md5"}}
+
 							{{if and ($.CanWrite) (eq .DecompressState 1) }}
 								{{$.i18n.Tr "dataset.annotation"}}
 							{{end}}
diff --git a/templates/repo/debugjob/index.tmpl b/templates/repo/debugjob/index.tmpl
index dbdf6af09..78e05bcb5 100755
--- a/templates/repo/debugjob/index.tmpl
+++ b/templates/repo/debugjob/index.tmpl
@@ -380,7 +380,7 @@
 {{template "base/footer" .}}
+
+
+
\ No newline at end of file
diff --git a/templates/user/dashboard/feeds.tmpl b/templates/user/dashboard/feeds.tmpl
index 614c328c6..171287acb 100755
--- a/templates/user/dashboard/feeds.tmpl
+++ b/templates/user/dashboard/feeds.tmpl
@@ -71,7 +71,7 @@
 					{{ $index := index .GetIssueInfos 0}}
 					{{$.i18n.Tr "action.comment_pull" .GetRepoLink $index .ShortRepoPath | Str2html}}
 				{{else if eq .GetOpType 24}}
-					{{$.i18n.Tr "action.upload_dataset" .GetRepoLink .Content .RefName | Str2html}}
+					{{$.i18n.Tr "action.upload_dataset" .GetRepoLink .RefName | Str2html}}
 				{{else if eq .GetOpType 25}}
 					{{$.i18n.Tr "action.task_gpudebugjob" .GetRepoLink .Content .RefName | Str2html}}
 				{{else if eq .GetOpType 26}}
diff --git a/web_src/js/components/MinioUploader.vue b/web_src/js/components/MinioUploader.vue
index 8c33608e7..95f5e7e59 100755
--- a/web_src/js/components/MinioUploader.vue
+++ b/web_src/js/components/MinioUploader.vue
@@ -27,6 +27,7 @@ import createDropzone from '../features/dropzone.js';
 const {_AppSubUrl, _StaticUrlPrefix, csrf} = window.config;
 const chunkSize = 1024 * 1024 * 64;
+const md5ChunkSize = 1024 * 1024 * 1;
 
 export default {
   props:{
@@ -190,10 +191,9 @@ export default {
     let currentChunk = 0;
     const time = new Date().getTime();
 
-    // console.log('计算MD5...')
     this.status = this.dropzoneParams.data('md5-computing');
     file.totalChunkCounts = chunks;
-    loadNext();
+    loadMd5Next();
 
     fileReader.onload = (e) => {
       fileLoaded.call(this, e);
@@ -207,13 +207,12 @@ export default {
       spark.append(e.target.result); // Append array buffer
       currentChunk++;
       if (currentChunk < chunks) {
-        // console.log(`第${currentChunk}分片解析完成, 开始第${currentChunk +1}/${chunks}分片解析`);
         this.status = `${this.dropzoneParams.data('loading-file')} ${(
           (currentChunk / chunks) *
           100
         ).toFixed(2)}% (${currentChunk}/${chunks})`;
         this.updateProgress(file, ((currentChunk / chunks) * 100).toFixed(2));
-        loadNext();
+        loadMd5Next();
         return;
       }
@@ -235,6 +234,13 @@ export default {
         start + chunkSize >= file.size ? file.size : start + chunkSize;
       fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
     }
+
+    function loadMd5Next() {
+      const start = currentChunk * chunkSize;
+      const end =
+        start + md5ChunkSize >= file.size ? file.size : start + md5ChunkSize;
+      fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
+    }
   },
 
   async computeMD5Success(md5edFile) {
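Note what the new constant changes: the reader still advances in 64 MiB strides (start is derived from chunkSize), but each read now covers only the first 1 MiB of the stride, so the digest is a sampled fingerprint rather than a full-file MD5. That is much faster on large files, but any consumer comparing hashes must sample the same way. The sampling as a runnable Go sketch (sizes shrunk for the demo):

```go
package main

import (
	"crypto/md5"
	"fmt"
)

// sampledMD5 hashes the first sampleSize bytes of every chunkSize
// window, mirroring loadMd5Next above.
func sampledMD5(data []byte, chunkSize, sampleSize int) string {
	h := md5.New()
	for start := 0; start < len(data); start += chunkSize {
		end := start + sampleSize
		if end > len(data) {
			end = len(data)
		}
		h.Write(data[start:end])
	}
	return fmt.Sprintf("%x", h.Sum(nil))
}

func main() {
	data := make([]byte, 3*1024*1024) // 3 MiB of zeros
	fmt.Println(sampledMD5(data, 1024*1024, 64*1024))
}
```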
diff --git a/web_src/js/components/ObsUploader.vue b/web_src/js/components/ObsUploader.vue
deleted file mode 100755
index 7ceb91a27..000000000
--- a/web_src/js/components/ObsUploader.vue
+++ /dev/null
@@ -1,484 +0,0 @@
-
-
-
-
\ No newline at end of file
diff --git a/web_src/js/components/UserAnalysis.vue b/web_src/js/components/UserAnalysis.vue
index d7a8f1f81..117984f95 100755
--- a/web_src/js/components/UserAnalysis.vue
+++ b/web_src/js/components/UserAnalysis.vue
@@ -63,11 +63,6 @@
-
-
+
+
@@ -135,15 +135,6 @@
 align="center">
-
-
-
-
+
+
+