@@ -10,6 +10,7 @@ import (
 	"io"
 	"path"
 	"strings"
+	"time"

 	"code.gitea.io/gitea/modules/log"
 	"code.gitea.io/gitea/modules/obs"
@@ -104,6 +105,14 @@ func (a *Attachment) IncreaseDownloadCount() error {
 	return nil
 }
+
+func (a *Attachment) UpdateDatasetUpdateUnix() error {
+	// Touch the parent dataset's updated_unix timestamp.
+	if _, err := x.Exec("UPDATE `dataset` SET updated_unix=? WHERE id=?", time.Now().Unix(), a.DatasetID); err != nil {
+		return fmt.Errorf("UpdateDatasetUpdateUnix: %v", err)
+	}
+	return nil
+}

 // APIFormat converts models.Attachment to api.Attachment
 func (a *Attachment) APIFormat() *api.Attachment {
 	return &api.Attachment{
@@ -831,7 +831,12 @@ func getUserIndex(dateRecord UserBusinessAnalysis, ParaWeight map[string]float64
 	result += float64(dateRecord.StarRepoCount) * getParaWeightValue("StarRepoCount", ParaWeight, 0.1)
 	result += float64(dateRecord.LoginCount) * getParaWeightValue("LoginCount", ParaWeight, 0.1)
 	result += float64(dateRecord.WatchedCount) * getParaWeightValue("WatchedCount", ParaWeight, 0.3)
-	result += float64(dateRecord.CommitCodeSize) * getParaWeightValue("CommitCodeSize", ParaWeight, 0.1)
+	codeLine := float64(dateRecord.CommitCodeSize) / 1000
+	limitCodeLine := getParaWeightValue("LimitCommitCodeSize", ParaWeight, 100)
+	if codeLine >= limitCodeLine {
+		codeLine = limitCodeLine
+	}
+	result += codeLine * getParaWeightValue("CommitCodeSize", ParaWeight, 0.1)
 	result += float64(dateRecord.SolveIssueCount) * getParaWeightValue("SolveIssueCount", ParaWeight, 0.2)
 	result += float64(dateRecord.EncyclopediasCount) * getParaWeightValue("EncyclopediasCount", ParaWeight, 0.1)
 	result += float64(dateRecord.CreateRepoCount) * getParaWeightValue("CreateRepoCount", ParaWeight, 0.05)
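The new cap keeps one bulk import from dominating a user's index: CommitCodeSize is rescaled to thousands of lines and saturated at LimitCommitCodeSize (default 100) before weighting. A minimal standalone sketch of the same saturation arithmetic, with illustrative names and values that are not part of the patch:

```go
package main

import "fmt"

// cappedTerm scales a raw count by weight, saturating the count at limit.
// This mirrors the CommitCodeSize handling above: the count is kLoC and
// the limit defaults to 100.
func cappedTerm(raw, limit, weight float64) float64 {
	if raw >= limit {
		raw = limit
	}
	return raw * weight
}

func main() {
	// 350,000 changed lines -> 350 kLoC, capped to 100 kLoC before weighting.
	fmt.Println(cappedTerm(350000.0/1000, 100, 0.1)) // 10
	// 20,000 changed lines -> 20 kLoC, under the cap.
	fmt.Println(cappedTerm(20000.0/1000, 100, 0.1)) // 2
}
```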
@@ -59,21 +59,55 @@ func ObsHasObject(path string) (bool, error) {
 	return hasObject, nil
 }
+
+func listAllParts(uuid, uploadID, key string) (output *obs.ListPartsOutput, err error) {
+	output = &obs.ListPartsOutput{}
+	partNumberMarker := 0
+	for {
+		temp, err := ObsCli.ListParts(&obs.ListPartsInput{
+			Bucket:           setting.Bucket,
+			Key:              key,
+			UploadId:         uploadID,
+			MaxParts:         MAX_LIST_PARTS,
+			PartNumberMarker: partNumberMarker,
+		})
+		if err != nil {
+			log.Error("ListParts failed: %v", err)
+			return output, err
+		}
+
+		partNumberMarker = temp.NextPartNumberMarker
+		log.Info("uuid:%s, MaxParts:%d, PartNumberMarker:%d, NextPartNumberMarker:%d, len:%d", uuid, temp.MaxParts, temp.PartNumberMarker, temp.NextPartNumberMarker, len(temp.Parts))
+
+		for _, partInfo := range temp.Parts {
+			output.Parts = append(output.Parts, obs.Part{
+				PartNumber: partInfo.PartNumber,
+				ETag:       partInfo.ETag,
+			})
+		}
+
+		// A short page means the final batch has been fetched.
+		if len(temp.Parts) < temp.MaxParts {
+			break
+		}
+	}
+
+	return output, nil
+}

 func GetObsPartInfos(uuid, uploadID, fileName string) (string, error) {
 	key := strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")

-	output, err := ObsCli.ListParts(&obs.ListPartsInput{
-		Bucket:   setting.Bucket,
-		Key:      key,
-		UploadId: uploadID,
-	})
+	allParts, err := listAllParts(uuid, uploadID, key)
 	if err != nil {
-		log.Error("ListParts failed:", err.Error())
+		log.Error("listAllParts failed: %v", err)
 		return "", err
 	}

 	var chunks string
-	for _, partInfo := range output.Parts {
+	for _, partInfo := range allParts.Parts {
 		chunks += strconv.Itoa(partInfo.PartNumber) + "-" + partInfo.ETag + ","
 	}
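listAllParts exists because OBS paginates ListParts responses: each call returns at most MaxParts entries plus a NextPartNumberMarker, so the old single call silently truncated uploads with more than MAX_LIST_PARTS chunks. Both GetObsPartInfos here and CompleteObsMultiPartUpload below now share one drain loop. A hedged sketch of the same follow-the-marker pattern against a toy paged API (the page/fetch types are stand-ins, not the OBS SDK):

```go
package main

import "fmt"

// page is a stand-in for one ListParts response: a batch of items, the page
// size the server honored, and the marker for the next call.
type page struct {
	items      []int
	maxItems   int
	nextMarker int
}

// fetch simulates a paged API: it returns up to max items starting at marker.
func fetch(all []int, marker, max int) page {
	end := marker + max
	if end > len(all) {
		end = len(all)
	}
	return page{items: all[marker:end], maxItems: max, nextMarker: end}
}

// drain collects every item by following markers until a short page arrives,
// the same termination rule listAllParts uses (len(parts) < MaxParts).
func drain(all []int, max int) []int {
	var out []int
	marker := 0
	for {
		p := fetch(all, marker, max)
		out = append(out, p.items...)
		if len(p.items) < p.maxItems {
			break
		}
		marker = p.nextMarker
	}
	return out
}

func main() {
	parts := make([]int, 2500)
	fmt.Println(len(drain(parts, 1000))) // 2500, gathered across three pages
}
```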
@@ -100,39 +134,14 @@ func CompleteObsMultiPartUpload(uuid, uploadID, fileName string) error {
 	input.Key = strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")
 	input.UploadId = uploadID

-	partNumberMarker := 0
-	for {
-		output, err := ObsCli.ListParts(&obs.ListPartsInput{
-			Bucket:           setting.Bucket,
-			Key:              input.Key,
-			UploadId:         uploadID,
-			MaxParts:         MAX_LIST_PARTS,
-			PartNumberMarker: partNumberMarker,
-		})
-		if err != nil {
-			log.Error("ListParts failed:", err.Error())
-			return err
-		}
-
-		partNumberMarker = output.NextPartNumberMarker
-		log.Info("uuid:%s, MaxParts:%d, PartNumberMarker:%d, NextPartNumberMarker:%d, len:%d", uuid, output.MaxParts, output.PartNumberMarker, output.NextPartNumberMarker, len(output.Parts))
-
-		for _, partInfo := range output.Parts {
-			input.Parts = append(input.Parts, obs.Part{
-				PartNumber: partInfo.PartNumber,
-				ETag:       partInfo.ETag,
-			})
-		}
-
-		if len(output.Parts) < output.MaxParts {
-			break
-		} else {
-			continue
-		}
-
-		break
+	allParts, err := listAllParts(uuid, uploadID, input.Key)
+	if err != nil {
+		log.Error("listAllParts failed: %v", err)
+		return err
 	}
+	input.Parts = allParts.Parts

 	output, err := ObsCli.CompleteMultipartUpload(input)
 	if err != nil {
 		log.Error("CompleteMultipartUpload failed:", err.Error())
@@ -266,6 +266,16 @@ search_related=related
 search_maybe=maybe
 search_ge=

+wecome_AI_plt = Welcome to the OpenI AI Collaboration Platform!
+explore_AI = Explore better AI. Come here to discover more interesting
+datasets = Datasets
+repositories = Repositories
+use_plt__fuction = To use the AI collaboration functions provided by this platform, such as hosting code, sharing data, debugging algorithms or training models, start with
+provide_resoure = CPU/GPU/NPU computing resources are provided free of charge for various types of AI tasks.
+activity = Activity
+no_events = There are no activities related to you yet
+or_t = or
+
 [explore]
 repos = Repositories
 select_repos = Select the project
@@ -2844,7 +2854,7 @@ mirror_sync_create = synced new reference <a href="%s/src/%s">%[2]s</a> to <a hr
 mirror_sync_delete = synced and deleted reference <code>%[2]s</code> at <a href="%[1]s">%[3]s</a> from mirror
 approve_pull_request = `approved <a href="%s/pulls/%s">%s#%[2]s</a>`
 reject_pull_request = `suggested changes for <a href="%s/pulls/%s">%s#%[2]s</a>`
-upload_dataset=`upload dataset <a href="%s/datasets?type=%s">%s</a>`
+upload_dataset=`upload dataset <a href="%s/datasets">%s</a>`
 task_gpudebugjob=`created CPU/GPU type debugging task<a href="%s/cloudbrain/%s">%s</a>`
 task_npudebugjob=`created NPU type debugging task <a href="%s/modelarts/notebook/%s">%s</a>`
 task_nputrainjob=`created NPU training task<a href="%s/modelarts/train-job/%s">%s</a>`
@@ -268,6 +268,18 @@ search_related=相关
 search_maybe=约为
 search_ge=个

+wecome_AI_plt=欢迎来到启智AI协作平台!
+explore_AI = 探索更好的AI,来这里发现更有意思的
+datasets = 数据集
+repositories = 项目
+use_plt__fuction = 使用本平台提供的AI协作功能,如:托管代码、共享数据、调试算法或训练模型,请先
+provide_resoure = 平台目前免费提供CPU、GPU、NPU的算力资源,可进行多种类型的AI任务。
+create_pro = 创建项目
+activity = 活动
+no_events = 还没有与您相关的活动
+or_t = 或
+
 [explore]
 repos=项目
 select_repos=精选项目
@@ -2852,7 +2864,7 @@ mirror_sync_create=从镜像同步了新的引用 <a href="%s/src/%s">%[2]s</a>
 mirror_sync_delete=从镜像同步并从 <a href="%[1]s">%[3]s</a> 删除了引用 <code>%[2]s</code>
 approve_pull_request=`同意了 <a href="%s/pulls/%s">%s#%[2]s</a>`
 reject_pull_request=`建议变更 <a href="%s/pulls/%s">%s#%[2]s</a>`
-upload_dataset=`上传了数据集文件 <a href="%s/datasets?type=%s">%s</a>`
+upload_dataset=`上传了数据集文件 <a href="%s/datasets">%s</a>`
 task_gpudebugjob=`创建了CPU/GPU类型调试任务 <a href="%s/cloudbrain/%s">%s</a>`
 task_npudebugjob=`创建了NPU类型调试任务 <a href="%s/modelarts/notebook/%s">%s</a>`
 task_nputrainjob=`创建了NPU类型训练任务 <a href="%s/modelarts/train-job/%s">%s</a>`
@@ -99,6 +99,11 @@ socket.onmessage = function (e) {
             console.log("receive action type=" + record.OpType + " name=" + actionName + " but user is null.");
             continue;
         }
+        // For OpType 24 the Content field now carries the privacy flags
+        // ("repoIsPrivate attachmentIsPrivate"); skip private uploads.
+        if(record.OpType == "24"){
+            if(record.Content.indexOf("true") != -1){
+                continue;
+            }
+        }
         var recordPrefix = getMsg(record);
         if(record.OpType == "6" || record.OpType == "10" || record.OpType == "12" || record.OpType == "13"){
             html += recordPrefix + actionName;
@@ -162,7 +167,7 @@ socket.onmessage = function (e) {
 function getTaskLink(record){
     var re = getRepoLink(record);
     if(record.OpType == 24){
-        re = re + "/datasets?type=" + record.Content;
+        re = re + "/datasets";
     }else if(record.OpType == 25){
         re = re + "/cloudbrain/" + record.Content;
     }else if(record.OpType == 26){
@@ -101,16 +101,20 @@ function initPageInfo(){
 function searchItem(type,sortType){
     console.log("enter item 2.");
-    currentSearchKeyword = document.getElementById("keyword_input").value;
-    if(!isEmpty(currentSearchKeyword)){
-        initPageInfo();
-        currentSearchTableName = itemType[type];
-        currentSearchSortBy = sortBy[sortType];
-        currentSearchAscending = sortAscending[sortType];
-        OnlySearchLabel =false;
-        page(currentPage);
+    if(OnlySearchLabel){
+        doSearchLabel(currentSearchTableName,currentSearchKeyword,sortBy[sortType],sortAscending[sortType]);
     }else{
-        emptySearch();
+        currentSearchKeyword = document.getElementById("keyword_input").value;
+        if(!isEmpty(currentSearchKeyword)){
+            initPageInfo();
+            currentSearchTableName = itemType[type];
+            currentSearchSortBy = sortBy[sortType];
+            currentSearchAscending = sortAscending[sortType];
+            OnlySearchLabel = false;
+            page(currentPage);
+        }else{
+            emptySearch();
+        }
     }
 }
@@ -806,17 +810,21 @@ var repoAndOrgEN={
 function page(current){
     currentPage=current;
+    startIndex = currentPage - 1;
+    if(startIndex < 1){
+        startIndex = 1;
+    }
+    endIndex = currentPage + 2;
+    if(endIndex >= totalPage){
+        endIndex = totalPage;
+    }
     doSearch(currentSearchTableName,currentSearchKeyword,current,pageSize,false,currentSearchSortBy,OnlySearchLabel);
 }

 function nextPage(){
     currentPage = currentPage+1;
     console.log("currentPage=" + currentPage);
-    if(currentPage >= endIndex){
-        startIndex=startIndex+1;
-        endIndex = endIndex +1;
-    }
     page(currentPage);
 }
@@ -824,10 +832,6 @@ function page(current){
     console.log("currentPage=" + currentPage);
     if(currentPage > 1){
         currentPage = currentPage-1;
-        if(currentPage <= startIndex && startIndex > 1){
-            startIndex = startIndex -1;
-            endIndex = endIndex - 1;
-        }
         console.log("currentPage=" + (currentPage));
         page(currentPage);
     }
@@ -862,7 +866,7 @@ function getYPosition(e){
         showTip(getLabel(isZh,"search_input_large_0"),"warning",left+5,top);
     }
     else if(goNum<=totalPage){
-        page(goNum);
+        page(parseInt(goNum,10));
     }
     else{
         showTip(getLabel(isZh,"search_input_maxed"),"warning",left+5,top);
@@ -908,6 +912,11 @@ function getYPosition(e){ | |||||
} | } | ||||
} | } | ||||
if (endIndex < totalPage-1){ | |||||
html += "..."; | |||||
html += "<a id=\"page_" + totalPage+ "\" class=\"item\" href=\"javascript:page(" + totalPage +")\">" + totalPage + "</a>"; | |||||
} | |||||
if(currentPage >=totalPage){ | if(currentPage >=totalPage){ | ||||
html += "<a class=\"disabled item navigation\" href=\"javascript:nextPage()\"><i class=\"icon right arrow\"></i></a>"; | html += "<a class=\"disabled item navigation\" href=\"javascript:nextPage()\"><i class=\"icon right arrow\"></i></a>"; | ||||
html += "<a class=\"disabled item navigation\" href=\"javascript:page(" + totalPage + ")\"><span class=\"navigation_label\">" + getLabel(isZh,"search_last_page") + "</span></a>"; | html += "<a class=\"disabled item navigation\" href=\"javascript:page(" + totalPage + ")\"><span class=\"navigation_label\">" + getLabel(isZh,"search_last_page") + "</span></a>"; | ||||
@@ -11,7 +11,6 @@ import (
 	"fmt"
 	"mime/multipart"
 	"net/http"
-	"path"
 	"strconv"
 	"strings"
@@ -830,20 +829,6 @@ func GetMultipartUploadUrl(ctx *context.Context) {
 	})
 }

-func GetObsKey(ctx *context.Context) {
-	uuid := gouuid.NewV4().String()
-	key := strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, uuid)), "/")
-
-	ctx.JSON(200, map[string]string{
-		"uuid":              uuid,
-		"key":               key,
-		"access_key_id":     setting.AccessKeyID,
-		"secret_access_key": setting.SecretAccessKey,
-		"server":            setting.Endpoint,
-		"bucket":            setting.Bucket,
-	})
-}
-
 func CompleteMultipart(ctx *context.Context) {
 	uuid := ctx.Query("uuid")
 	uploadID := ctx.Query("uploadID")
@@ -907,10 +892,9 @@ func CompleteMultipart(ctx *context.Context) {
 		ctx.Error(500, fmt.Sprintf("InsertAttachment: %v", err))
 		return
 	}
+	attachment.UpdateDatasetUpdateUnix()
 	repository, _ := models.GetRepositoryByID(dataset.RepoID)
-	notification.NotifyOtherTask(ctx.User, repository, fmt.Sprint(attachment.Type), attachment.Name, models.ActionUploadAttachment)
+	notification.NotifyOtherTask(ctx.User, repository, fmt.Sprint(repository.IsPrivate, attachment.IsPrivate), attachment.Name, models.ActionUploadAttachment)

 	if attachment.DatasetID != 0 {
 		if isCanDecompress(attachment.Name) {
 			if typeCloudBrain == models.TypeCloudBrainOne {
@@ -947,34 +931,6 @@ func CompleteMultipart(ctx *context.Context) {
 	})
 }

-func UpdateMultipart(ctx *context.Context) {
-	uuid := ctx.Query("uuid")
-	partNumber := ctx.QueryInt("chunkNumber")
-	etag := ctx.Query("etag")
-
-	fileChunk, err := models.GetFileChunkByUUID(uuid)
-	if err != nil {
-		if models.IsErrFileChunkNotExist(err) {
-			ctx.Error(404)
-		} else {
-			ctx.ServerError("GetFileChunkByUUID", err)
-		}
-		return
-	}
-
-	fileChunk.CompletedParts = append(fileChunk.CompletedParts, strconv.Itoa(partNumber)+"-"+strings.Replace(etag, "\"", "", -1))
-
-	err = models.UpdateFileChunk(fileChunk)
-	if err != nil {
-		ctx.Error(500, fmt.Sprintf("UpdateFileChunk: %v", err))
-		return
-	}
-
-	ctx.JSON(200, map[string]string{
-		"result_code": "0",
-	})
-}
-
 func HandleUnDecompressAttachment() {
 	attachs, err := models.GetUnDecompressAttachments()
 	if err != nil {
@@ -59,6 +59,7 @@ var (
 )

 const BENCHMARK_TYPE_CODE = "repo.cloudbrain.benchmark.types"
+const CLONE_FILE_PREFIX = "file:///"

 var benchmarkTypesMap = make(map[string]*models.BenchmarkTypes, 0)
@@ -1142,7 +1143,8 @@ func GetRate(ctx *context.Context) {
 }

 func downloadCode(repo *models.Repository, codePath, branchName string) error {
-	if err := git.Clone(repo.RepoPath(), codePath, git.CloneRepoOptions{Branch: branchName}); err != nil {
+	// Clone through a "file:///" URL so Git uses the regular transport and honors Depth.
+	if err := git.Clone(CLONE_FILE_PREFIX+repo.RepoPath(), codePath, git.CloneRepoOptions{Branch: branchName, Depth: 1}); err != nil {
 		log.Error("Failed to clone repository: %s (%v)", repo.FullName(), err)
 		return err
 	}
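Git skips the transport entirely for plain local-path clones (it hardlinks or copies the object store) and ignores any depth setting, which is why CLONE_FILE_PREFIX forces a file:/// URL before Depth: 1 is requested. The same effect with the git CLI, assuming /data/repos/demo.git is a local bare repository (the paths here are illustrative):

```go
package main

import (
	"log"
	"os/exec"
)

func main() {
	// A plain local path would silently ignore --depth; the file:// URL
	// forces the regular Git transport, so the shallow clone is honored.
	cmd := exec.Command("git", "clone",
		"--depth", "1",
		"--branch", "master",
		"file:///data/repos/demo.git", "/tmp/demo")
	if out, err := cmd.CombinedOutput(); err != nil {
		log.Fatalf("clone failed: %v\n%s", err, out)
	}
}
```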
@@ -1202,7 +1204,7 @@ func downloadRateCode(repo *models.Repository, taskName, rateOwnerName, rateRepo
 		return err
 	}

-	if err := git.Clone(repoExt.RepoPath(), codePath, git.CloneRepoOptions{}); err != nil {
+	if err := git.Clone(CLONE_FILE_PREFIX+repoExt.RepoPath(), codePath, git.CloneRepoOptions{Depth: 1}); err != nil {
 		log.Error("Failed to clone repository: %s (%v)", repoExt.FullName(), err)
 		return err
 	}
@@ -247,7 +247,9 @@ func Notebook2Create(ctx *context.Context, form auth.CreateModelArtsNotebookForm
 func NotebookShow(ctx *context.Context) {
 	ctx.Data["PageIsCloudBrain"] = true
 	debugListType := ctx.Query("debugListType")
+	if debugListType == "" {
+		debugListType = "all"
+	}

 	var ID = ctx.Params(":id")
 	task, err := models.GetCloudbrainByIDWithDeleted(ID)
 	if err != nil {
@@ -1027,10 +1029,8 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm)
 	gitRepo, _ := git.OpenRepository(repo.RepoPath())
 	commitID, _ := gitRepo.GetBranchCommitID(branch_name)

-	if err := git.Clone(repo.RepoPath(), codeLocalPath, git.CloneRepoOptions{
-		Branch: branch_name,
-	}); err != nil {
-		log.Error("Create task failed, server timed out: %s (%v)", repo.FullName(), err)
+	if err := downloadCode(repo, codeLocalPath, branch_name); err != nil {
+		log.Error("downloadCode failed, server timed out: %s (%v)", repo.FullName(), err)
 		trainJobErrorNewDataPrepare(ctx, form)
 		ctx.RenderWithErr("Create task failed, server timed out", tplModelArtsTrainJobNew, &form)
 		return
@@ -1245,9 +1245,7 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ
 	gitRepo, _ := git.OpenRepository(repo.RepoPath())
 	commitID, _ := gitRepo.GetBranchCommitID(branch_name)

-	if err := git.Clone(repo.RepoPath(), codeLocalPath, git.CloneRepoOptions{
-		Branch: branch_name,
-	}); err != nil {
+	if err := downloadCode(repo, codeLocalPath, branch_name); err != nil {
 		log.Error("Failed git clone repo to local(!: %s (%v)", repo.FullName(), err)
 		versionErrorDataPrepare(ctx, form)
 		ctx.RenderWithErr("Failed git clone repo to local!", tplModelArtsTrainJobVersionNew, &form)
@@ -1874,9 +1872,7 @@ func InferenceJobCreate(ctx *context.Context, form auth.CreateModelArtsInference
 	gitRepo, _ := git.OpenRepository(repo.RepoPath())
 	commitID, _ := gitRepo.GetBranchCommitID(branch_name)

-	if err := git.Clone(repo.RepoPath(), codeLocalPath, git.CloneRepoOptions{
-		Branch: branch_name,
-	}); err != nil {
+	if err := downloadCode(repo, codeLocalPath, branch_name); err != nil {
 		log.Error("Create task failed, server timed out: %s (%v)", repo.FullName(), err)
 		inferenceJobErrorNewDataPrepare(ctx, form)
 		ctx.RenderWithErr("Create task failed, server timed out", tplModelArtsInferenceJobNew, &form)
@@ -40,8 +40,8 @@ func queryUserDataPage(ctx *context.Context, tableName string, queryObj interfac
 	dataHeader := map[string]string{
 		"A1": ctx.Tr("user.static.id"),
 		"B1": ctx.Tr("user.static.name"),
-		"C1": ctx.Tr("user.static.codemergecount"),
-		"D1": ctx.Tr("user.static.UserIndex"),
+		"C1": ctx.Tr("user.static.UserIndex"),
+		"D1": ctx.Tr("user.static.codemergecount"),
 		"E1": ctx.Tr("user.static.commitcount"),
 		"F1": ctx.Tr("user.static.issuecount"),
 		"G1": ctx.Tr("user.static.commentcount"),
@@ -54,11 +54,11 @@ func queryUserDataPage(ctx *context.Context, tableName string, queryObj interfac
 		"N1": ctx.Tr("user.static.encyclopediascount"),
 		"O1": ctx.Tr("user.static.createrepocount"),
 		"P1": ctx.Tr("user.static.openiindex"),
-		"Q1": ctx.Tr("user.static.registdate"),
-		"R1": ctx.Tr("user.static.CloudBrainTaskNum"),
-		"S1": ctx.Tr("user.static.CloudBrainRunTime"),
-		"T1": ctx.Tr("user.static.CommitDatasetNum"),
-		"U1": ctx.Tr("user.static.CommitModelCount"),
+		"Q1": ctx.Tr("user.static.CloudBrainTaskNum"),
+		"R1": ctx.Tr("user.static.CloudBrainRunTime"),
+		"S1": ctx.Tr("user.static.CommitDatasetNum"),
+		"T1": ctx.Tr("user.static.CommitModelCount"),
+		"U1": ctx.Tr("user.static.registdate"),
 		"V1": ctx.Tr("user.static.countdate"),
 	}
 	for k, v := range dataHeader {
@@ -77,8 +77,8 @@ func queryUserDataPage(ctx *context.Context, tableName string, queryObj interfac
 		rows := fmt.Sprint(row)
 		xlsx.SetCellValue(sheetName, "A"+rows, userRecord.ID)
 		xlsx.SetCellValue(sheetName, "B"+rows, userRecord.Name)
-		xlsx.SetCellValue(sheetName, "C"+rows, userRecord.CodeMergeCount)
-		xlsx.SetCellValue(sheetName, "D"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex))
+		xlsx.SetCellValue(sheetName, "C"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex))
+		xlsx.SetCellValue(sheetName, "D"+rows, userRecord.CodeMergeCount)
 		xlsx.SetCellValue(sheetName, "E"+rows, userRecord.CommitCount)
 		xlsx.SetCellValue(sheetName, "F"+rows, userRecord.IssueCount)
 		xlsx.SetCellValue(sheetName, "G"+rows, userRecord.CommentCount)
@@ -91,14 +91,12 @@ func queryUserDataPage(ctx *context.Context, tableName string, queryObj interfac
 		xlsx.SetCellValue(sheetName, "N"+rows, userRecord.EncyclopediasCount)
 		xlsx.SetCellValue(sheetName, "O"+rows, userRecord.CreateRepoCount)
 		xlsx.SetCellValue(sheetName, "P"+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex))
+		xlsx.SetCellValue(sheetName, "Q"+rows, userRecord.CloudBrainTaskNum)
+		xlsx.SetCellValue(sheetName, "R"+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600))
+		xlsx.SetCellValue(sheetName, "S"+rows, userRecord.CommitDatasetNum)
+		xlsx.SetCellValue(sheetName, "T"+rows, userRecord.CommitModelCount)
 		formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05")
-		xlsx.SetCellValue(sheetName, "Q"+rows, formatTime[0:len(formatTime)-3])
-		xlsx.SetCellValue(sheetName, "R"+rows, userRecord.CloudBrainTaskNum)
-		xlsx.SetCellValue(sheetName, "S"+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600))
-		xlsx.SetCellValue(sheetName, "T"+rows, userRecord.CommitDatasetNum)
-		xlsx.SetCellValue(sheetName, "U"+rows, userRecord.CommitModelCount)
+		xlsx.SetCellValue(sheetName, "U"+rows, formatTime[0:len(formatTime)-3])
 		formatTime = userRecord.DataDate
 		xlsx.SetCellValue(sheetName, "V"+rows, formatTime)
 	}
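The data rows here must mirror the header map edited above; the bug this hunk fixes is exactly that drift (the registration date was still written into column Q while the header row already said CloudBrainTaskNum). One defensive way to rule that out is to drive the header row and the data rows from a single ordered column table. A hedged sketch with xlsx.SetCellValue replaced by a stub, and a column set abridged for illustration (this is not the patch's code):

```go
package main

import "fmt"

// column ties a header caption to the function that extracts the cell value,
// so the header row and the data rows can never disagree about ordering.
type column struct {
	header string
	value  func(r record) interface{}
}

type record struct {
	ID        int64
	Name      string
	UserIndex float64
}

// setCell stands in for xlsx.SetCellValue(sheetName, axis, value).
func setCell(axis string, v interface{}) { fmt.Printf("%s = %v\n", axis, v) }

func main() {
	cols := []column{
		{"ID", func(r record) interface{} { return r.ID }},
		{"Name", func(r record) interface{} { return r.Name }},
		{"UserIndex", func(r record) interface{} { return fmt.Sprintf("%.2f", r.UserIndex) }},
	}
	// Header row and data rows iterate the same slice, so a reordering
	// like the Q..U shuffle above becomes a one-line change in one place.
	for i, c := range cols {
		setCell(fmt.Sprintf("%c1", 'A'+i), c.header) // columns A..Z only
	}
	for rowIdx, r := range []record{{1, "alice", 12.345}} {
		for i, c := range cols {
			setCell(fmt.Sprintf("%c%d", 'A'+i, rowIdx+2), c.value(r))
		}
	}
}
```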
@@ -241,8 +239,8 @@ func QueryUserStaticDataPage(ctx *context.Context) {
 	dataHeader := map[string]string{
 		"A1": ctx.Tr("user.static.id"),
 		"B1": ctx.Tr("user.static.name"),
-		"C1": ctx.Tr("user.static.codemergecount"),
-		"D1": ctx.Tr("user.static.UserIndex"),
+		"C1": ctx.Tr("user.static.UserIndex"),
+		"D1": ctx.Tr("user.static.codemergecount"),
 		"E1": ctx.Tr("user.static.commitcount"),
 		"F1": ctx.Tr("user.static.issuecount"),
 		"G1": ctx.Tr("user.static.commentcount"),
@@ -255,11 +253,11 @@ func QueryUserStaticDataPage(ctx *context.Context) {
 		"N1": ctx.Tr("user.static.encyclopediascount"),
 		"O1": ctx.Tr("user.static.createrepocount"),
 		"P1": ctx.Tr("user.static.openiindex"),
-		"Q1": ctx.Tr("user.static.registdate"),
-		"R1": ctx.Tr("user.static.CloudBrainTaskNum"),
-		"S1": ctx.Tr("user.static.CloudBrainRunTime"),
-		"T1": ctx.Tr("user.static.CommitDatasetNum"),
-		"U1": ctx.Tr("user.static.CommitModelCount"),
+		"Q1": ctx.Tr("user.static.CloudBrainTaskNum"),
+		"R1": ctx.Tr("user.static.CloudBrainRunTime"),
+		"S1": ctx.Tr("user.static.CommitDatasetNum"),
+		"T1": ctx.Tr("user.static.CommitModelCount"),
+		"U1": ctx.Tr("user.static.registdate"),
 		"V1": ctx.Tr("user.static.countdate"),
 	}
 	for k, v := range dataHeader {
@@ -272,8 +270,8 @@ func QueryUserStaticDataPage(ctx *context.Context) {
 		xlsx.SetCellValue(sheetName, "A"+rows, userRecord.ID)
 		xlsx.SetCellValue(sheetName, "B"+rows, userRecord.Name)
-		xlsx.SetCellValue(sheetName, "C"+rows, userRecord.CodeMergeCount)
-		xlsx.SetCellValue(sheetName, "D"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex))
+		xlsx.SetCellValue(sheetName, "C"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex))
+		xlsx.SetCellValue(sheetName, "D"+rows, userRecord.CodeMergeCount)
 		xlsx.SetCellValue(sheetName, "E"+rows, userRecord.CommitCount)
 		xlsx.SetCellValue(sheetName, "F"+rows, userRecord.IssueCount)
 		xlsx.SetCellValue(sheetName, "G"+rows, userRecord.CommentCount)
@@ -286,13 +284,12 @@ func QueryUserStaticDataPage(ctx *context.Context) {
 		xlsx.SetCellValue(sheetName, "N"+rows, userRecord.EncyclopediasCount)
 		xlsx.SetCellValue(sheetName, "O"+rows, userRecord.CreateRepoCount)
 		xlsx.SetCellValue(sheetName, "P"+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex))
+		xlsx.SetCellValue(sheetName, "Q"+rows, userRecord.CloudBrainTaskNum)
+		xlsx.SetCellValue(sheetName, "R"+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600))
+		xlsx.SetCellValue(sheetName, "S"+rows, userRecord.CommitDatasetNum)
+		xlsx.SetCellValue(sheetName, "T"+rows, userRecord.CommitModelCount)
 		formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05")
-		xlsx.SetCellValue(sheetName, "Q"+rows, formatTime[0:len(formatTime)-3])
-		xlsx.SetCellValue(sheetName, "R"+rows, userRecord.CloudBrainTaskNum)
-		xlsx.SetCellValue(sheetName, "S"+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600))
-		xlsx.SetCellValue(sheetName, "T"+rows, userRecord.CommitDatasetNum)
-		xlsx.SetCellValue(sheetName, "U"+rows, userRecord.CommitModelCount)
+		xlsx.SetCellValue(sheetName, "U"+rows, formatTime[0:len(formatTime)-3])
 		formatTime = userRecord.DataDate
 		xlsx.SetCellValue(sheetName, "V"+rows, formatTime)
 	}
@@ -608,12 +608,11 @@ func RegisterRoutes(m *macaron.Macaron) {
 			m.Put("/obs_proxy_multipart", repo.PutOBSProxyUpload)
 			m.Get("/obs_proxy_download", repo.GetOBSProxyDownload)
 			m.Get("/get_multipart_url", repo.GetMultipartUploadUrl)
-			m.Post("/complete_multipart", repo.CompleteMultipart)
-			m.Post("/update_chunk", repo.UpdateMultipart)
 		}, reqSignIn)
 		m.Group("/attachments", func() {
 			m.Post("/decompress_done_notify", repo.UpdateAttachmentDecompressState)
+			m.Post("/complete_multipart", repo.CompleteMultipart)
 		})

 		m.Group("/attachments", func() {
@@ -183,7 +183,7 @@ func searchRepoByLabel(ctx *context.Context, Key string, Page int, PageSize int)
 	topicsQuery := elastic.NewMatchQuery("topics", Key)
 	boolQ.Should(topicsQuery)

-	res, err := client.Search("repository-es-index").Query(boolQ).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Highlight(queryHighlight("topics")).Do(ctx.Req.Context())
+	res, err := client.Search("repository-es-index").Query(boolQ).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Highlight(queryHighlight("topics")).Do(ctx.Req.Context())
 	if err == nil {
 		searchJson, _ := json.Marshal(res)
 		log.Info("searchJson=" + string(searchJson))
@@ -200,15 +200,18 @@ func searchRepoByLabel(ctx *context.Context, Key string, Page int, PageSize int)
 	}
 }

-func getSort(SortBy string, ascending bool) elastic.Sorter {
-	var sort elastic.Sorter
-	sort = elastic.NewScoreSort()
-	if SortBy != "" {
-		if SortBy == "default" {
-			return sort
+func getSort(SortBy string, ascending bool, secondSortBy string, secondAscending bool) []elastic.Sorter {
+	sort := make([]elastic.Sorter, 0)
+	if SortBy == "default" || SortBy == "" {
+		sort = append(sort, elastic.NewScoreSort())
+		if secondSortBy != "" {
+			log.Info("SortBy=" + SortBy + " secondSortBy=" + secondSortBy)
+			sort = append(sort, elastic.NewFieldSort(secondSortBy).Order(secondAscending))
 		}
-		return elastic.NewFieldSort(SortBy).Order(ascending)
+	} else {
+		sort = append(sort, elastic.NewFieldSort(SortBy).Order(ascending))
 	}
+	log.Info("sort size=" + fmt.Sprint(len(sort)))
 	return sort
 }
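Returning []elastic.Sorter lets every call site spread the result into SortBy(...) and gives relevance queries a deterministic tie-breaker (num_stars for repositories, updated_unix.keyword elsewhere). A hedged usage sketch against the olivere/elastic client; the index and field names are illustrative and it assumes a reachable local Elasticsearch node:

```go
package main

import (
	"context"
	"log"

	"github.com/olivere/elastic"
)

// sorters mirrors the new getSort contract: score first for relevance
// queries, then a field tie-breaker; otherwise sort by the field alone.
func sorters(sortBy string, asc bool, second string, secondAsc bool) []elastic.Sorter {
	if sortBy == "default" || sortBy == "" {
		s := []elastic.Sorter{elastic.NewScoreSort()}
		if second != "" {
			s = append(s, elastic.NewFieldSort(second).Order(secondAsc))
		}
		return s
	}
	return []elastic.Sorter{elastic.NewFieldSort(sortBy).Order(asc)}
}

func main() {
	client, err := elastic.NewClient() // assumes a default local node
	if err != nil {
		log.Fatal(err)
	}
	q := elastic.NewMatchQuery("topics", "mnist")
	// The variadic spread is why the return type changed to []elastic.Sorter.
	res, err := client.Search("repository-es-index").
		Query(q).
		SortBy(sorters("default", false, "num_stars", false)...).
		Do(context.Background())
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("took: %dms", res.TookInMillis)
}
```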
@@ -308,7 +311,7 @@ func searchRepo(ctx *context.Context, TableName string, Key string, Page int, Pa
 	topicsQuery := elastic.NewMatchQuery("topics", Key).Boost(1).QueryName("f_third")
 	boolQ.Should(nameQuery, descriptionQuery, topicsQuery)

-	res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Highlight(queryHighlight("alias", "description", "topics")).Do(ctx.Req.Context())
+	res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending, "num_stars", false)...).From(from).Size(Size).Highlight(queryHighlight("alias", "description", "topics")).Do(ctx.Req.Context())
 	if err == nil {
 		searchJson, _ := json.Marshal(res)
 		log.Info("searchJson=" + string(searchJson))
@@ -330,7 +333,7 @@ func searchRepo(ctx *context.Context, TableName string, Key string, Page int, Pa
 	} else {
 		log.Info("query all content.")
 		// The attribute used for sorting must specify {"timestamp":{"unmapped_type":"date"}}.
-		res, err := client.Search(TableName).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Do(ctx.Req.Context())
+		res, err := client.Search(TableName).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Do(ctx.Req.Context())
 		if err == nil {
 			searchJson, _ := json.Marshal(res)
 			log.Info("searchJson=" + string(searchJson))
@@ -691,7 +694,7 @@ func searchUserOrOrg(ctx *context.Context, TableName string, Key string, Page in
 		boolQ.Must(UserOrOrgQuery)
 	}

-	res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending)).From((Page - 1) * PageSize).Size(PageSize).Highlight(queryHighlight("name", "full_name", "description")).Do(ctx.Req.Context())
+	res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From((Page - 1) * PageSize).Size(PageSize).Highlight(queryHighlight("name", "full_name", "description")).Do(ctx.Req.Context())
 	if err == nil {
 		searchJson, _ := json.Marshal(res)
 		log.Info("searchJson=" + string(searchJson))
@@ -849,7 +852,7 @@ func searchDataSet(ctx *context.Context, TableName string, Key string, Page int,
 	fileNameQuery := elastic.NewMatchQuery("file_name", Key).Boost(1).QueryName("f_third")
 	categoryQuery := elastic.NewMatchQuery("category", Key).Boost(1).QueryName("f_fourth")
 	boolQ.Should(nameQuery, descQuery, categoryQuery, fileNameQuery)

-	res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Highlight(queryHighlight("title", "description", "file_name", "category")).Do(ctx.Req.Context())
+	res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Highlight(queryHighlight("title", "description", "file_name", "category")).Do(ctx.Req.Context())
 	if err == nil {
 		searchJson, _ := json.Marshal(res)
 		log.Info("searchJson=" + string(searchJson))
@@ -864,7 +867,7 @@ func searchDataSet(ctx *context.Context, TableName string, Key string, Page int,
 	} else {
 		log.Info("query all datasets.")
 		// The attribute used for sorting must specify {"timestamp":{"unmapped_type":"date"}}.
-		res, err := client.Search(TableName).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Do(ctx.Req.Context())
+		res, err := client.Search(TableName).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Do(ctx.Req.Context())
 		if err == nil {
 			searchJson, _ := json.Marshal(res)
 			log.Info("searchJson=" + string(searchJson))
@@ -1057,7 +1060,7 @@ func searchIssueOrPr(ctx *context.Context, TableName string, Key string, Page in
 		boolQ.Must(isIssueQuery)
 	}

-	res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Highlight(queryHighlight("name", "content", "comment")).Do(ctx.Req.Context())
+	res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Highlight(queryHighlight("name", "content", "comment")).Do(ctx.Req.Context())
 	if err == nil {
 		searchJson, _ := json.Marshal(res)
 		log.Info("searchJson=" + string(searchJson))
@@ -10,7 +10,7 @@ import (
 	"github.com/elliotchance/orderedmap"
 )

-var opTypes = []int{1, 2, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 17, 22, 23, 25, 26, 27, 28, 29, 30, 31}
+var opTypes = []int{1, 2, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 17, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31}

 type ClientsManager struct {
 	Clients *orderedmap.OrderedMap
@@ -187,7 +187,7 @@ td, th {
 			{{.i18n.Tr "repo.cloudbrain"}}
 		</a>
 		<div class="divider"> / </div>
-		<a class="section backTodeBug" href="{{.RepoLink}}/debugjob?debugListType={{if eq $.debugListType "NPU"}}NPU{{else if eq $.debugListType "CPU/GPU"}}CPU/GPU{{else}}all{{end}}">
+		<a class="section backTodeBug" href="{{.RepoLink}}/debugjob?debugListType=all">
 			{{$.i18n.Tr "repo.modelarts.notebook"}}
 		</a>
 		<div class="divider"> / </div>
@@ -281,7 +281,7 @@
 				</span>
 				<el-dropdown-menu slot="dropdown">
 					<el-dropdown-item class="clipboard" data-clipboard-text="{{.DownloadURL}}" data-clipboard-action="copy">{{$.i18n.Tr "dataset.copy_url"}}</el-dropdown-item>
-					<el-dropdown-item class="clipboard" data-clipboard-text="{{.Md5}}" data-clipboard-action="copy">{{$.i18n.Tr "dataset.copy_md5"}}</el-dropdown-item>
+					<!-- <el-dropdown-item class="clipboard" data-clipboard-text="{{.Md5}}" data-clipboard-action="copy">{{$.i18n.Tr "dataset.copy_md5"}}</el-dropdown-item> -->
 					{{if and ($.CanWrite) (eq .DecompressState 1) }}
 					<el-dropdown-item @click.native="gotoAnnotate('{{$.RepoLink}}','{{.UUID}}',{{.Type}})">{{$.i18n.Tr "dataset.annotation"}}</el-dropdown-item>
 					{{end}}
@@ -380,7 +380,7 @@
 				<div class="menu" style="right: auto;">
 					<div class="item" style="padding: 0 !important;">
 						{{if .CanDebug}}
-						<a id="model-image-{{.Cloudbrain.ID}}" class='imageBtn ui basic {{if ne .Status "RUNNING"}} disabled{{else}}blue {{end}}button' href="{{$.RepoLink}}/cloudbrain/{{.Cloudbrain.ID}}/commit_image">{{$.i18n.Tr "repo.submit_image"}}</a>
+						<a id="model-image-{{.Cloudbrain.ID}}" class='imageBtn ui basic {{if ne .Status "RUNNING"}}disabled {{else}}blue {{end}}button' href="{{$.RepoLink}}/cloudbrain/{{.Cloudbrain.ID}}/commit_image">{{$.i18n.Tr "repo.submit_image"}}</a>
 						{{else}}
 						<a class="imageBtn ui basic disabled button">{{$.i18n.Tr "repo.submit_image"}}</a>
 						{{end}}
@@ -193,7 +193,7 @@ td, th {
 			{{.i18n.Tr "repo.cloudbrain"}}
 		</a>
 		<div class="divider"> / </div>
-		<a class="section backTodeBug" href="{{.RepoLink}}/debugjob?debugListType={{if eq $.debugListType "NPU"}}NPU{{else if eq $.debugListType "CPU/GPU"}}CPU/GPU{{else}}all{{end}}">
+		<a class="section backTodeBug" href="{{.RepoLink}}/debugjob?debugListType=all">
 			{{$.i18n.Tr "repo.modelarts.notebook"}}
 		</a>
 		<div class="divider"> / </div>
@@ -5,13 +5,110 @@
 {{template "base/alert" .}}
 <div class="ui mobile reversed stackable grid">
 	<div class="ui container ten wide column">
+		<div class="default" id="default_page" style="display: none;">
+			<div class="w_title">
+				{{.i18n.Tr "home.wecome_AI_plt"}}
+			</div>
+			<div class="content">
+				<p>{{.i18n.Tr "home.explore_AI"}} <a href="{{AppSubUrl}}/explore/repos">{{.i18n.Tr "home.repositories"}}</a> {{.i18n.Tr "home.or_t"}} <a href="{{AppSubUrl}}/explore/datasets">{{.i18n.Tr "home.datasets"}}</a></p>
+				<p>{{.i18n.Tr "home.use_plt__fuction"}} <a class="mini ui blue button" href="{{AppSubUrl}}/repo/create{{if .ContextUser.IsOrganization}}?org={{.ContextUser.ID}}{{end}}">{{.i18n.Tr "repo.create_repo"}}</a></p>
+				<p>{{.i18n.Tr "home.provide_resoure"}}</p>
+			</div>
+			<div class="guide">
+				<a class="mini ui blue basic button" style="font-weight:700" href="https://git.openi.org.cn/zeizei/OpenI_Learning" target="_blank">{{.i18n.Tr "custom.Platform_Tutorial"}} <i class="ri-arrow-right-line"></i></a>
+			</div>
+		</div>
 		{{if .EnableHeatmap}}
 			{{template "user/dashboard/heatmap" .}}
 		{{end}}
 		{{template "user/dashboard/feeds" .}}
+		<div id="activity_cont" style="display: none;">
+			<div class="ui placeholder segment bgtask-none padding_none line">
+				<div class="act_title" style="padding-left: 0px;">
+					{{.i18n.Tr "home.activity"}} :
+				</div>
+				<div class="ui icon header bgtask-header-pic"></div>
+				<p class="p_hint">
+					{{.i18n.Tr "home.no_events"}}
+				</p>
+			</div>
+		</div>
 	</div>
 	{{template "user/dashboard/repolist" .}}
 </div>
 </div>
 </div>
 {{template "base/footer" .}}
+<script>
+	const {AppSubUrl, StaticUrlPrefix, csrf} = window.config;
+	uid_ = Number((document.querySelector('meta[name=_context_uid]') || {}).content);
+	// console.log("uid:", uid_)
+	let URL = AppSubUrl + '/api/v1/repos/search?sort=updated&order=desc&uid=' + uid_ + '&q=&page=1&limit=10&mode= ';
+	$.getJSON(URL, (result, _textStatus, request) => {
+		const counts_pro = request.getResponseHeader('X-Total-Count');
+		console.log("count:", counts_pro);
+		if (counts_pro == 0) {
+			document.getElementById("default_page").style.display = "block";
+			document.getElementById("activity_cont").style.display = "block";
+		}
+	});
+</script>
+<style>
+	.default {
+		background-color: rgba(24, 144, 255, 0.1);
+		margin-bottom: 20px;
+		border-radius: 15px;
+		line-height: 20px;
+		padding: 0px 25px;
+	}
+	.w_title {
+		padding-top: 25px;
+		color: rgba(16, 16, 16, 100);
+		font-size: 20px;
+		text-align: left;
+		font-weight: 700;
+	}
+	.content {
+		color: rgba(80, 85, 89, 100);
+		font-size: 14px;
+		text-align: left;
+		font-family: SourceHanSansSC-regular;
+		margin-top: 20px;
+	}
+	.guide {
+		margin-top: 30px;
+		padding-bottom: 30px;
+	}
+	.activity {
+		margin-top: 20px;
+	}
+	.act_title {
+		color: rgba(16, 16, 16, 100) !important;
+		font-size: 20px;
+		text-align: left;
+		background-color: #fff !important;
+		font-weight: 700;
+	}
+	.p_hint {
+		color: rgba(136, 136, 136, 100);
+		font-size: 14px;
+		text-align: center;
+		font-family: SourceHanSansSC-regular;
+	}
+	.padding_none {
+		padding: 0px !important;
+	}
+	.ui.placeholder.segment {
+		min-height: 15rem !important;
+	}
+	.line {
+		border-top: 1px solid rgba(187, 187, 187, 0.5) !important;
+		margin-top: 20px !important;
+	}
+</style>
@@ -71,7 +71,7 @@
 			{{ $index := index .GetIssueInfos 0}}
 			{{$.i18n.Tr "action.comment_pull" .GetRepoLink $index .ShortRepoPath | Str2html}}
 		{{else if eq .GetOpType 24}}
-			{{$.i18n.Tr "action.upload_dataset" .GetRepoLink .Content .RefName | Str2html}}
+			{{$.i18n.Tr "action.upload_dataset" .GetRepoLink .RefName | Str2html}}
 		{{else if eq .GetOpType 25}}
 			{{$.i18n.Tr "action.task_gpudebugjob" .GetRepoLink .Content .RefName | Str2html}}
 		{{else if eq .GetOpType 26}}
@@ -27,6 +27,7 @@ import createDropzone from '../features/dropzone.js';
 const {_AppSubUrl, _StaticUrlPrefix, csrf} = window.config;

 const chunkSize = 1024 * 1024 * 64;
+const md5ChunkSize = 1024 * 1024 * 1;

 export default {
   props:{
@@ -190,10 +191,9 @@ export default {
       let currentChunk = 0;

       const time = new Date().getTime();
-      // console.log('计算MD5...')
       this.status = this.dropzoneParams.data('md5-computing');
       file.totalChunkCounts = chunks;
-      loadNext();
+      loadMd5Next();

       fileReader.onload = (e) => {
         fileLoaded.call(this, e);
@@ -207,13 +207,12 @@ export default {
         spark.append(e.target.result); // Append array buffer
         currentChunk++;
         if (currentChunk < chunks) {
-          // console.log(`第${currentChunk}分片解析完成, 开始第${currentChunk +1}/${chunks}分片解析`);
           this.status = `${this.dropzoneParams.data('loading-file')} ${(
            (currentChunk / chunks) *
            100
           ).toFixed(2)}% (${currentChunk}/${chunks})`;
           this.updateProgress(file, ((currentChunk / chunks) * 100).toFixed(2));
-          loadNext();
+          loadMd5Next();
           return;
         }
@@ -235,6 +234,13 @@ export default {
           start + chunkSize >= file.size ? file.size : start + chunkSize;
         fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
       }
+
+      // Read only the first 1 MiB of each 64 MiB chunk: a fast sampled fingerprint, not a full-file MD5.
+      function loadMd5Next() {
+        const start = currentChunk * chunkSize;
+        const end =
+          start + md5ChunkSize >= file.size ? file.size : start + md5ChunkSize;
+        fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
+      }
     },

     async computeMD5Success(md5edFile) {
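Note what the renamed loadMd5Next actually changes: the reader still advances in 64 MiB strides (currentChunk * chunkSize) but now reads only the first 1 MiB of each stride, so the digest becomes a fast sampled fingerprint of the file rather than a hash of every byte, and the server has to accept that weaker identifier. A Go sketch of the same sampling scheme, under that reading of the patch:

```go
package main

import (
	"crypto/md5"
	"fmt"
	"io"
	"os"
)

const (
	chunkSize    = 64 << 20 // 64 MiB stride, same as the uploader
	md5ChunkSize = 1 << 20  // hash only the first 1 MiB of each stride
)

// sampledMD5 fingerprints a file the way the patched uploader does:
// one 1 MiB sample per 64 MiB window instead of every byte.
func sampledMD5(path string) (string, error) {
	f, err := os.Open(path)
	if err != nil {
		return "", err
	}
	defer f.Close()
	h := md5.New()
	buf := make([]byte, md5ChunkSize)
	for off := int64(0); ; off += chunkSize {
		n, err := f.ReadAt(buf, off)
		h.Write(buf[:n])
		if err == io.EOF {
			break // short or empty read: past the last stride
		}
		if err != nil {
			return "", err
		}
	}
	return fmt.Sprintf("%x", h.Sum(nil)), nil
}

func main() {
	if len(os.Args) != 2 {
		fmt.Fprintln(os.Stderr, "usage: sampledmd5 <file>")
		os.Exit(1)
	}
	sum, err := sampledMD5(os.Args[1])
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Println(sum)
}
```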
@@ -1,484 +0,0 @@
<template>
  <div class="dropzone-wrapper dataset-files">
    <div
      id="dataset"
      class="dropzone"
    />
    <p class="upload-info">
      {{ file_status_text }}
      <strong class="success text red">{{ status }}</strong>
    </p>
    <p>Notes:<br>
      - Only datasets in zip format can launch Cloud Brain tasks;<br>
      - Cloud Brain 1 provides <span class="text blue">CPU / GPU</span> resources and Cloud Brain 2 provides <span class="text blue">Ascend NPU</span> resources; a dataset used for debugging must also be uploaded to the matching environment.
    </p>
  </div>
</template>
<script>
/* eslint-disable eqeqeq */
// import Dropzone from 'dropzone/dist/dropzone.js';
// import 'dropzone/dist/dropzone.css'
import SparkMD5 from 'spark-md5';
import axios from 'axios';
import qs from 'qs';
import createDropzone from '../features/dropzone.js';
const {_AppSubUrl, _StaticUrlPrefix, csrf} = window.config;
const CloudBrainType = 1;
export default {
  data() {
    return {
      dropzoneUploader: null,
      maxFiles: 1,
      maxFilesize: 1 * 1024 * 1024 * 1024 * 1024,
      acceptedFiles: '*/*',
      progress: 0,
      status: '',
      dropzoneParams: {},
      file_status_text: ''
    };
  },
  async mounted() {
    this.dropzoneParams = $('div#minioUploader-params');
    this.file_status_text = this.dropzoneParams.data('file-status');
    this.status = this.dropzoneParams.data('file-init-status');
    let previewTemplate = '';
    previewTemplate += '<div class="dz-preview dz-file-preview">\n  ';
    previewTemplate += '  <div class="dz-details">\n    ';
    previewTemplate += '    <div class="dz-filename">';
    previewTemplate += '      <span data-dz-name data-dz-thumbnail></span>';
    previewTemplate += '    </div>\n    ';
    previewTemplate += '    <div class="dz-size" data-dz-size style="white-space: nowrap"></div>\n  ';
    previewTemplate += '  </div>\n  ';
    previewTemplate += '  <div class="dz-progress ui active progress">';
    previewTemplate += '    <div class="dz-upload bar" data-dz-uploadprogress><div class="progress"></div></div>\n  ';
    previewTemplate += '  </div>\n  ';
    previewTemplate += '  <div class="dz-success-mark">';
    previewTemplate += '    <span>Upload succeeded</span>';
    previewTemplate += '  </div>\n  ';
    previewTemplate += '  <div class="dz-error-mark">';
    previewTemplate += '    <span>Upload failed</span>';
    previewTemplate += '  </div>\n  ';
    previewTemplate += '  <div class="dz-error-message">';
    previewTemplate += '    <span data-dz-errormessage></span>';
    previewTemplate += '  </div>\n';
    previewTemplate += '</div>';
    const $dropzone = $('div#dataset');
    console.log('createDropzone');
    const dropzoneUploader = await createDropzone($dropzone[0], {
      url: '/todouploader',
      maxFiles: this.maxFiles,
      maxFilesize: this.maxFilesize,
      timeout: 0,
      autoQueue: false,
      dictDefaultMessage: this.dropzoneParams.data('default-message'),
      dictInvalidFileType: this.dropzoneParams.data('invalid-input-type'),
      dictFileTooBig: this.dropzoneParams.data('file-too-big'),
      dictRemoveFile: this.dropzoneParams.data('remove-file'),
      previewTemplate
    });
    dropzoneUploader.on('addedfile', (file) => {
      setTimeout(() => {
        // eslint-disable-next-line no-unused-expressions
        file.accepted && this.onFileAdded(file);
      }, 200);
    });
    // Arrow function so `this` stays the Vue component; the uploader
    // instance is reached through the closure rather than the handler's `this`.
    dropzoneUploader.on('maxfilesexceeded', (file) => {
      if (dropzoneUploader.files[0].status !== 'success') {
        alert(this.dropzoneParams.data('waitting-uploading'));
        dropzoneUploader.removeFile(file);
        return;
      }
      dropzoneUploader.removeAllFiles();
      dropzoneUploader.addFile(file);
    });
    this.dropzoneUploader = dropzoneUploader;
  },
  methods: {
    resetStatus() {
      this.progress = 0;
      this.status = '';
    },
    updateProgress(file, progress) {
      file.previewTemplate.querySelector(
        '.dz-upload'
      ).style.width = `${progress}%`;
    },
    emitDropzoneSuccess(file) {
      file.status = 'success';
      this.dropzoneUploader.emit('success', file);
      this.dropzoneUploader.emit('complete', file);
    },
    emitDropzoneFailed(file) {
      this.status = this.dropzoneParams.data('falied');
      file.status = 'error';
      this.dropzoneUploader.emit('error', file);
      // this.dropzoneUploader.emit('complete', file);
    },
    onFileAdded(file) {
      file.datasetId = document
        .getElementById('datasetId')
        .getAttribute('datasetId');
      this.resetStatus();
      this.computeMD5(file);
    },
    finishUpload(file) {
      this.emitDropzoneSuccess(file);
      setTimeout(() => {
        window.location.reload();
      }, 1000);
    },
    computeMD5(file) {
      this.resetStatus();
      const blobSlice =
          File.prototype.slice ||
          File.prototype.mozSlice ||
          File.prototype.webkitSlice,
        chunkSize = 1024 * 1024 * 64,
        chunks = Math.ceil(file.size / chunkSize),
        spark = new SparkMD5.ArrayBuffer(),
        fileReader = new FileReader();
      let currentChunk = 0;
      const time = new Date().getTime();
      // console.log('Computing MD5...')
      this.status = this.dropzoneParams.data('md5-computing');
      file.totalChunkCounts = chunks;
      loadNext();
      fileReader.onload = (e) => {
        fileLoaded.call(this, e);
      };
      fileReader.onerror = (err) => {
        console.warn('oops, something went wrong.', err);
        file.cancel();
      };
      function fileLoaded(e) {
        spark.append(e.target.result); // Append array buffer
        currentChunk++;
        if (currentChunk < chunks) {
          // console.log(`Chunk ${currentChunk} parsed, starting chunk ${currentChunk + 1}/${chunks}`);
          this.status = `${this.dropzoneParams.data('loading-file')} ${(
            (currentChunk / chunks) *
            100
          ).toFixed(2)}% (${currentChunk}/${chunks})`;
          this.updateProgress(file, ((currentChunk / chunks) * 100).toFixed(2));
          loadNext();
          return;
        }
        const md5 = spark.end();
        console.log(
          `MD5 computed: ${file.name} \nMD5: ${md5} \nchunks: ${chunks} size: ${
            file.size
          } time: ${(new Date().getTime() - time) / 1000} s`
        );
        spark.destroy(); // Free SparkMD5's internal buffer
        file.uniqueIdentifier = md5; // Use the MD5 as the file's unique identifier
        file.cmd5 = false; // Clear the MD5-computing flag
        this.computeMD5Success(file);
      }
      function loadNext() {
        const start = currentChunk * chunkSize;
        const end =
          start + chunkSize >= file.size ? file.size : start + chunkSize;
        fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
      }
    },
    async computeMD5Success(md5edFile) {
      const file = await this.getSuccessChunks(md5edFile);
      try {
        if (file.uploadID == '' || file.uuid == '') {
          // Never uploaded before: open a new multipart upload
          await this.newMultiUpload(file);
          if (file.uploadID != '' && file.uuid != '') {
            file.chunks = '';
            this.multipartUpload(file);
          } else {
            // TODO: handle the failure case
            return;
          }
          return;
        }
        if (file.uploaded == '1') {
          // Already fully uploaded: finish instantly via the hash match
          if (file.attachID == '0') {
            // The dataset record was deleted but the file itself remains
            await addAttachment(file);
          }
          // The same file was uploaded to a different dataset
          if (file.datasetID != '') {
            if (file.datasetName != "" && file.realName != "") {
              const info = "This file has already been uploaded, as dataset (" + file.datasetName + ") - file (" + file.realName + ")";
              window.alert(info);
              window.location.reload();
            }
          }
          console.log('File is already fully uploaded');
          this.progress = 100;
          this.status = this.dropzoneParams.data('upload-complete');
          this.finishUpload(file);
        } else {
          // Resume the interrupted multipart upload
          this.multipartUpload(file);
        }
      } catch (error) {
        this.emitDropzoneFailed(file);
        console.log(error);
      }
      async function addAttachment(file) {
        return await axios.post(
          '/attachments/add',
          qs.stringify({
            uuid: file.uuid,
            file_name: file.name,
            size: file.size,
            dataset_id: file.datasetId,
            type: CloudBrainType,
            _csrf: csrf,
          })
        );
      }
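      // Illustration only, not part of the original component: the branching
      // above amounts to a small decision table. The helper name is hypothetical.
      // eslint-disable-next-line no-unused-vars
      function decideUploadAction({uploadID, uuid, uploaded, attachID}) {
        if (uploadID == '' || uuid == '') return 'start-new-multipart';
        if (uploaded == '1') return attachID == '0' ? 'add-attachment-then-finish' : 'finish';
        return 'resume-multipart';
      }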
    },
    async getSuccessChunks(file) {
      const params = {
        params: {
          md5: file.uniqueIdentifier,
          type: CloudBrainType,
          file_name: file.name,
          _csrf: csrf
        }
      };
      try {
        const response = await axios.get('/attachments/get_chunks', params);
        file.uploadID = response.data.uploadID;
        file.uuid = response.data.uuid;
        file.uploaded = response.data.uploaded;
        file.chunks = response.data.chunks;
        file.attachID = response.data.attachID;
        file.datasetID = response.data.datasetID;
        file.datasetName = response.data.datasetName;
        file.realName = response.data.fileName;
        return file;
      } catch (error) {
        this.emitDropzoneFailed(file);
        console.log('getSuccessChunks catch: ', error);
        return null;
      }
    },
    async newMultiUpload(file) {
      const res = await axios.get('/attachments/new_multipart', {
        params: {
          totalChunkCounts: file.totalChunkCounts,
          md5: file.uniqueIdentifier,
          size: file.size,
          fileType: file.type,
          type: CloudBrainType,
          file_name: file.name,
          _csrf: csrf
        }
      });
      file.uploadID = res.data.uploadID;
      file.uuid = res.data.uuid;
    },
    multipartUpload(file) {
      const blobSlice =
          File.prototype.slice ||
          File.prototype.mozSlice ||
          File.prototype.webkitSlice,
        chunkSize = 1024 * 1024 * 64,
        chunks = Math.ceil(file.size / chunkSize),
        fileReader = new FileReader(),
        time = new Date().getTime();
      let currentChunk = 0;
      function loadNext() {
        const start = currentChunk * chunkSize;
        const end =
          start + chunkSize >= file.size ? file.size : start + chunkSize;
        fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
      }
      function checkSuccessChunks() {
        // Parts already on the server are skipped; part numbers are 1-based.
        return successChunks.indexOf((currentChunk + 1).toString()) !== -1;
      }
      async function getUploadChunkUrl(currentChunk, partSize) {
        const res = await axios.get('/attachments/get_multipart_url', {
          params: {
            uuid: file.uuid,
            uploadID: file.uploadID,
            size: partSize,
            chunkNumber: currentChunk + 1,
            type: CloudBrainType,
            file_name: file.name,
            _csrf: csrf
          }
        });
        urls[currentChunk] = res.data.url;
      }
      async function uploadMinio(url, e) {
        const res = await axios.put(url, e.target.result, {
          headers: {
            'Content-Type': ''
          }});
        etags[currentChunk] = res.headers.etag;
      }
      async function uploadMinioNewMethod(url, e) {
        // Synchronous XHR blocks the main thread and is deprecated, but it
        // guarantees the ETag is available before the next chunk is scheduled.
        const xhr = new XMLHttpRequest();
        xhr.open('PUT', url, false);
        xhr.setRequestHeader('Content-Type', '');
        xhr.send(e.target.result);
        etags[currentChunk] = xhr.getResponseHeader('ETag');
      }
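      // Illustration only, not part of the original component: an async
      // alternative to the synchronous XHR above. Assumes the presigned URL
      // accepts a plain PUT and the bucket's CORS rules expose the ETag header.
      // eslint-disable-next-line no-unused-vars
      async function uploadPartAsync(url, body) {
        const res = await fetch(url, {method: 'PUT', body});
        if (!res.ok) throw new Error(`part upload failed: ${res.status}`);
        return res.headers.get('ETag');
      }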
      async function updateChunk(currentChunk) {
        await axios.post(
          '/attachments/update_chunk',
          qs.stringify({
            uuid: file.uuid,
            chunkNumber: currentChunk + 1,
            etag: etags[currentChunk],
            type: CloudBrainType,
            _csrf: csrf
          })
        );
      }
      // Arrow function so `this` inside the catch block is the Vue component.
      const uploadChunk = async (e) => {
        try {
          if (!checkSuccessChunks()) {
            const start = currentChunk * chunkSize;
            const partSize =
              start + chunkSize >= file.size ? file.size - start : chunkSize;
            // Get the presigned upload URL for this part
            await getUploadChunkUrl(currentChunk, partSize);
            if (urls[currentChunk] != '') {
              // Upload the part to MinIO
              await uploadMinioNewMethod(urls[currentChunk], e);
              if (etags[currentChunk] != '') {
                // Record the part's upload result in the database
                //await updateChunk(currentChunk);
              } else {
                console.log('uploadChunk: got an empty ETag from MinIO'); // TODO
              }
            } else {
              console.log('uploadChunk: got an empty part URL'); // TODO
            }
          }
        } catch (error) {
          this.emitDropzoneFailed(file);
          console.log(error);
        }
      };
      async function completeUpload() {
        return await axios.post(
          '/attachments/complete_multipart',
          qs.stringify({
            uuid: file.uuid,
            uploadID: file.uploadID,
            file_name: file.name,
            size: file.size,
            dataset_id: file.datasetId,
            type: CloudBrainType,
            _csrf: csrf
          })
        );
      }
      const successChunks = [];
      let successParts = [];
      successParts = file.chunks.split(',');
      for (let i = 0; i < successParts.length; i++) {
        successChunks[i] = successParts[i].split('-')[0];
      }
      const urls = [];
      const etags = [];
      console.log('Uploading chunks...');
      this.status = this.dropzoneParams.data('uploading');
      loadNext();
      fileReader.onload = async (e) => {
        await uploadChunk(e);
        fileReader.abort();
        currentChunk++;
        if (currentChunk < chunks) {
          console.log(
            `Chunk ${currentChunk} uploaded, starting chunk ${currentChunk + 1}/${chunks}`
          );
          this.progress = Math.ceil((currentChunk / chunks) * 100);
          this.updateProgress(file, ((currentChunk / chunks) * 100).toFixed(2));
          this.status = `${this.dropzoneParams.data('uploading')} ${(
            (currentChunk / chunks) *
            100
          ).toFixed(2)}%`;
          loadNext();
        } else {
          await completeUpload();
          console.log(
            `File upload complete: ${file.name} \nchunks: ${chunks} size: ${
              file.size
            } time: ${(new Date().getTime() - time) / 1000} s`
          );
          this.progress = 100;
          this.status = this.dropzoneParams.data('upload-complete');
          this.finishUpload(file);
        }
      };
    }
  }
};
</script>
<style>
.dropzone-wrapper {
  margin: 0;
}
.ui .dropzone {
  border: 2px dashed #0087f5;
  box-shadow: none !important;
  padding: 0;
  min-height: 5rem;
  border-radius: 4px;
}
.dataset .dataset-files #dataset .dz-preview.dz-file-preview,
.dataset .dataset-files #dataset .dz-preview.dz-processing {
  display: flex;
  align-items: center;
}
.dataset .dataset-files #dataset .dz-preview {
  border-bottom: 1px solid #dadce0;
  min-height: 0;
}
.upload-info {
  margin-top: 0.2em;
}
</style>
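Both `computeMD5` and `multipartUpload` in the removed component drive the `FileReader` through `onload` callbacks. For reference, a hedged sketch of the same read step with the reader wrapped in a Promise, which lets the chunk loop become a plain `for`/`await`; the helper names and the `uploadPart` callback are hypothetical:

function readSlice(blob, start, end) {
  return new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.onload = (e) => resolve(e.target.result);
    reader.onerror = () => reject(reader.error);
    reader.readAsArrayBuffer(blob.slice(start, end));
  });
}

async function uploadAllChunks(file, chunkSize, uploadPart) {
  const chunks = Math.ceil(file.size / chunkSize);
  for (let i = 0; i < chunks; i++) {
    const start = i * chunkSize;
    const buf = await readSlice(file, start, Math.min(start + chunkSize, file.size));
    await uploadPart(i + 1, buf); // part numbers are 1-based, as in the component
  }
}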
@@ -63,11 +63,6 @@
         </template>
       </el-table-column>
       <el-table-column
-        prop="CodeMergeCount"
-        label="PRs"
-        align="center">
-      </el-table-column>
-      <el-table-column
         prop="UserIndex"
         label="User index"
         width="120px"
@@ -77,6 +72,11 @@
         </template>
       </el-table-column>
       <el-table-column
+        prop="CodeMergeCount"
+        label="PRs"
+        align="center">
+      </el-table-column>
+      <el-table-column
         prop="CommitCount"
         label="Commits"
         align="center">
@@ -135,15 +135,6 @@
         align="center">
       </el-table-column>
       <el-table-column
-        prop="RegistDate"
-        label="User registration time"
-        width="120px"
-        align="center">
-        <template slot-scope="scope">
-          {{scope.row.RegistDate | transformTimestamp}}
-        </template>
-      </el-table-column>
-      <el-table-column
         prop="CloudBrainTaskNum"
         label="Cloud Brain tasks"
         width="120px"
@@ -170,7 +161,15 @@
         width="120px"
         align="center">
       </el-table-column>
+      <el-table-column
+        prop="RegistDate"
+        label="User registration time"
+        width="120px"
+        align="center">
+        <template slot-scope="scope">
+          {{scope.row.RegistDate | transformTimestamp}}
+        </template>
+      </el-table-column>
       <el-table-column
         prop="DataDate"
         label="System statistics time"