
Merge branch 'V20220428' into fix-1885

pull/1965/head · zhoupzh committed 3 years ago · commit 3ccb372680
21 changed files with 157 additions and 642 deletions
1. models/attachment.go (+9, -0)
2. models/user_business_analysis.go (+6, -1)
3. modules/storage/obs.go (+47, -38)
4. options/locale/locale_en-US.ini (+1, -1)
5. options/locale/locale_zh-CN.ini (+1, -1)
6. public/home/home.js (+6, -1)
7. public/home/search.js (+28, -19)
8. routers/repo/attachment.go (+2, -46)
9. routers/repo/cloudbrain.go (+4, -2)
10. routers/repo/modelarts.go (+7, -11)
11. routers/repo/user_data_analysis.go (+8, -8)
12. routers/routes/routes.go (+1, -2)
13. routers/search.go (+17, -14)
14. services/socketwrap/clientManager.go (+1, -1)
15. templates/repo/cloudbrain/show.tmpl (+1, -1)
16. templates/repo/datasets/index.tmpl (+1, -1)
17. templates/repo/modelarts/notebook/show.tmpl (+1, -1)
18. templates/user/dashboard/feeds.tmpl (+1, -1)
19. web_src/js/components/MinioUploader.vue (+10, -4)
20. web_src/js/components/ObsUploader.vue (+0, -484)
21. web_src/js/components/UserAnalysis.vue (+5, -5)

models/attachment.go (+9, -0)

@@ -10,6 +10,7 @@ import (
"io"
"path"
"strings"
"time"

"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/obs"
@@ -104,6 +105,14 @@ func (a *Attachment) IncreaseDownloadCount() error {
return nil
}

func (a *Attachment) UpdateDatasetUpdateUnix() error {
// Update the dataset's updated_unix to the current time.
if _, err := x.Exec("UPDATE `dataset` SET updated_unix="+fmt.Sprint(time.Now().Unix())+" WHERE id=?", a.DatasetID); err != nil {
return fmt.Errorf("UpdateDatasetUpdateUnix: %v", err)
}
return nil
}

// APIFormat converts models.Attachment to api.Attachment
func (a *Attachment) APIFormat() *api.Attachment {
return &api.Attachment{
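
The new UpdateDatasetUpdateUnix splices the timestamp into the SQL text with fmt.Sprint. A minimal sketch of the same statement with both values passed as bound parameters instead (assuming the usual xorm engine behind `x`; the import path may differ in this codebase):

package models

import (
	"fmt"
	"time"

	"github.com/go-xorm/xorm"
)

// updateDatasetUpdatedUnix is a sketch of the same UPDATE with the timestamp
// bound as an argument rather than concatenated into the SQL string.
func updateDatasetUpdatedUnix(x *xorm.Engine, datasetID int64) error {
	if _, err := x.Exec("UPDATE `dataset` SET updated_unix=? WHERE id=?",
		time.Now().Unix(), datasetID); err != nil {
		return fmt.Errorf("UpdateDatasetUpdateUnix: %v", err)
	}
	return nil
}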


models/user_business_analysis.go (+6, -1)

@@ -831,7 +831,12 @@ func getUserIndex(dateRecord UserBusinessAnalysis, ParaWeight map[string]float64
result += float64(dateRecord.StarRepoCount) * getParaWeightValue("StarRepoCount", ParaWeight, 0.1)
result += float64(dateRecord.LoginCount) * getParaWeightValue("LoginCount", ParaWeight, 0.1)
result += float64(dateRecord.WatchedCount) * getParaWeightValue("WatchedCount", ParaWeight, 0.3)
result += float64(dateRecord.CommitCodeSize) * getParaWeightValue("CommitCodeSize", ParaWeight, 0.1)
codeLine := float64(dateRecord.CommitCodeSize) / 1000
limitCodeLine := getParaWeightValue("LimitCommitCodeSize", ParaWeight, 100)
if codeLine >= limitCodeLine {
codeLine = limitCodeLine
}
result += codeLine * getParaWeightValue("CommitCodeSize", ParaWeight, 0.1)
result += float64(dateRecord.SolveIssueCount) * getParaWeightValue("SolveIssueCount", ParaWeight, 0.2)
result += float64(dateRecord.EncyclopediasCount) * getParaWeightValue("EncyclopediasCount", ParaWeight, 0.1)
result += float64(dateRecord.CreateRepoCount) * getParaWeightValue("CreateRepoCount", ParaWeight, 0.05)
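
The replaced term converts CommitCodeSize into thousands of lines and clamps it at LimitCommitCodeSize (default 100) before weighting, so one enormous commit can no longer dominate the user index. The clamped term in isolation, as a small sketch:

// cappedCodeLineScore mirrors the new CommitCodeSize term: convert to
// thousands of lines, cap at limit, then apply the weight.
func cappedCodeLineScore(commitCodeSize int, weight, limit float64) float64 {
	codeLine := float64(commitCodeSize) / 1000
	if codeLine >= limit {
		codeLine = limit
	}
	return codeLine * weight
}

With the defaults above (weight 0.1, limit 100), a 50,000-line commit contributes 5.0 and anything past 100,000 lines contributes a flat 10.0.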


modules/storage/obs.go (+47, -38)

@@ -59,21 +59,55 @@ func ObsHasObject(path string) (bool, error) {
return hasObject, nil
}

func listAllParts(uuid, uploadID, key string) (output *obs.ListPartsOutput, err error) {
output = &obs.ListPartsOutput{}
partNumberMarker := 0
for {
temp, err := ObsCli.ListParts(&obs.ListPartsInput{
Bucket: setting.Bucket,
Key: key,
UploadId: uploadID,
MaxParts: MAX_LIST_PARTS,
PartNumberMarker: partNumberMarker,
})
if err != nil {
log.Error("ListParts failed:", err.Error())
return output, err
}

partNumberMarker = temp.NextPartNumberMarker
log.Info("uuid:%s, MaxParts:%d, PartNumberMarker:%d, NextPartNumberMarker:%d, len:%d", uuid, temp.MaxParts, temp.PartNumberMarker, temp.NextPartNumberMarker, len(temp.Parts))

for _, partInfo := range temp.Parts {
output.Parts = append(output.Parts, obs.Part{
PartNumber: partInfo.PartNumber,
ETag: partInfo.ETag,
})
}

if len(temp.Parts) < temp.MaxParts {
break
}
}

return output, nil
}

func GetObsPartInfos(uuid, uploadID, fileName string) (string, error) {
key := strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")

output, err := ObsCli.ListParts(&obs.ListPartsInput{
Bucket: setting.Bucket,
Key: key,
UploadId: uploadID,
})
allParts, err := listAllParts(uuid, uploadID, key)
if err != nil {
log.Error("ListParts failed:", err.Error())
log.Error("listAllParts failed: %v", err)
return "", err
}

var chunks string
for _, partInfo := range output.Parts {
for _, partInfo := range allParts.Parts {
chunks += strconv.Itoa(partInfo.PartNumber) + "-" + partInfo.ETag + ","
}

@@ -100,39 +134,14 @@ func CompleteObsMultiPartUpload(uuid, uploadID, fileName string) error {
input.Key = strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")
input.UploadId = uploadID

partNumberMarker := 0
for {
output, err := ObsCli.ListParts(&obs.ListPartsInput{
Bucket: setting.Bucket,
Key: input.Key,
UploadId: uploadID,
MaxParts: MAX_LIST_PARTS,
PartNumberMarker: partNumberMarker,
})
if err != nil {
log.Error("ListParts failed:", err.Error())
return err
}

partNumberMarker = output.NextPartNumberMarker
log.Info("uuid:%s, MaxParts:%d, PartNumberMarker:%d, NextPartNumberMarker:%d, len:%d", uuid, output.MaxParts, output.PartNumberMarker, output.NextPartNumberMarker, len(output.Parts))

for _, partInfo := range output.Parts {
input.Parts = append(input.Parts, obs.Part{
PartNumber: partInfo.PartNumber,
ETag: partInfo.ETag,
})
}

if len(output.Parts) < output.MaxParts {
break
} else {
continue
}

break
allParts, err := listAllParts(uuid, uploadID, input.Key)
if err != nil {
log.Error("listAllParts failed: %v", err)
return err
}

input.Parts = allParts.Parts

output, err := ObsCli.CompleteMultipartUpload(input)
if err != nil {
log.Error("CompleteMultipartUpload failed:", err.Error())


options/locale/locale_en-US.ini (+1, -1)

@@ -2854,7 +2854,7 @@ mirror_sync_create = synced new reference <a href="%s/src/%s">%[2]s</a> to <a hr
mirror_sync_delete = synced and deleted reference <code>%[2]s</code> at <a href="%[1]s">%[3]s</a> from mirror
approve_pull_request = `approved <a href="%s/pulls/%s">%s#%[2]s</a>`
reject_pull_request = `suggested changes for <a href="%s/pulls/%s">%s#%[2]s</a>`
upload_dataset=`upload dataset <a href="%s/datasets?type=%s">%s</a>`
upload_dataset=`upload dataset <a href="%s/datasets">%s</a>`
task_gpudebugjob=`created CPU/GPU type debugging task<a href="%s/cloudbrain/%s">%s</a>`
task_npudebugjob=`created NPU type debugging task <a href="%s/modelarts/notebook/%s">%s</a>`
task_nputrainjob=`created NPU training task<a href="%s/modelarts/train-job/%s">%s</a>`


options/locale/locale_zh-CN.ini (+1, -1)

@@ -2864,7 +2864,7 @@ mirror_sync_create=从镜像同步了新的引用 <a href="%s/src/%s">%[2]s</a>
mirror_sync_delete=从镜像同步并从 <a href="%[1]s">%[3]s</a> 删除了引用 <code>%[2]s</code>
approve_pull_request=`同意了 <a href="%s/pulls/%s">%s#%[2]s</a>`
reject_pull_request=`建议变更 <a href="%s/pulls/%s">%s#%[2]s</a>`
upload_dataset=`上传了数据集文件 <a href="%s/datasets?type=%s">%s</a>`
upload_dataset=`上传了数据集文件 <a href="%s/datasets">%s</a>`
task_gpudebugjob=`创建了CPU/GPU类型调试任务 <a href="%s/cloudbrain/%s">%s</a>`
task_npudebugjob=`创建了NPU类型调试任务 <a href="%s/modelarts/notebook/%s">%s</a>`
task_nputrainjob=`创建了NPU类型训练任务 <a href="%s/modelarts/train-job/%s">%s</a>`


public/home/home.js (+6, -1)

@@ -99,6 +99,11 @@ socket.onmessage = function (e) {
console.log("receive action type=" + record.OpType + " name=" + actionName + " but user is null.");
continue;
}
if(record.OpType == "24"){
if(record.Content.indexOf("true") != -1){
continue;
}
}
var recordPrefix = getMsg(record);
if(record.OpType == "6" || record.OpType == "10" || record.OpType == "12" || record.OpType == "13"){
html += recordPrefix + actionName;
@@ -162,7 +167,7 @@ socket.onmessage = function (e) {
function getTaskLink(record){
var re = getRepoLink(record);
if(record.OpType == 24){
re = re + "/datasets?type=" + record.Content;
re = re + "/datasets";
}else if(record.OpType == 25){
re = re + "/cloudbrain/" + record.Content;
}else if(record.OpType == 26){
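
This guard pairs with the server change in routers/repo/attachment.go below: for upload actions, Content is now fmt.Sprint(repository.IsPrivate, attachment.IsPrivate), and any "true" in it marks a private repository or attachment, so the feed entry is dropped. A small Go illustration of the encoding the indexOf("true") check relies on:

package main

import (
	"fmt"
	"strings"
)

func main() {
	// fmt.Sprint adds a space between operands when neither is a string,
	// so two booleans render as "false true", "true false", and so on.
	content := fmt.Sprint(false, true)
	fmt.Println(content)                           // false true
	fmt.Println(strings.Contains(content, "true")) // true, which is what indexOf("true") mirrors
}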


public/home/search.js (+28, -19)

@@ -101,16 +101,20 @@ function initPageInfo(){

function searchItem(type,sortType){
console.log("enter item 2.");
currentSearchKeyword = document.getElementById("keyword_input").value;
if(!isEmpty(currentSearchKeyword)){
initPageInfo();
currentSearchTableName = itemType[type];
currentSearchSortBy = sortBy[sortType];
currentSearchAscending = sortAscending[sortType];
OnlySearchLabel =false;
page(currentPage);
if(OnlySearchLabel){
doSearchLabel(currentSearchTableName,currentSearchKeyword,sortBy[sortType],sortAscending[sortType])
}else{
emptySearch();
currentSearchKeyword = document.getElementById("keyword_input").value;
if(!isEmpty(currentSearchKeyword)){
initPageInfo();
currentSearchTableName = itemType[type];
currentSearchSortBy = sortBy[sortType];
currentSearchAscending = sortAscending[sortType];
OnlySearchLabel =false;
page(currentPage);
}else{
emptySearch();
}
}
}

@@ -806,17 +810,21 @@ var repoAndOrgEN={
function page(current){
currentPage=current;
startIndex = currentPage -1;
if(startIndex < 1){
startIndex = 1;
}
endIndex = currentPage + 2;
if(endIndex >= totalPage){
endIndex = totalPage;
}
doSearch(currentSearchTableName,currentSearchKeyword,current,pageSize,false,currentSearchSortBy,OnlySearchLabel);
}
function nextPage(){
currentPage = currentPage+1;
console.log("currentPage=" + currentPage);
if(currentPage >= endIndex){
startIndex=startIndex+1;
endIndex = endIndex +1;
}
page(currentPage);
}
@@ -824,10 +832,6 @@ function page(current){
console.log("currentPage=" + currentPage);
if(currentPage > 1){
currentPage = currentPage-1;
if(currentPage <= startIndex && startIndex > 1){
startIndex = startIndex -1;
endIndex = endIndex - 1;
}
console.log("currentPage=" + (currentPage));
page(currentPage);
}
@@ -862,7 +866,7 @@ function getYPosition(e){
showTip(getLabel(isZh,"search_input_large_0"),"warning",left+5,top);
}
else if(goNum<=totalPage){
page(goNum);
page(parseInt(goNum,10));
}
else{
showTip(getLabel(isZh,"search_input_maxed"),"warning",left+5,top);
@@ -908,6 +912,11 @@ function getYPosition(e){
}
}

if (endIndex < totalPage-1){
html += "...";
html += "<a id=\"page_" + totalPage+ "\" class=\"item\" href=\"javascript:page(" + totalPage +")\">" + totalPage + "</a>";
}

if(currentPage >=totalPage){
html += "<a class=\"disabled item navigation\" href=\"javascript:nextPage()\"><i class=\"icon right arrow\"></i></a>";
html += "<a class=\"disabled item navigation\" href=\"javascript:page(" + totalPage + ")\"><span class=\"navigation_label\">" + getLabel(isZh,"search_last_page") + "</span></a>";


routers/repo/attachment.go (+2, -46)

@@ -11,7 +11,6 @@ import (
"fmt"
"mime/multipart"
"net/http"
"path"
"strconv"
"strings"

@@ -830,20 +829,6 @@ func GetMultipartUploadUrl(ctx *context.Context) {
})
}

func GetObsKey(ctx *context.Context) {
uuid := gouuid.NewV4().String()
key := strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, uuid)), "/")

ctx.JSON(200, map[string]string{
"uuid": uuid,
"key": key,
"access_key_id": setting.AccessKeyID,
"secret_access_key": setting.SecretAccessKey,
"server": setting.Endpoint,
"bucket": setting.Bucket,
})
}

func CompleteMultipart(ctx *context.Context) {
uuid := ctx.Query("uuid")
uploadID := ctx.Query("uploadID")
@@ -907,10 +892,9 @@ func CompleteMultipart(ctx *context.Context) {
ctx.Error(500, fmt.Sprintf("InsertAttachment: %v", err))
return
}
attachment.UpdateDatasetUpdateUnix()
repository, _ := models.GetRepositoryByID(dataset.RepoID)
notification.NotifyOtherTask(ctx.User, repository, fmt.Sprint(attachment.Type), attachment.Name, models.ActionUploadAttachment)

notification.NotifyOtherTask(ctx.User, repository, fmt.Sprint(repository.IsPrivate, attachment.IsPrivate), attachment.Name, models.ActionUploadAttachment)
if attachment.DatasetID != 0 {
if isCanDecompress(attachment.Name) {
if typeCloudBrain == models.TypeCloudBrainOne {
@@ -947,34 +931,6 @@ func CompleteMultipart(ctx *context.Context) {
})
}

func UpdateMultipart(ctx *context.Context) {
uuid := ctx.Query("uuid")
partNumber := ctx.QueryInt("chunkNumber")
etag := ctx.Query("etag")

fileChunk, err := models.GetFileChunkByUUID(uuid)
if err != nil {
if models.IsErrFileChunkNotExist(err) {
ctx.Error(404)
} else {
ctx.ServerError("GetFileChunkByUUID", err)
}
return
}

fileChunk.CompletedParts = append(fileChunk.CompletedParts, strconv.Itoa(partNumber)+"-"+strings.Replace(etag, "\"", "", -1))

err = models.UpdateFileChunk(fileChunk)
if err != nil {
ctx.Error(500, fmt.Sprintf("UpdateFileChunk: %v", err))
return
}

ctx.JSON(200, map[string]string{
"result_code": "0",
})
}

func HandleUnDecompressAttachment() {
attachs, err := models.GetUnDecompressAttachments()
if err != nil {
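
Deleting GetObsKey closes an endpoint that handed the browser the bucket's access_key_id and secret_access_key; it existed only for the now-removed ObsUploader.vue, which uploaded straight to OBS. UpdateMultipart goes with it because part ETags are now read back server-side via listAllParts instead of per-chunk client callbacks. If a client-facing upload endpoint were ever reintroduced, the safer shape returns a short-lived presigned URL and nothing else; a hedged sketch (signPutURL is a hypothetical stand-in, not an OBS SDK call):

package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"time"
)

// signPutURL stands in for a storage SDK's URL signer.
func signPutURL(key string, ttl time.Duration) (string, error) {
	return fmt.Sprintf("https://bucket.example.com/%s?expires=%d", key, int64(ttl.Seconds())), nil
}

// getUploadURL returns only a short-lived presigned URL: unlike the removed
// GetObsKey handler, no access key or secret ever reaches the browser.
func getUploadURL(w http.ResponseWriter, r *http.Request) {
	url, err := signPutURL(r.URL.Query().Get("key"), 15*time.Minute)
	if err != nil {
		http.Error(w, "sign failed", http.StatusInternalServerError)
		return
	}
	json.NewEncoder(w).Encode(map[string]string{"url": url})
}

func main() {
	http.HandleFunc("/attachments/get_upload_url", getUploadURL)
	http.ListenAndServe(":8080", nil)
}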


routers/repo/cloudbrain.go (+4, -2)

@@ -59,6 +59,7 @@ var (
)

const BENCHMARK_TYPE_CODE = "repo.cloudbrain.benchmark.types"
const CLONE_FILE_PREFIX = "file:///"

var benchmarkTypesMap = make(map[string]*models.BenchmarkTypes, 0)

@@ -1142,7 +1143,8 @@ func GetRate(ctx *context.Context) {
}

func downloadCode(repo *models.Repository, codePath, branchName string) error {
if err := git.Clone(repo.RepoPath(), codePath, git.CloneRepoOptions{Branch: branchName}); err != nil {
//add "file:///" prefix to make the depth valid
if err := git.Clone(CLONE_FILE_PREFIX+repo.RepoPath(), codePath, git.CloneRepoOptions{Branch: branchName, Depth: 1}); err != nil {
log.Error("Failed to clone repository: %s (%v)", repo.FullName(), err)
return err
}
@@ -1202,7 +1204,7 @@ func downloadRateCode(repo *models.Repository, taskName, rateOwnerName, rateRepo
return err
}

if err := git.Clone(repoExt.RepoPath(), codePath, git.CloneRepoOptions{}); err != nil {
if err := git.Clone(CLONE_FILE_PREFIX+repoExt.RepoPath(), codePath, git.CloneRepoOptions{Depth: 1}); err != nil {
log.Error("Failed to clone repository: %s (%v)", repoExt.FullName(), err)
return err
}
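
The CLONE_FILE_PREFIX comment is terse, but the underlying git behavior is this: cloning from a bare local path takes git's local-clone shortcut and ignores --depth (git warns "--depth is ignored in local clones; use file:// instead."), so forcing the file:// transport is what makes Depth: 1 take effect. The command-line equivalent, as a sketch:

package main

import (
	"os"
	"os/exec"
)

// shallowClone mirrors downloadCode: the file:// transport makes git honor
// a depth-1 clone of an on-disk repository instead of silently ignoring it.
func shallowClone(repoPath, dest, branch string) error {
	cmd := exec.Command("git", "clone", "--depth", "1", "--branch", branch,
		"file://"+repoPath, dest)
	cmd.Stdout, cmd.Stderr = os.Stdout, os.Stderr
	return cmd.Run()
}

func main() {
	// Hypothetical paths, for illustration only.
	if err := shallowClone("/data/repositories/user/demo.git", "/tmp/demo", "master"); err != nil {
		os.Exit(1)
	}
}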


routers/repo/modelarts.go (+7, -11)

@@ -247,7 +247,9 @@ func Notebook2Create(ctx *context.Context, form auth.CreateModelArtsNotebookForm
func NotebookShow(ctx *context.Context) {
ctx.Data["PageIsCloudBrain"] = true
debugListType := ctx.Query("debugListType")

if debugListType == "" {
debugListType = "all"
}
var ID = ctx.Params(":id")
task, err := models.GetCloudbrainByIDWithDeleted(ID)
if err != nil {
@@ -1027,10 +1029,8 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm)
gitRepo, _ := git.OpenRepository(repo.RepoPath())
commitID, _ := gitRepo.GetBranchCommitID(branch_name)

if err := git.Clone(repo.RepoPath(), codeLocalPath, git.CloneRepoOptions{
Branch: branch_name,
}); err != nil {
log.Error("Create task failed, server timed out: %s (%v)", repo.FullName(), err)
if err := downloadCode(repo, codeLocalPath, branch_name); err != nil {
log.Error("downloadCode failed, server timed out: %s (%v)", repo.FullName(), err)
trainJobErrorNewDataPrepare(ctx, form)
ctx.RenderWithErr("Create task failed, server timed out", tplModelArtsTrainJobNew, &form)
return
@@ -1245,9 +1245,7 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ

gitRepo, _ := git.OpenRepository(repo.RepoPath())
commitID, _ := gitRepo.GetBranchCommitID(branch_name)
if err := git.Clone(repo.RepoPath(), codeLocalPath, git.CloneRepoOptions{
Branch: branch_name,
}); err != nil {
if err := downloadCode(repo, codeLocalPath, branch_name); err != nil {
log.Error("Failed git clone repo to local(!: %s (%v)", repo.FullName(), err)
versionErrorDataPrepare(ctx, form)
ctx.RenderWithErr("Failed git clone repo to local!", tplModelArtsTrainJobVersionNew, &form)
@@ -1874,9 +1872,7 @@ func InferenceJobCreate(ctx *context.Context, form auth.CreateModelArtsInference
gitRepo, _ := git.OpenRepository(repo.RepoPath())
commitID, _ := gitRepo.GetBranchCommitID(branch_name)

if err := git.Clone(repo.RepoPath(), codeLocalPath, git.CloneRepoOptions{
Branch: branch_name,
}); err != nil {
if err := downloadCode(repo, codeLocalPath, branch_name); err != nil {
log.Error("Create task failed, server timed out: %s (%v)", repo.FullName(), err)
inferenceJobErrorNewDataPrepare(ctx, form)
ctx.RenderWithErr("Create task failed, server timed out", tplModelArtsInferenceJobNew, &form)


routers/repo/user_data_analysis.go (+8, -8)

@@ -40,8 +40,8 @@ func queryUserDataPage(ctx *context.Context, tableName string, queryObj interfac
dataHeader := map[string]string{
"A1": ctx.Tr("user.static.id"),
"B1": ctx.Tr("user.static.name"),
"C1": ctx.Tr("user.static.codemergecount"),
"D1": ctx.Tr("user.static.UserIndex"),
"C1": ctx.Tr("user.static.UserIndex"),
"D1": ctx.Tr("user.static.codemergecount"),
"E1": ctx.Tr("user.static.commitcount"),
"F1": ctx.Tr("user.static.issuecount"),
"G1": ctx.Tr("user.static.commentcount"),
@@ -77,8 +77,8 @@ func queryUserDataPage(ctx *context.Context, tableName string, queryObj interfac
rows := fmt.Sprint(row)
xlsx.SetCellValue(sheetName, "A"+rows, userRecord.ID)
xlsx.SetCellValue(sheetName, "B"+rows, userRecord.Name)
xlsx.SetCellValue(sheetName, "C"+rows, userRecord.CodeMergeCount)
xlsx.SetCellValue(sheetName, "D"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex))
xlsx.SetCellValue(sheetName, "C"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex))
xlsx.SetCellValue(sheetName, "D"+rows, userRecord.CodeMergeCount)
xlsx.SetCellValue(sheetName, "E"+rows, userRecord.CommitCount)
xlsx.SetCellValue(sheetName, "F"+rows, userRecord.IssueCount)
xlsx.SetCellValue(sheetName, "G"+rows, userRecord.CommentCount)
@@ -239,8 +239,8 @@ func QueryUserStaticDataPage(ctx *context.Context) {
dataHeader := map[string]string{
"A1": ctx.Tr("user.static.id"),
"B1": ctx.Tr("user.static.name"),
"C1": ctx.Tr("user.static.codemergecount"),
"D1": ctx.Tr("user.static.UserIndex"),
"C1": ctx.Tr("user.static.UserIndex"),
"D1": ctx.Tr("user.static.codemergecount"),
"E1": ctx.Tr("user.static.commitcount"),
"F1": ctx.Tr("user.static.issuecount"),
"G1": ctx.Tr("user.static.commentcount"),
@@ -270,8 +270,8 @@ func QueryUserStaticDataPage(ctx *context.Context) {

xlsx.SetCellValue(sheetName, "A"+rows, userRecord.ID)
xlsx.SetCellValue(sheetName, "B"+rows, userRecord.Name)
xlsx.SetCellValue(sheetName, "C"+rows, userRecord.CodeMergeCount)
xlsx.SetCellValue(sheetName, "D"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex))
xlsx.SetCellValue(sheetName, "C"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex))
xlsx.SetCellValue(sheetName, "D"+rows, userRecord.CodeMergeCount)
xlsx.SetCellValue(sheetName, "E"+rows, userRecord.CommitCount)
xlsx.SetCellValue(sheetName, "F"+rows, userRecord.IssueCount)
xlsx.SetCellValue(sheetName, "G"+rows, userRecord.CommentCount)


routers/routes/routes.go (+1, -2)

@@ -608,12 +608,11 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Put("/obs_proxy_multipart", repo.PutOBSProxyUpload)
m.Get("/obs_proxy_download", repo.GetOBSProxyDownload)
m.Get("/get_multipart_url", repo.GetMultipartUploadUrl)
m.Post("/complete_multipart", repo.CompleteMultipart)
m.Post("/update_chunk", repo.UpdateMultipart)
}, reqSignIn)

m.Group("/attachments", func() {
m.Post("/decompress_done_notify", repo.UpdateAttachmentDecompressState)
m.Post("/complete_multipart", repo.CompleteMultipart)
})

m.Group("/attachments", func() {


routers/search.go (+17, -14)

@@ -183,7 +183,7 @@ func searchRepoByLabel(ctx *context.Context, Key string, Page int, PageSize int)
topicsQuery := elastic.NewMatchQuery("topics", Key)
boolQ.Should(topicsQuery)

res, err := client.Search("repository-es-index").Query(boolQ).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Highlight(queryHighlight("topics")).Do(ctx.Req.Context())
res, err := client.Search("repository-es-index").Query(boolQ).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Highlight(queryHighlight("topics")).Do(ctx.Req.Context())
if err == nil {
searchJson, _ := json.Marshal(res)
log.Info("searchJson=" + string(searchJson))
@@ -200,15 +200,18 @@ func searchRepoByLabel(ctx *context.Context, Key string, Page int, PageSize int)
}
}

func getSort(SortBy string, ascending bool) elastic.Sorter {
var sort elastic.Sorter
sort = elastic.NewScoreSort()
if SortBy != "" {
if SortBy == "default" {
return sort
func getSort(SortBy string, ascending bool, secondSortBy string, secondAscending bool) []elastic.Sorter {
sort := make([]elastic.Sorter, 0)
if SortBy == "default" || SortBy == "" {
sort = append(sort, elastic.NewScoreSort())
if secondSortBy != "" {
log.Info("SortBy=" + SortBy + " secondSortBy=" + secondSortBy)
sort = append(sort, elastic.NewFieldSort(secondSortBy).Order(secondAscending))
}
return elastic.NewFieldSort(SortBy).Order(ascending)
} else {
sort = append(sort, elastic.NewFieldSort(SortBy).Order(ascending))
}
log.Info("sort size=" + fmt.Sprint(len(sort)))
return sort
}

@@ -308,7 +311,7 @@ func searchRepo(ctx *context.Context, TableName string, Key string, Page int, Pa
topicsQuery := elastic.NewMatchQuery("topics", Key).Boost(1).QueryName("f_third")
boolQ.Should(nameQuery, descriptionQuery, topicsQuery)

res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Highlight(queryHighlight("alias", "description", "topics")).Do(ctx.Req.Context())
res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending, "num_stars", false)...).From(from).Size(Size).Highlight(queryHighlight("alias", "description", "topics")).Do(ctx.Req.Context())
if err == nil {
searchJson, _ := json.Marshal(res)
log.Info("searchJson=" + string(searchJson))
@@ -330,7 +333,7 @@ func searchRepo(ctx *context.Context, TableName string, Key string, Page int, Pa
} else {
log.Info("query all content.")
// the sorted attribute must specify {"timestamp":{"unmapped_type":"date"}}
res, err := client.Search(TableName).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Do(ctx.Req.Context())
res, err := client.Search(TableName).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Do(ctx.Req.Context())
if err == nil {
searchJson, _ := json.Marshal(res)
log.Info("searchJson=" + string(searchJson))
@@ -691,7 +694,7 @@ func searchUserOrOrg(ctx *context.Context, TableName string, Key string, Page in
boolQ.Must(UserOrOrgQuery)
}

res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending)).From((Page - 1) * PageSize).Size(PageSize).Highlight(queryHighlight("name", "full_name", "description")).Do(ctx.Req.Context())
res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From((Page - 1) * PageSize).Size(PageSize).Highlight(queryHighlight("name", "full_name", "description")).Do(ctx.Req.Context())
if err == nil {
searchJson, _ := json.Marshal(res)
log.Info("searchJson=" + string(searchJson))
@@ -849,7 +852,7 @@ func searchDataSet(ctx *context.Context, TableName string, Key string, Page int,
fileNameQuery := elastic.NewMatchQuery("file_name", Key).Boost(1).QueryName("f_third")
categoryQuery := elastic.NewMatchQuery("category", Key).Boost(1).QueryName("f_fourth")
boolQ.Should(nameQuery, descQuery, categoryQuery, fileNameQuery)
res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Highlight(queryHighlight("title", "description", "file_name", "category")).Do(ctx.Req.Context())
res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Highlight(queryHighlight("title", "description", "file_name", "category")).Do(ctx.Req.Context())
if err == nil {
searchJson, _ := json.Marshal(res)
log.Info("searchJson=" + string(searchJson))
@@ -864,7 +867,7 @@ func searchDataSet(ctx *context.Context, TableName string, Key string, Page int,
} else {
log.Info("query all datasets.")
// the sorted attribute must specify {"timestamp":{"unmapped_type":"date"}}
res, err := client.Search(TableName).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Do(ctx.Req.Context())
res, err := client.Search(TableName).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Do(ctx.Req.Context())
if err == nil {
searchJson, _ := json.Marshal(res)
log.Info("searchJson=" + string(searchJson))
@@ -1057,7 +1060,7 @@ func searchIssueOrPr(ctx *context.Context, TableName string, Key string, Page in
boolQ.Must(isIssueQuery)
}

res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Highlight(queryHighlight("name", "content", "comment")).Do(ctx.Req.Context())
res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Highlight(queryHighlight("name", "content", "comment")).Do(ctx.Req.Context())
if err == nil {
searchJson, _ := json.Marshal(res)
log.Info("searchJson=" + string(searchJson))


services/socketwrap/clientManager.go (+1, -1)

@@ -10,7 +10,7 @@ import (
"github.com/elliotchance/orderedmap"
)

var opTypes = []int{1, 2, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 17, 22, 23, 25, 26, 27, 28, 29, 30, 31}
var opTypes = []int{1, 2, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 17, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31}

type ClientsManager struct {
Clients *orderedmap.OrderedMap


templates/repo/cloudbrain/show.tmpl (+1, -1)

@@ -187,7 +187,7 @@ td, th {
{{.i18n.Tr "repo.cloudbrain"}}
</a>
<div class="divider"> / </div>
<a class="section backTodeBug" href="{{.RepoLink}}/debugjob?debugListType={{if eq $.debugListType "NPU"}}NPU{{else if eq $.debugListType "CPU/GPU"}}CPU/GPU{{else}}all{{end}}">
<a class="section backTodeBug" href="{{.RepoLink}}/debugjob?debugListType=all">
{{$.i18n.Tr "repo.modelarts.notebook"}}
</a>
<div class="divider"> / </div>


templates/repo/datasets/index.tmpl (+1, -1)

@@ -281,7 +281,7 @@
</span>
<el-dropdown-menu slot="dropdown">
<el-dropdown-item class="clipboard" data-clipboard-text="{{.DownloadURL}}" data-clipboard-action="copy">{{$.i18n.Tr "dataset.copy_url"}}</el-dropdown-item>
<el-dropdown-item class="clipboard" data-clipboard-text="{{.Md5}}" data-clipboard-action="copy">{{$.i18n.Tr "dataset.copy_md5"}}</el-dropdown-item>
<!-- <el-dropdown-item class="clipboard" data-clipboard-text="{{.Md5}}" data-clipboard-action="copy">{{$.i18n.Tr "dataset.copy_md5"}}</el-dropdown-item>-->
{{if and ($.CanWrite) (eq .DecompressState 1) }}
<el-dropdown-item @click.native="gotoAnnotate('{{$.RepoLink}}','{{.UUID}}',{{.Type}})">{{$.i18n.Tr "dataset.annotation"}}</el-dropdown-item>
{{end}}


templates/repo/modelarts/notebook/show.tmpl (+1, -1)

@@ -193,7 +193,7 @@ td, th {
{{.i18n.Tr "repo.cloudbrain"}}
</a>
<div class="divider"> / </div>
<a class="section backTodeBug" href="{{.RepoLink}}/debugjob?debugListType={{if eq $.debugListType "NPU"}}NPU{{else if eq $.debugListType "CPU/GPU"}}CPU/GPU{{else}}all{{end}}">
<a class="section backTodeBug" href="{{.RepoLink}}/debugjob?debugListType=all">
{{$.i18n.Tr "repo.modelarts.notebook"}}
</a>
<div class="divider"> / </div>


templates/user/dashboard/feeds.tmpl (+1, -1)

@@ -71,7 +71,7 @@
{{ $index := index .GetIssueInfos 0}}
{{$.i18n.Tr "action.comment_pull" .GetRepoLink $index .ShortRepoPath | Str2html}}
{{else if eq .GetOpType 24}}
{{$.i18n.Tr "action.upload_dataset" .GetRepoLink .Content .RefName | Str2html}}
{{$.i18n.Tr "action.upload_dataset" .GetRepoLink .RefName | Str2html}}
{{else if eq .GetOpType 25}}
{{$.i18n.Tr "action.task_gpudebugjob" .GetRepoLink .Content .RefName | Str2html}}
{{else if eq .GetOpType 26}}


web_src/js/components/MinioUploader.vue (+10, -4)

@@ -27,6 +27,7 @@ import createDropzone from '../features/dropzone.js';

const {_AppSubUrl, _StaticUrlPrefix, csrf} = window.config;
const chunkSize = 1024 * 1024 * 64;
const md5ChunkSize = 1024 * 1024 * 1;

export default {
props:{
@@ -190,10 +191,9 @@ export default {
let currentChunk = 0;

const time = new Date().getTime();
// console.log('computing MD5...')
this.status = this.dropzoneParams.data('md5-computing');
file.totalChunkCounts = chunks;
loadNext();
loadMd5Next();

fileReader.onload = (e) => {
fileLoaded.call(this, e);
@@ -207,13 +207,12 @@ export default {
spark.append(e.target.result); // Append array buffer
currentChunk++;
if (currentChunk < chunks) {
// console.log(`chunk ${currentChunk} parsed, starting chunk ${currentChunk + 1}/${chunks}`);
this.status = `${this.dropzoneParams.data('loading-file')} ${(
(currentChunk / chunks) *
100
).toFixed(2)}% (${currentChunk}/${chunks})`;
this.updateProgress(file, ((currentChunk / chunks) * 100).toFixed(2));
loadNext();
loadMd5Next();
return;
}

@@ -235,6 +234,13 @@ export default {
start + chunkSize >= file.size ? file.size : start + chunkSize;
fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
}

function loadMd5Next() {
const start = currentChunk * chunkSize;
const end =
start + md5ChunkSize >= file.size ? file.size : start + md5ChunkSize;
fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
}
},

async computeMD5Success(md5edFile) {
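
Note what loadMd5Next actually changes: the reader still advances in 64 MB strides (start = currentChunk * chunkSize) but each read now stops after md5ChunkSize = 1 MB, so the digest becomes a sampled hash over the first 1 MB of every 64 MB window rather than a full-file MD5. That trades exactness for a large speedup on multi-GB datasets, and any server-side lookup keyed on this identifier (such as get_chunks) must compute it the same way. A Go rendering of the sampling rule, as a sketch:

package main

import (
	"crypto/md5"
	"encoding/hex"
	"fmt"
	"io"
	"os"
)

const (
	chunkSize    = 64 * 1024 * 1024 // stride between samples, as in the uploader
	md5ChunkSize = 1 * 1024 * 1024  // bytes actually hashed per stride
)

// sampledMD5 hashes only the first 1 MB of every 64 MB window, mirroring
// loadMd5Next: fast on huge files, but not the file's true MD5.
func sampledMD5(path string) (string, error) {
	f, err := os.Open(path)
	if err != nil {
		return "", err
	}
	defer f.Close()

	h := md5.New()
	buf := make([]byte, md5ChunkSize)
	for off := int64(0); ; off += chunkSize {
		n, err := f.ReadAt(buf, off)
		h.Write(buf[:n])
		if err == io.EOF {
			break
		}
		if err != nil {
			return "", err
		}
	}
	return hex.EncodeToString(h.Sum(nil)), nil
}

func main() {
	sum, err := sampledMD5(os.Args[1])
	if err != nil {
		panic(err)
	}
	fmt.Println(sum)
}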


web_src/js/components/ObsUploader.vue (+0, -484)

@@ -1,484 +0,0 @@
<template>
<div class="dropzone-wrapper dataset-files">
<div
id="dataset"
class="dropzone"
/>
<p class="upload-info">
{{ file_status_text }}
<strong class="success text red">{{ status }}</strong>
</p>
<p>Note:<br>
- Only datasets in zip format can launch cloudbrain tasks;<br>
- Cloudbrain 1 provides <span class="text blue">CPU / GPU</span> resources, Cloudbrain 2 provides <span class="text blue">Ascend NPU</span> resources; a dataset used for debugging must be uploaded to the matching environment.
</p>
</div>
</template>

<script>
/* eslint-disable eqeqeq */
// import Dropzone from 'dropzone/dist/dropzone.js';
// import 'dropzone/dist/dropzone.css'
import SparkMD5 from 'spark-md5';
import axios from 'axios';
import qs from 'qs';
import createDropzone from '../features/dropzone.js';

const {_AppSubUrl, _StaticUrlPrefix, csrf} = window.config;
const CloudBrainType = 1;

export default {
data() {
return {
dropzoneUploader: null,
maxFiles: 1,
maxFilesize: 1 * 1024 * 1024 * 1024 * 1024,
acceptedFiles: '*/*',
progress: 0,
status: '',
dropzoneParams: {},
file_status_text: ''
};
},

async mounted() {
this.dropzoneParams = $('div#minioUploader-params');
this.file_status_text = this.dropzoneParams.data('file-status');
this.status = this.dropzoneParams.data('file-init-status');

let previewTemplate = '';
previewTemplate += '<div class="dz-preview dz-file-preview">\n ';
previewTemplate += ' <div class="dz-details">\n ';
previewTemplate += ' <div class="dz-filename">';
previewTemplate +=
' <span data-dz-name data-dz-thumbnail></span>';
previewTemplate += ' </div>\n ';
previewTemplate += ' <div class="dz-size" data-dz-size style="white-space: nowrap"></div>\n ';
previewTemplate += ' </div>\n ';
previewTemplate += ' <div class="dz-progress ui active progress">';
previewTemplate +=
' <div class="dz-upload bar" data-dz-uploadprogress><div class="progress"></div></div>\n ';
previewTemplate += ' </div>\n ';
previewTemplate += ' <div class="dz-success-mark">';
previewTemplate += ' <span>Upload succeeded</span>';
previewTemplate += ' </div>\n ';
previewTemplate += ' <div class="dz-error-mark">';
previewTemplate += ' <span>Upload failed</span>';
previewTemplate += ' </div>\n ';
previewTemplate += ' <div class="dz-error-message">';
previewTemplate += ' <span data-dz-errormessage></span>';
previewTemplate += ' </div>\n';
previewTemplate += '</div>';

const $dropzone = $('div#dataset');
console.log('createDropzone');
const dropzoneUploader = await createDropzone($dropzone[0], {
url: '/todouploader',
maxFiles: this.maxFiles,
maxFilesize: this.maxFileSize,
timeout: 0,
autoQueue: false,
dictDefaultMessage: this.dropzoneParams.data('default-message'),
dictInvalidFileType: this.dropzoneParams.data('invalid-input-type'),
dictFileTooBig: this.dropzoneParams.data('file-too-big'),
dictRemoveFile: this.dropzoneParams.data('remove-file'),
previewTemplate
});
dropzoneUploader.on('addedfile', (file) => {
setTimeout(() => {
// eslint-disable-next-line no-unused-expressions
file.accepted && this.onFileAdded(file);
}, 200);
});
dropzoneUploader.on('maxfilesexceeded', function (file) {
if (this.files[0].status !== 'success') {
alert(this.dropzoneParams.data('waitting-uploading'));
this.removeFile(file);
return;
}
this.removeAllFiles();
this.addFile(file);
});

this.dropzoneUploader = dropzoneUploader;
},
methods: {
resetStatus() {
this.progress = 0;
this.status = '';
},
updateProgress(file, progress) {
file.previewTemplate.querySelector(
'.dz-upload'
).style.width = `${progress}%`;
},
emitDropzoneSuccess(file) {
file.status = 'success';
this.dropzoneUploader.emit('success', file);
this.dropzoneUploader.emit('complete', file);
},
emitDropzoneFailed(file) {
this.status = this.dropzoneParams.data('falied');
file.status = 'error';
this.dropzoneUploader.emit('error', file);
// this.dropzoneUploader.emit('complete', file);
},
onFileAdded(file) {
file.datasetId = document
.getElementById('datasetId')
.getAttribute('datasetId');
this.resetStatus();
this.computeMD5(file);
},

finishUpload(file) {
this.emitDropzoneSuccess(file);
setTimeout(() => {
window.location.reload();
}, 1000);
},

computeMD5(file) {
this.resetStatus();
const blobSlice =
File.prototype.slice ||
File.prototype.mozSlice ||
File.prototype.webkitSlice,
chunkSize = 1024 * 1024 * 64,
chunks = Math.ceil(file.size / chunkSize),
spark = new SparkMD5.ArrayBuffer(),
fileReader = new FileReader();
let currentChunk = 0;

const time = new Date().getTime();
// console.log('computing MD5...')
this.status = this.dropzoneParams.data('md5-computing');
file.totalChunkCounts = chunks;
loadNext();

fileReader.onload = (e) => {
fileLoaded.call(this, e);
};
fileReader.onerror = (err) => {
console.warn('oops, something went wrong.', err);
file.cancel();
};

function fileLoaded(e) {
spark.append(e.target.result); // Append array buffer
currentChunk++;
if (currentChunk < chunks) {
// console.log(`chunk ${currentChunk} parsed, starting chunk ${currentChunk + 1}/${chunks}`);
this.status = `${this.dropzoneParams.data('loading-file')} ${(
(currentChunk / chunks) *
100
).toFixed(2)}% (${currentChunk}/${chunks})`;
this.updateProgress(file, ((currentChunk / chunks) * 100).toFixed(2));
loadNext();
return;
}

const md5 = spark.end();
console.log(
`MD5 computed: ${file.name} \nMD5: ${md5} \nchunks: ${chunks} size: ${
file.size
} time: ${(new Date().getTime() - time) / 1000} s`
);
spark.destroy(); // free the hash buffer
file.uniqueIdentifier = md5; // use the file's MD5 as its unique identifier
file.cmd5 = false; // clear the MD5-computing flag
this.computeMD5Success(file);
}

function loadNext() {
const start = currentChunk * chunkSize;
const end =
start + chunkSize >= file.size ? file.size : start + chunkSize;
fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
}
},

async computeMD5Success(md5edFile) {
const file = await this.getSuccessChunks(md5edFile);
try {
if (file.uploadID == '' || file.uuid == '') {
// not uploaded yet
await this.newMultiUpload(file);
if (file.uploadID != '' && file.uuid != '') {
file.chunks = '';
this.multipartUpload(file);
} else {
// TODO: how to handle failure
return;
}
return;
}

if (file.uploaded == '1') {
// already uploaded
// instant upload via deduplication
if (file.attachID == '0') {
// the dataset record was deleted but the file was kept
await addAttachment(file);
}
//the same file uploaded under a different dataset
if (file.datasetID != '' ) {
if (file.datasetName != "" && file.realName != "") {
var info = "该文件已上传,对应数据集(" + file.datasetName + ")-文件(" + file.realName + ")";
window.alert(info);
window.location.reload();
}
}
console.log('file already fully uploaded');
this.progress = 100;
this.status = this.dropzoneParams.data('upload-complete');
this.finishUpload(file);
} else {
// resume an interrupted upload
this.multipartUpload(file);
}
} catch (error) {
this.emitDropzoneFailed(file);
console.log(error);
}

async function addAttachment(file) {
return await axios.post(
'/attachments/add',
qs.stringify({
uuid: file.uuid,
file_name: file.name,
size: file.size,
dataset_id: file.datasetId,
type: CloudBrainType,
_csrf: csrf,
})
);
}
},

async getSuccessChunks(file) {
const params = {
params: {
md5: file.uniqueIdentifier,
type: CloudBrainType,
file_name: file.name,
_csrf: csrf
}
};
try {
const response = await axios.get('/attachments/get_chunks', params);
file.uploadID = response.data.uploadID;
file.uuid = response.data.uuid;
file.uploaded = response.data.uploaded;
file.chunks = response.data.chunks;
file.attachID = response.data.attachID;
file.datasetID = response.data.datasetID;
file.datasetName = response.data.datasetName;
file.realName = response.data.fileName;
return file;
} catch (error) {
this.emitDropzoneFailed(file);
console.log('getSuccessChunks catch: ', error);
return null;
}
},

async newMultiUpload(file) {
const res = await axios.get('/attachments/new_multipart', {
params: {
totalChunkCounts: file.totalChunkCounts,
md5: file.uniqueIdentifier,
size: file.size,
fileType: file.type,
type: CloudBrainType,
file_name: file.name,
_csrf: csrf
}
});
file.uploadID = res.data.uploadID;
file.uuid = res.data.uuid;
},

multipartUpload(file) {
const blobSlice =
File.prototype.slice ||
File.prototype.mozSlice ||
File.prototype.webkitSlice,
chunkSize = 1024 * 1024 * 64,
chunks = Math.ceil(file.size / chunkSize),
fileReader = new FileReader(),
time = new Date().getTime();
let currentChunk = 0;

function loadNext() {
const start = currentChunk * chunkSize;
const end =
start + chunkSize >= file.size ? file.size : start + chunkSize;
fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
}

function checkSuccessChunks() {
const index = successChunks.indexOf((currentChunk + 1).toString());
if (index == -1) {
return false;
}
return true;
}

async function getUploadChunkUrl(currentChunk, partSize) {
const res = await axios.get('/attachments/get_multipart_url', {
params: {
uuid: file.uuid,
uploadID: file.uploadID,
size: partSize,
chunkNumber: currentChunk + 1,
type: CloudBrainType,
file_name: file.name,
_csrf: csrf
}
});
urls[currentChunk] = res.data.url;
}

async function uploadMinio(url, e) {
let urls = [];
const res = await axios.put(url, e.target.result, {
headers: {
'Content-Type': ''
}});
etags[currentChunk] = res.headers.etag;
}

async function uploadMinioNewMethod(url,e){
var xhr = new XMLHttpRequest();
xhr.open('PUT', url, false);
xhr.setRequestHeader('Content-Type', '')
xhr.send(e.target.result);
var etagValue = xhr.getResponseHeader('ETag');
//console.log(etagValue);
etags[currentChunk] = etagValue;
}

async function updateChunk(currentChunk) {
await axios.post(
'/attachments/update_chunk',
qs.stringify({
uuid: file.uuid,
chunkNumber: currentChunk + 1,
etag: etags[currentChunk],
type: CloudBrainType,
_csrf: csrf
})
);
}
async function uploadChunk(e) {
try {
if (!checkSuccessChunks()) {
const start = currentChunk * chunkSize;
const partSize =
start + chunkSize >= file.size ? file.size - start : chunkSize;
// get the upload URL for this part
await getUploadChunkUrl(currentChunk, partSize);
if (urls[currentChunk] != '') {
// upload the part to minio
await uploadMinioNewMethod(urls[currentChunk], e);
if (etags[currentChunk] != '') {
// update the database with this part's upload result
//await updateChunk(currentChunk);
} else {
console.log("上传到minio uploadChunk etags[currentChunk] == ''");// TODO
}
} else {
console.log("uploadChunk urls[currentChunk] != ''");// TODO
}
}
} catch (error) {
this.emitDropzoneFailed(file);
console.log(error);
}
}

async function completeUpload() {
return await axios.post(
'/attachments/complete_multipart',
qs.stringify({
uuid: file.uuid,
uploadID: file.uploadID,
file_name: file.name,
size: file.size,
dataset_id: file.datasetId,
type: CloudBrainType,
_csrf: csrf
})
);
}

const successChunks = [];
let successParts = [];
successParts = file.chunks.split(',');
for (let i = 0; i < successParts.length; i++) {
successChunks[i] = successParts[i].split('-')[0];
}
const urls = []; // TODO const ?
const etags = [];
console.log('uploading parts...');
this.status = this.dropzoneParams.data('uploading');
loadNext();
fileReader.onload = async (e) => {
await uploadChunk(e);
fileReader.abort();
currentChunk++;
if (currentChunk < chunks) {
console.log(
`part ${currentChunk} uploaded, starting part ${currentChunk +
1}/${chunks}`
);
this.progress = Math.ceil((currentChunk / chunks) * 100);
this.updateProgress(file, ((currentChunk / chunks) * 100).toFixed(2));
this.status = `${this.dropzoneParams.data('uploading')} ${(
(currentChunk / chunks) *
100
).toFixed(2)}%`;
await loadNext();
} else {
await completeUpload();
console.log(
`file upload complete: ${file.name} \nchunks: ${chunks} size: ${
file.size
} time: ${(new Date().getTime() - time) / 1000} s`
);
this.progress = 100;
this.status = this.dropzoneParams.data('upload-complete');
this.finishUpload(file);
}
};
}
}
};
</script>

<style>
.dropzone-wrapper {
margin: 0;
}
.ui .dropzone {
border: 2px dashed #0087f5;
box-shadow: none !important;
padding: 0;
min-height: 5rem;
border-radius: 4px;
}
.dataset .dataset-files #dataset .dz-preview.dz-file-preview,
.dataset .dataset-files #dataset .dz-preview.dz-processing {
display: flex;
align-items: center;
}
.dataset .dataset-files #dataset .dz-preview {
border-bottom: 1px solid #dadce0;
min-height: 0;
}
.upload-info{
margin-top: 0.2em;
}
</style>

web_src/js/components/UserAnalysis.vue (+5, -5)

@@ -63,11 +63,6 @@
</template>
</el-table-column>
<el-table-column
prop="CodeMergeCount"
label="PR数"
align="center">
</el-table-column>
<el-table-column
prop="UserIndex"
label="用户指数"
width="120px"
@@ -77,6 +72,11 @@
</template>
</el-table-column>
<el-table-column
prop="CodeMergeCount"
label="PR数"
align="center">
</el-table-column>
<el-table-column
prop="CommitCount"
label="commit数"
align="center">

