diff --git a/models/dbsql/dataset_foreigntable_for_es.sql b/models/dbsql/dataset_foreigntable_for_es.sql
index 02e5f0ddf..4a0cae70e 100644
--- a/models/dbsql/dataset_foreigntable_for_es.sql
+++ b/models/dbsql/dataset_foreigntable_for_es.sql
@@ -158,16 +158,20 @@ DROP TRIGGER IF EXISTS es_update_dataset on public.dataset;
 CREATE OR REPLACE FUNCTION public.update_dataset() RETURNS trigger AS
 $def$
   BEGIN
-    UPDATE public.dataset_es
-    SET description=NEW.description,
-        title=NEW.title,
-        category=NEW.category,
-        task=NEW.task,
-        download_times=NEW.download_times,
-        updated_unix=NEW.updated_unix,
-        file_name=(select array_to_string(array_agg(name order by created_unix desc),'-#,#-') from public.attachment where dataset_id=NEW.id and is_private=false),
-        file_desc=(select array_to_string(array_agg(description order by created_unix desc),'-#,#-') from public.attachment where dataset_id=NEW.id and is_private=false)
-    where id=NEW.id;
+    if (NEW.status=0) then
+      delete from public.dataset_es where id=NEW.id;
+    elsif (NEW.status=1) then
+      UPDATE public.dataset_es
+      SET description=NEW.description,
+          title=NEW.title,
+          category=NEW.category,
+          task=NEW.task,
+          download_times=NEW.download_times,
+          updated_unix=NEW.updated_unix,
+          file_name=(select array_to_string(array_agg(name order by created_unix desc),'-#,#-') from public.attachment where dataset_id=NEW.id and is_private=false),
+          file_desc=(select array_to_string(array_agg(description order by created_unix desc),'-#,#-') from public.attachment where dataset_id=NEW.id and is_private=false)
+      where id=NEW.id;
+    end if;
   return new;
   END
 $def$
diff --git a/models/dbsql/repo_foreigntable_for_es.sql b/models/dbsql/repo_foreigntable_for_es.sql
index e927eb7c2..82c8781af 100644
--- a/models/dbsql/repo_foreigntable_for_es.sql
+++ b/models/dbsql/repo_foreigntable_for_es.sql
@@ -461,7 +461,7 @@ $def$

   if not OLD.is_private and NEW.is_private then
     delete from public.issue_es where repo_id=NEW.id;
-    delete from public.dataset_es where repo_id=NEW.id;
+    -- delete from public.dataset_es where repo_id=NEW.id;
     delete from public.repository_es where id=NEW.id;
   end if;

diff --git a/models/repo_activity_custom.go b/models/repo_activity_custom.go
index 6e7921d75..b6fffca0e 100644
--- a/models/repo_activity_custom.go
+++ b/models/repo_activity_custom.go
@@ -238,6 +238,9 @@ func GetAllUserPublicRepoKPIStats(startTime time.Time, endTime time.Time) (map[s
 				CommitLines: 0,
 			}
 		}
+		if value.Email == "1250125907@qq.com" || value.Email == "peiyongyu-34@163.com" {
+			log.Info("repo path=" + repository.RepoPath())
+		}
 		authors[key].Commits += value.Commits
 		authors[key].CommitLines += value.CommitLines

diff --git a/models/user_analysis_for_activity.go b/models/user_analysis_for_activity.go
index e69eecae0..d8e4a5500 100644
--- a/models/user_analysis_for_activity.go
+++ b/models/user_analysis_for_activity.go
@@ -195,7 +195,7 @@ func queryPullRequestPublic(start_unix int64, end_unix int64, publicAllRepo map[
 	sess := x.NewSession()
 	defer sess.Close()
 	resultMap := make(map[int64]int)
-	cond := "pull_request.merged_unix>=" + fmt.Sprint(start_unix) + " and pull_request.merged_unix<=" + fmt.Sprint(end_unix)
+	cond := "issue.created_unix>=" + fmt.Sprint(start_unix) + " and issue.created_unix<=" + fmt.Sprint(end_unix)
 	count, err := sess.Table("issue").Join("inner", "pull_request", "issue.id=pull_request.issue_id").Where(cond).Count(new(Issue))
 	if err != nil {
 		log.Info("query issue error. return.")
diff --git a/modules/cloudbrain/cloudbrain.go b/modules/cloudbrain/cloudbrain.go
index 6cbb97999..e09937df3 100755
--- a/modules/cloudbrain/cloudbrain.go
+++ b/modules/cloudbrain/cloudbrain.go
@@ -326,7 +326,7 @@ func GenerateTask(req GenerateCloudBrainTaskReq) error {
 				ReadOnly: true,
 			},
 		})
-	} else {
+	} else if len(req.DatasetInfos) > 1 {
 		for _, dataset := range req.DatasetInfos {
 			volumes = append(volumes, models.Volume{
 				HostPath: models.StHostPath{
@@ -547,7 +547,7 @@ func RestartTask(ctx *context.Context, task *models.Cloudbrain, newID *string) e
 			GPUNumber:    resourceSpec.GpuNum,
 			MemoryMB:     resourceSpec.MemMiB,
 			ShmMB:        resourceSpec.ShareMemMiB,
-			Command:      GetCloudbrainDebugCommand(),//Command,
+			Command:      GetCloudbrainDebugCommand(), //Command,
 			NeedIBDevice: false,
 			IsMainRole:   false,
 			UseNNI:       false,
diff --git a/modules/git/repo_stats_custom.go b/modules/git/repo_stats_custom.go
index 1a7b657d5..52d91e77a 100644
--- a/modules/git/repo_stats_custom.go
+++ b/modules/git/repo_stats_custom.go
@@ -62,7 +62,7 @@ func GetUserKPIStats(repoPath string, startTime time.Time, endTime time.Time) (m
 	after := startTime.Format(time.RFC3339)
 	until := endTime.Format(time.RFC3339)

-	args := []string{"log", "--numstat", "--no-merges", "--branches=*", "--pretty=format:---%n%h%n%an%n%ae%n", "--date=iso", fmt.Sprintf("--after='%s'", after), fmt.Sprintf("--until=='%s'", until)}
+	args := []string{"log", "--numstat", "--no-merges", "--branches=*", "--pretty=format:---%n%h%n%an%n%ae%n", "--date=iso", fmt.Sprintf("--after='%s'", after), fmt.Sprintf("--until='%s'", until)}
 	stdout, err := NewCommand(args...).RunInDirBytes(repoPath)
 	if err != nil {
 		return nil, err
diff --git a/routers/repo/cloudbrain.go b/routers/repo/cloudbrain.go
index 257101331..777dbcb10 100755
--- a/routers/repo/cloudbrain.go
+++ b/routers/repo/cloudbrain.go
@@ -2,7 +2,6 @@ package repo

 import (
 	"bufio"
-	"code.gitea.io/gitea/modules/notification"
 	"encoding/json"
 	"errors"
 	"fmt"
@@ -16,6 +15,8 @@ import (
 	"time"
 	"unicode/utf8"

+	"code.gitea.io/gitea/modules/notification"
+
 	"code.gitea.io/gitea/modules/grampus"

 	"code.gitea.io/gitea/modules/timeutil"
@@ -475,6 +476,7 @@ func CloudBrainNew(ctx *context.Context) {
 		ctx.ServerError("get new cloudbrain info failed", err)
 		return
 	}
+	ctx.Data["PageIsGPUDebug"] = true
 	ctx.HTML(200, tplCloudBrainNew)
 }

@@ -540,13 +542,17 @@ func CloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) {
 			return
 		}
 	}
-
-	datasetInfos, datasetNames, err := models.GetDatasetInfo(uuids)
-	if err != nil {
-		log.Error("GetDatasetInfo failed: %v", err, ctx.Data["MsgID"])
-		cloudBrainTrainJobErrorPrepare(ctx, form)
-		ctx.RenderWithErr(ctx.Tr("cloudbrain.error.dataset_select"), tpl, &form)
-		return
+	var datasetInfos map[string]models.DatasetInfo
+	var datasetNames string
+	//var
+	if uuids != "" {
+		datasetInfos, datasetNames, err = models.GetDatasetInfo(uuids)
+		if err != nil {
+			log.Error("GetDatasetInfo failed: %v", err, ctx.Data["MsgID"])
+			cloudBrainNewDataPrepare(ctx)
+			ctx.RenderWithErr(ctx.Tr("cloudbrain.error.dataset_select"), tpl, &form)
+			return
+		}
 	}

 	command := cloudbrain.GetCloudbrainDebugCommand()
diff --git a/routers/search.go b/routers/search.go
index 05074df55..628350424 100644
--- a/routers/search.go
+++ b/routers/search.go
@@ -313,9 +313,8 @@ func searchRepo(ctx *context.Context, TableName string, Key string, Page int, Pa

 	res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending, "num_stars", false)...).From(from).Size(Size).Highlight(queryHighlight("alias", "description", "topics")).Do(ctx.Req.Context())
 	if err == nil {
-		searchJson, _ := json.Marshal(res)
-		log.Info("searchJson=" + string(searchJson))
 		esresult := makeRepoResult(res, Key, OnlyReturnNum, language)
+		setForkRepoOrder(esresult)
 		resultObj.Total = resultObj.PrivateTotal + esresult.Total
 		isNeedSort := false
 		if len(resultObj.Result) > 0 {
@@ -348,6 +347,30 @@ func searchRepo(ctx *context.Context, TableName string, Key string, Page int, Pa
 	}
 }

+func setForkRepoOrder(esresult *SearchRes) {
+	forkidMap := make(map[string]int, 0)
+	for index, re := range esresult.Result {
+		if re["fork_id"] != nil {
+			fork_id := re["fork_id"].(string)
+			if _, ok := forkidMap[fork_id]; !ok {
+				forkidMap[fork_id] = index
+			}
+		}
+	}
+	for key, value := range forkidMap {
+		for index, re := range esresult.Result {
+			if re["id"].(string) == key {
+				if value < index { //swap
+					tmp := esresult.Result[index]
+					esresult.Result[index] = esresult.Result[value]
+					esresult.Result[value] = tmp
+					break
+				}
+			}
+		}
+	}
+}
+
 func sortRepo(Result []map[string]interface{}, SortBy string, ascending bool) {
 	orderBy := ""
 	switch SortBy {
@@ -479,6 +502,7 @@ func makeRepoResult(sRes *elastic.SearchResult, Key string, OnlyReturnNum bool,
 			record["num_stars"] = recordSource["num_stars"]
 			record["num_forks"] = recordSource["num_forks"]
 			record["lower_alias"] = recordSource["lower_alias"]
+			record["fork_id"] = recordSource["fork_id"]
 			if recordSource["topics"] != nil {
 				topicsStr := recordSource["topics"].(string)
 				log.Info("topicsStr=" + topicsStr)
diff --git a/web_src/js/components/dataset/selectDataset.vue b/web_src/js/components/dataset/selectDataset.vue
index 4c056094e..3596fea1f 100755
--- a/web_src/js/components/dataset/selectDataset.vue
+++ b/web_src/js/components/dataset/selectDataset.vue
@@ -955,7 +955,8 @@ export default {
       ) {
         this.benchmarkNew = true;
       }
-      if (location.href.indexOf("modelarts/notebook/create") !== -1) {
+      if (location.href.indexOf("modelarts/notebook/create") !== -1 || location.href.indexOf("/cloudbrain/create") !== -1) {
+        console.log("required is false;");
         this.required = false;
       }
       window.onresize = () => {