diff --git a/models/attachment.go b/models/attachment.go index 7c95a73dd..ea8f1645f 100755 --- a/models/attachment.go +++ b/models/attachment.go @@ -10,6 +10,7 @@ import ( "io" "path" "strings" + "time" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/obs" @@ -64,6 +65,7 @@ type AttachmentInfo struct { Repo *Repository `xorm:"extends"` RelAvatarLink string `xorm:"extends"` UserName string `xorm:"extends"` + Recommend bool `xorm:"-"` } type AttachmentsOptions struct { @@ -78,6 +80,7 @@ type AttachmentsOptions struct { JustNeedZipFile bool NeedRepoInfo bool Keyword string + RecommendOnly bool } func (a *Attachment) AfterUpdate() { @@ -104,6 +107,14 @@ func (a *Attachment) IncreaseDownloadCount() error { return nil } +func (a *Attachment) UpdateDatasetUpdateUnix() error { + // Update the dataset's updated_unix to the current time. + if _, err := x.Exec("UPDATE `dataset` SET updated_unix=? WHERE id=?", time.Now().Unix(), a.DatasetID); err != nil { + return fmt.Errorf("UpdateDatasetUpdateUnix: %v", err) + } + return nil +} + // APIFormat converts models.Attachment to api.Attachment func (a *Attachment) APIFormat() *api.Attachment { return &api.Attachment{ @@ -570,6 +581,11 @@ func Attachments(opts *AttachmentsOptions) ([]*AttachmentInfo, int64, error) { builder.Eq{"attachment.is_private": opts.IsPrivate}, ) } + if opts.RecommendOnly { + cond = cond.And(builder.In("attachment.id", builder.Select("attachment.id"). + From("attachment"). + Join("INNER", "dataset", "attachment.dataset_id = dataset.id and dataset.recommend=true"))) + } if opts.JustNeedZipFile { var DecompressState []int32 @@ -618,6 +634,7 @@ func Attachments(opts *AttachmentsOptions) ([]*AttachmentInfo, int64, error) { if err != nil { return nil, 0, fmt.Errorf("GetDatasetByID failed error: %v", err) } + attachment.Recommend = dataset.Recommend repo, err := GetRepositoryByID(dataset.RepoID) if err == nil { attachment.Repo = repo diff --git a/models/cloudbrain.go b/models/cloudbrain.go index 45ed52bd5..480350c8d 100755 --- a/models/cloudbrain.go +++ b/models/cloudbrain.go @@ -580,6 +580,8 @@ type CommitImageParams struct { Topics []string CloudBrainType int UID int64 + Place string + Type int } type CommitImageResult struct { diff --git a/models/cloudbrain_image.go b/models/cloudbrain_image.go index c88db0f67..eb21e0d87 100644 --- a/models/cloudbrain_image.go +++ b/models/cloudbrain_image.go @@ -567,12 +567,12 @@ func isImageStaring(e Engine, userID, imageID int64) bool { } func RecommendImage(imageId int64, recommond bool) error { - image := Image{Type: getRecommondType(recommond)} + image := Image{Type: GetRecommondType(recommond)} _, err := x.ID(imageId).Cols("type").Update(image) return err } -func getRecommondType(recommond bool) int { +func GetRecommondType(recommond bool) int { if recommond { return RECOMMOND_TYPE diff --git a/models/dataset.go b/models/dataset.go index 95800100c..d3a142742 100755 --- a/models/dataset.go +++ b/models/dataset.go @@ -23,7 +23,8 @@ type Dataset struct { Category string Description string `xorm:"TEXT"` DownloadTimes int64 - NumStars int `xorm:"INDEX NOT NULL DEFAULT 0"` + NumStars int `xorm:"INDEX NOT NULL DEFAULT 0"` + Recommend bool `xorm:"INDEX NOT NULL DEFAULT false"` License string Task string ReleaseID int64 `xorm:"INDEX"` @@ -99,6 +100,7 @@ type SearchDatasetOptions struct { OwnerID int64 RepoID int64 IncludePublic bool + RecommendOnly bool Category string Task string License string @@ -132,6 +134,13 @@ func CreateDataset(dataset *Dataset) (err error) { } +func RecommendDataset(dataSetId int64,
recommend bool) error { + + dataset := Dataset{Recommend: recommend} + _, err := x.ID(dataSetId).Cols("recommend").Update(dataset) + return err +} + func SearchDataset(opts *SearchDatasetOptions) (DatasetList, int64, error) { cond := SearchDatasetCondition(opts) return SearchDatasetByCondition(opts, cond) @@ -146,7 +155,6 @@ func SearchDatasetCondition(opts *SearchDatasetOptions) builder.Cond { if opts.RepoID > 0 { cond = cond.And(builder.Eq{"dataset.repo_id": opts.RepoID}) } - if opts.IncludePublic { cond = cond.And(builder.Eq{"dataset.status": DatasetStatusPublic}) cond = cond.And(builder.Eq{"attachment.is_private": false}) @@ -185,6 +193,10 @@ func generateFilterCond(opts *SearchDatasetOptions, cond builder.Cond) builder.C cond = cond.And(builder.Eq{"dataset.license": opts.License}) } + if opts.RecommendOnly { + cond = cond.And(builder.Eq{"dataset.recommend": opts.RecommendOnly}) + } + return cond } @@ -198,7 +210,7 @@ func SearchDatasetByCondition(opts *SearchDatasetOptions, cond builder.Cond) (Da defer sess.Close() datasets := make(DatasetList, 0, opts.PageSize) - selectColumnsSql := "distinct dataset.id,dataset.title, dataset.status, dataset.category, dataset.description, dataset.download_times, dataset.license, dataset.task, dataset.release_id, dataset.user_id, dataset.repo_id, dataset.created_unix,dataset.updated_unix,dataset.num_stars" + selectColumnsSql := "distinct dataset.id,dataset.title, dataset.status, dataset.category, dataset.description, dataset.download_times, dataset.license, dataset.task, dataset.release_id, dataset.user_id, dataset.repo_id, dataset.created_unix,dataset.updated_unix,dataset.num_stars,dataset.recommend" count, err := sess.Distinct("dataset.id").Join("INNER", "repository", "repository.id = dataset.repo_id"). Join("INNER", "attachment", "attachment.dataset_id=dataset.id"). diff --git a/models/helper_environment.go b/models/helper_environment.go index bc9d4c8fc..7248f6f61 100644 --- a/models/helper_environment.go +++ b/models/helper_environment.go @@ -12,15 +12,19 @@ import ( // env keys for git hooks need const ( - EnvRepoName = "GITEA_REPO_NAME" - EnvRepoUsername = "GITEA_REPO_USER_NAME" - EnvRepoIsWiki = "GITEA_REPO_IS_WIKI" - EnvPusherName = "GITEA_PUSHER_NAME" - EnvPusherEmail = "GITEA_PUSHER_EMAIL" - EnvPusherID = "GITEA_PUSHER_ID" - EnvKeyID = "GITEA_KEY_ID" - EnvIsDeployKey = "GITEA_IS_DEPLOY_KEY" - EnvIsInternal = "GITEA_INTERNAL_PUSH" + EnvRepoName = "GITEA_REPO_NAME" + EnvRepoUsername = "GITEA_REPO_USER_NAME" + EnvRepoIsWiki = "GITEA_REPO_IS_WIKI" + EnvPusherName = "GITEA_PUSHER_NAME" + EnvPusherEmail = "GITEA_PUSHER_EMAIL" + EnvPusherID = "GITEA_PUSHER_ID" + EnvKeyID = "GITEA_KEY_ID" + EnvIsDeployKey = "GITEA_IS_DEPLOY_KEY" + EnvIsInternal = "GITEA_INTERNAL_PUSH" + EnvRepoSize = "REPO_CURRENT_SIZE" + EnvRepoMaxFileSize = "REPO_MAX_FILE_SIZE" + EnvRepoMaxSize = "REPO_MAX_SIZE" + EnvPushSizeCheckFlag = "PUSH_SIZE_CHECK_FLAG" ) // InternalPushingEnvironment returns an os environment to switch off hooks on push diff --git a/models/issue.go b/models/issue.go index 19f00d5f3..3ed49ce42 100755 --- a/models/issue.go +++ b/models/issue.go @@ -775,6 +775,41 @@ func (issue *Issue) ChangeContent(doer *User, content string) (err error) { return sess.Commit() } +// ChangeRef changes issue ref, as the given user. 
+func (issue *Issue) ChangeRef(doer *User, newRef string) (err error) { + oldRef := issue.Ref + issue.Ref = newRef + if oldRef == newRef { + return nil + } + + sess := x.NewSession() + defer sess.Close() + if err = sess.Begin(); err != nil { + return err + } + + if err = updateIssueCols(sess, issue, "ref"); err != nil { + sess.Rollback() + return fmt.Errorf("UpdateIssueCols: %v", err) + } + + var opts = &CreateCommentOptions{ + Type: CommentTypeRef, + Doer: doer, + Repo: issue.Repo, + Issue: issue, + OldRef: oldRef, + NewRef: newRef, + } + if _, err = createComment(sess, opts); err != nil { + sess.Rollback() + return err + } + + return sess.Commit() +} + // GetTasks returns the amount of tasks in the issues content func (issue *Issue) GetTasks() int { return len(issueTasksPat.FindAllStringIndex(issue.Content, -1)) diff --git a/models/issue_comment.go b/models/issue_comment.go index 60d38452c..8197eba85 100755 --- a/models/issue_comment.go +++ b/models/issue_comment.go @@ -90,6 +90,8 @@ const ( CommentTypeReviewRequest // merge pull request CommentTypeMergePull + // Ref changed + CommentTypeRef ) // CommentTag defines comment tag type diff --git a/models/repo_list.go b/models/repo_list.go index 5bf0ecf03..253cc968c 100755 --- a/models/repo_list.go +++ b/models/repo_list.go @@ -221,6 +221,7 @@ const ( SearchOrderByHot SearchOrderBy = "(num_watches + num_stars + num_forks + clone_cnt) DESC" SearchOrderByActive SearchOrderBy = "(num_issues + num_pulls + num_commit) DESC" SearchOrderByWatches SearchOrderBy = "num_watches DESC" + SearchOrderByDefault SearchOrderBy = "recommend desc,num_stars DESC,updated_unix DESC" ) // SearchRepositoryCondition creates a query condition according search repository options diff --git a/models/user_business_analysis.go b/models/user_business_analysis.go index ca8c5071f..2d7592baf 100644 --- a/models/user_business_analysis.go +++ b/models/user_business_analysis.go @@ -82,17 +82,27 @@ type UserBusinessAnalysisAll struct { DataDate string `xorm:"NULL"` //cloudbraintask - CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` - GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` - GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` - CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` - CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` - UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` - UserLocation string `xorm:"NULL"` + CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` + GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` + GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` + CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` + CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` + UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` + UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` + + UserLocation string `xorm:"NULL"` + + FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` + CollectDataset int `xorm:"NOT NULL DEFAULT 0"` + CollectedDataset int `xorm:"NOT NULL DEFAULT 0"` + RecommendDataset int `xorm:"NOT NULL DEFAULT 0"` + CollectImage int `xorm:"NOT NULL DEFAULT 0"` + CollectedImage int `xorm:"NOT NULL DEFAULT 0"` + RecommendImage int `xorm:"NOT NULL DEFAULT 0"` } type UserBusinessAnalysis struct { @@ -159,17 +169,27 @@ type 
UserBusinessAnalysis struct { DataDate string `xorm:"NULL"` - CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` - GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` - GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` - CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` - CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` - UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` - UserLocation string `xorm:"NULL"` + CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` + GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` + GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` + CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` + CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` + UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` + UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` + + UserLocation string `xorm:"NULL"` + + FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` + CollectDataset int `xorm:"NOT NULL DEFAULT 0"` + CollectedDataset int `xorm:"NOT NULL DEFAULT 0"` + RecommendDataset int `xorm:"NOT NULL DEFAULT 0"` + CollectImage int `xorm:"NOT NULL DEFAULT 0"` + CollectedImage int `xorm:"NOT NULL DEFAULT 0"` + RecommendImage int `xorm:"NOT NULL DEFAULT 0"` } type UserBusinessAnalysisQueryOptions struct { @@ -410,8 +430,10 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS log.Info("truncate all data from table: " + tableName) statictisSess.Exec("TRUNCATE TABLE " + tableName) - log.Info("pageStartTime:" + pageStartTime.Format("2006-01-02 15:04:05")) - log.Info("pageEndTime time:" + pageEndTime.Format("2006-01-02 15:04:05")) + StartTimeNextDay := pageStartTime.AddDate(0, 0, 1) + EndTimeNextDay := pageEndTime.AddDate(0, 0, 1) + log.Info("pageStartTime:" + pageStartTime.Format("2006-01-02 15:04:05") + " nextDay:" + StartTimeNextDay.Format("2006-01-02 15:04:05")) + log.Info("pageEndTime time:" + pageEndTime.Format("2006-01-02 15:04:05") + " nextDay:" + EndTimeNextDay.Format("2006-01-02 15:04:05")) start_unix := pageStartTime.Unix() end_unix := pageEndTime.Unix() @@ -426,8 +448,8 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS CommentCountMap := queryComment(start_unix, end_unix) FocusRepoCountMap := queryWatch(start_unix, end_unix) StarRepoCountMap := queryStar(start_unix, end_unix) - WatchedCountMap := queryFollow(start_unix, end_unix) - CommitCodeSizeMap := queryCommitCodeSize(start_unix, end_unix) + WatchedCountMap, WatchOtherMap := queryFollow(start_unix, end_unix) + CommitCodeSizeMap := queryCommitCodeSize(StartTimeNextDay.Unix(), EndTimeNextDay.Unix()) CommitDatasetSizeMap, CommitDatasetNumMap := queryDatasetSize(start_unix, end_unix) SolveIssueCountMap := querySolveIssue(start_unix, end_unix) CreateRepoCountMap := queryUserCreateRepo(start_unix, end_unix) @@ -436,6 +458,12 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS OpenIIndexMap := queryUserRepoOpenIIndex(startTime.Unix(), end_unix) CloudBrainTaskMap, CloudBrainTaskItemMap := queryCloudBrainTask(start_unix, end_unix) AiModelManageMap := queryUserModel(start_unix, end_unix) + + CollectDataset, CollectedDataset := queryDatasetStars(start_unix, end_unix) + RecommendDataset := 
queryRecommedDataSet(start_unix, end_unix) + CollectImage, CollectedImage := queryImageStars(start_unix, end_unix) + RecommendImage := queryRecommedImage(start_unix, end_unix) + DataDate := currentTimeNow.Format("2006-01-02") + " 00:01" cond := "type != 1 and is_active=true" @@ -472,6 +500,7 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS dateRecordAll.IssueCount = getMapValue(dateRecordAll.ID, IssueCountMap) dateRecordAll.CommentCount = getMapValue(dateRecordAll.ID, CommentCountMap) dateRecordAll.FocusRepoCount = getMapValue(dateRecordAll.ID, FocusRepoCountMap) + dateRecordAll.FocusOtherUser = getMapValue(dateRecordAll.ID, WatchOtherMap) dateRecordAll.StarRepoCount = getMapValue(dateRecordAll.ID, StarRepoCountMap) dateRecordAll.WatchedCount = getMapValue(dateRecordAll.ID, WatchedCountMap) dateRecordAll.CommitCodeSize = getMapValue(dateRecordAll.ID, CommitCodeSizeMap) @@ -496,13 +525,20 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS dateRecordAll.NpuInferenceJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_NpuInferenceJob", CloudBrainTaskItemMap) dateRecordAll.GpuBenchMarkJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_GpuBenchMarkJob", CloudBrainTaskItemMap) dateRecordAll.CommitModelCount = getMapValue(dateRecordAll.ID, AiModelManageMap) - dateRecordAll.UserIndex = getUserIndexFromAnalysisAll(dateRecordAll, ParaWeight) - userIndexMap[dateRecordAll.ID] = dateRecordAll.UserIndex - if maxUserIndex < dateRecordAll.UserIndex { - maxUserIndex = dateRecordAll.UserIndex + dateRecordAll.CollectDataset = getMapValue(dateRecordAll.ID, CollectDataset) + dateRecordAll.CollectedDataset = getMapValue(dateRecordAll.ID, CollectedDataset) + dateRecordAll.RecommendDataset = getMapValue(dateRecordAll.ID, RecommendDataset) + dateRecordAll.CollectImage = getMapValue(dateRecordAll.ID, CollectImage) + dateRecordAll.CollectedImage = getMapValue(dateRecordAll.ID, CollectedImage) + dateRecordAll.RecommendImage = getMapValue(dateRecordAll.ID, RecommendImage) + + dateRecordAll.UserIndexPrimitive = getUserIndexFromAnalysisAll(dateRecordAll, ParaWeight) + userIndexMap[dateRecordAll.ID] = dateRecordAll.UserIndexPrimitive + if maxUserIndex < dateRecordAll.UserIndexPrimitive { + maxUserIndex = dateRecordAll.UserIndexPrimitive } - if minUserIndex > dateRecordAll.UserIndex { - minUserIndex = dateRecordAll.UserIndex + if minUserIndex > dateRecordAll.UserIndexPrimitive { + minUserIndex = dateRecordAll.UserIndexPrimitive } dateRecordBatch = append(dateRecordBatch, dateRecordAll) if len(dateRecordBatch) >= BATCH_INSERT_SIZE { @@ -552,7 +588,7 @@ func insertTable(dateRecords []UserBusinessAnalysisAll, tableName string, static insertBatchSql := "INSERT INTO public." 
+ tableName + "(id, count_date, code_merge_count, commit_count, issue_count, comment_count, focus_repo_count, star_repo_count, watched_count, gitea_age_month, commit_code_size, commit_dataset_size, " + - "commit_model_count, solve_issue_count, encyclopedias_count, regist_date, create_repo_count, login_count, open_i_index, email, name, data_date,cloud_brain_task_num,gpu_debug_job,npu_debug_job,gpu_train_job,npu_train_job,npu_inference_job,gpu_bench_mark_job,cloud_brain_run_time,commit_dataset_num,user_index,user_location) " + + "commit_model_count, solve_issue_count, encyclopedias_count, regist_date, create_repo_count, login_count, open_i_index, email, name, data_date,cloud_brain_task_num,gpu_debug_job,npu_debug_job,gpu_train_job,npu_train_job,npu_inference_job,gpu_bench_mark_job,cloud_brain_run_time,commit_dataset_num,user_index,user_location,focus_other_user,collect_dataset,collected_dataset,recommend_dataset,collect_image,collected_image,recommend_image,user_index_primitive) " + "VALUES" for i, record := range dateRecords { @@ -560,7 +596,8 @@ func insertTable(dateRecords []UserBusinessAnalysisAll, tableName string, static ", " + fmt.Sprint(record.IssueCount) + ", " + fmt.Sprint(record.CommentCount) + ", " + fmt.Sprint(record.FocusRepoCount) + ", " + fmt.Sprint(record.StarRepoCount) + ", " + fmt.Sprint(record.WatchedCount) + ", " + fmt.Sprint(record.GiteaAgeMonth) + ", " + fmt.Sprint(record.CommitCodeSize) + ", " + fmt.Sprint(record.CommitDatasetSize) + ", " + fmt.Sprint(record.CommitModelCount) + ", " + fmt.Sprint(record.SolveIssueCount) + ", " + fmt.Sprint(record.EncyclopediasCount) + ", " + fmt.Sprint(record.RegistDate) + - ", " + fmt.Sprint(record.CreateRepoCount) + ", " + fmt.Sprint(record.LoginCount) + ", " + fmt.Sprint(record.OpenIIndex) + ", '" + record.Email + "', '" + record.Name + "', '" + record.DataDate + "'," + fmt.Sprint(record.CloudBrainTaskNum) + "," + fmt.Sprint(record.GpuDebugJob) + "," + fmt.Sprint(record.NpuDebugJob) + "," + fmt.Sprint(record.GpuTrainJob) + "," + fmt.Sprint(record.NpuTrainJob) + "," + fmt.Sprint(record.NpuInferenceJob) + "," + fmt.Sprint(record.GpuBenchMarkJob) + "," + fmt.Sprint(record.CloudBrainRunTime) + "," + fmt.Sprint(record.CommitDatasetNum) + "," + fmt.Sprint(record.UserIndex) + ",'" + record.UserLocation + "')" + ", " + fmt.Sprint(record.CreateRepoCount) + ", " + fmt.Sprint(record.LoginCount) + ", " + fmt.Sprint(record.OpenIIndex) + ", '" + record.Email + "', '" + record.Name + "', '" + record.DataDate + "'," + fmt.Sprint(record.CloudBrainTaskNum) + "," + fmt.Sprint(record.GpuDebugJob) + "," + fmt.Sprint(record.NpuDebugJob) + "," + fmt.Sprint(record.GpuTrainJob) + "," + fmt.Sprint(record.NpuTrainJob) + "," + fmt.Sprint(record.NpuInferenceJob) + "," + fmt.Sprint(record.GpuBenchMarkJob) + "," + fmt.Sprint(record.CloudBrainRunTime) + "," + fmt.Sprint(record.CommitDatasetNum) + "," + fmt.Sprint(record.UserIndex) + ",'" + record.UserLocation + "'," + + fmt.Sprint(record.FocusOtherUser) + "," + fmt.Sprint(record.CollectDataset) + "," + fmt.Sprint(record.CollectedDataset) + "," + fmt.Sprint(record.RecommendDataset) + "," + fmt.Sprint(record.CollectImage) + "," + fmt.Sprint(record.CollectedImage) + "," + fmt.Sprint(record.RecommendImage) + "," + fmt.Sprint(record.UserIndexPrimitive) + ")" if i < (len(dateRecords) - 1) { insertBatchSql += "," } @@ -628,7 +665,7 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time, CommentCountMap := queryComment(start_unix, end_unix) FocusRepoCountMap := queryWatch(start_unix, end_unix) 
StarRepoCountMap := queryStar(start_unix, end_unix) - WatchedCountMap := queryFollow(start_unix, end_unix) + WatchedCountMap, WatchOtherMap := queryFollow(start_unix, end_unix) CommitCodeSizeMap, err := GetAllUserKPIStats() if err != nil { @@ -643,6 +680,12 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time, OpenIIndexMap := queryUserRepoOpenIIndex(start_unix, end_unix) CloudBrainTaskMap, CloudBrainTaskItemMap := queryCloudBrainTask(start_unix, end_unix) AiModelManageMap := queryUserModel(start_unix, end_unix) + + CollectDataset, CollectedDataset := queryDatasetStars(start_unix, end_unix) + RecommendDataset := queryRecommedDataSet(start_unix, end_unix) + CollectImage, CollectedImage := queryImageStars(start_unix, end_unix) + RecommendImage := queryRecommedImage(start_unix, end_unix) + statictisSess := xStatistic.NewSession() defer statictisSess.Close() @@ -683,13 +726,12 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time, dateRecord.FocusRepoCount = getMapValue(dateRecord.ID, FocusRepoCountMap) dateRecord.StarRepoCount = getMapValue(dateRecord.ID, StarRepoCountMap) dateRecord.WatchedCount = getMapValue(dateRecord.ID, WatchedCountMap) - + dateRecord.FocusOtherUser = getMapValue(dateRecord.ID, WatchOtherMap) if _, ok := CommitCodeSizeMap[dateRecord.Email]; !ok { dateRecord.CommitCodeSize = 0 } else { dateRecord.CommitCodeSize = int(CommitCodeSizeMap[dateRecord.Email].CommitLines) } - dateRecord.CommitDatasetSize = getMapValue(dateRecord.ID, CommitDatasetSizeMap) dateRecord.CommitDatasetNum = getMapValue(dateRecord.ID, CommitDatasetNumMap) dateRecord.SolveIssueCount = getMapValue(dateRecord.ID, SolveIssueCountMap) @@ -715,7 +757,15 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time, dateRecord.GpuBenchMarkJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_GpuBenchMarkJob", CloudBrainTaskItemMap) dateRecord.CloudBrainRunTime = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_CloudBrainRunTime", CloudBrainTaskItemMap) dateRecord.CommitModelCount = getMapValue(dateRecord.ID, AiModelManageMap) - dateRecord.UserIndex = getUserIndex(dateRecord, ParaWeight) + + dateRecord.CollectDataset = getMapValue(dateRecord.ID, CollectDataset) + dateRecord.CollectedDataset = getMapValue(dateRecord.ID, CollectedDataset) + dateRecord.RecommendDataset = getMapValue(dateRecord.ID, RecommendDataset) + dateRecord.CollectImage = getMapValue(dateRecord.ID, CollectImage) + dateRecord.CollectedImage = getMapValue(dateRecord.ID, CollectedImage) + dateRecord.RecommendImage = getMapValue(dateRecord.ID, RecommendImage) + + dateRecord.UserIndexPrimitive = getUserIndex(dateRecord, ParaWeight) setUserMetrics(userMetrics, userRecord, start_unix, end_unix, dateRecord) _, err = statictisSess.Insert(&dateRecord) if err != nil { @@ -765,7 +815,7 @@ func setUserMetrics(userMetrics map[string]int, user *User, start_time int64, en userMetrics["TotalActivateRegistUser"] = getMapKeyStringValue("TotalActivateRegistUser", userMetrics) + 1 } - if dateRecord.UserIndex > 0 || dateRecord.LoginCount > 0 { + if getUserActivate(dateRecord) > 0 { userMetrics["HasActivityUser"] = getMapKeyStringValue("HasActivityUser", userMetrics) + 1 } @@ -802,7 +852,12 @@ func getUserIndexFromAnalysisAll(dateRecord UserBusinessAnalysisAll, ParaWeight result += float64(dateRecord.StarRepoCount) * getParaWeightValue("StarRepoCount", ParaWeight, 0.1) result += float64(dateRecord.LoginCount) * getParaWeightValue("LoginCount", ParaWeight, 0.1) result += 
float64(dateRecord.WatchedCount) * getParaWeightValue("WatchedCount", ParaWeight, 0.3) - result += float64(dateRecord.CommitCodeSize) * getParaWeightValue("CommitCodeSize", ParaWeight, 0.1) + codeLine := float64(dateRecord.CommitCodeSize) + limitCodeLine := getParaWeightValue("LimitCommitCodeSize", ParaWeight, 1000) + if codeLine >= limitCodeLine { + codeLine = limitCodeLine + } + result += codeLine * getParaWeightValue("CommitCodeSize", ParaWeight, 0.01) result += float64(dateRecord.SolveIssueCount) * getParaWeightValue("SolveIssueCount", ParaWeight, 0.2) result += float64(dateRecord.EncyclopediasCount) * getParaWeightValue("EncyclopediasCount", ParaWeight, 0.1) result += float64(dateRecord.CreateRepoCount) * getParaWeightValue("CreateRepoCount", ParaWeight, 0.05) @@ -810,6 +865,34 @@ func getUserIndexFromAnalysisAll(dateRecord UserBusinessAnalysisAll, ParaWeight result += float64(dateRecord.CommitModelCount) * getParaWeightValue("CommitModelCount", ParaWeight, 0.2) result += dateRecord.OpenIIndex * getParaWeightValue("OpenIIndex", ParaWeight, 0.1) + result += float64(dateRecord.CollectDataset) * getParaWeightValue("CollectDataset", ParaWeight, 0.1) + result += float64(dateRecord.CollectedDataset) * getParaWeightValue("CollectedDataset", ParaWeight, 0.1) + result += float64(dateRecord.RecommendDataset) * getParaWeightValue("RecommendDataset", ParaWeight, 0.2) + result += float64(dateRecord.CollectImage) * getParaWeightValue("CollectImage", ParaWeight, 0.1) + result += float64(dateRecord.CollectedImage) * getParaWeightValue("CollectedImage", ParaWeight, 0.1) + result += float64(dateRecord.RecommendImage) * getParaWeightValue("RecommendImage", ParaWeight, 0.2) + + return result +} + +func getUserActivate(dateRecord UserBusinessAnalysis) int { + var result int + result += dateRecord.CodeMergeCount + result += dateRecord.CommitCount + result += dateRecord.IssueCount + result += dateRecord.CommentCount + result += dateRecord.FocusRepoCount + result += dateRecord.StarRepoCount + result += dateRecord.SolveIssueCount + result += dateRecord.EncyclopediasCount + result += dateRecord.CreateRepoCount + result += dateRecord.CloudBrainTaskNum + result += dateRecord.CommitModelCount + result += dateRecord.CommitDatasetNum + result += dateRecord.FocusOtherUser + result += dateRecord.CollectDataset + result += dateRecord.CollectImage + result += dateRecord.CommitCodeSize return result } @@ -831,7 +914,12 @@ func getUserIndex(dateRecord UserBusinessAnalysis, ParaWeight map[string]float64 result += float64(dateRecord.StarRepoCount) * getParaWeightValue("StarRepoCount", ParaWeight, 0.1) result += float64(dateRecord.LoginCount) * getParaWeightValue("LoginCount", ParaWeight, 0.1) result += float64(dateRecord.WatchedCount) * getParaWeightValue("WatchedCount", ParaWeight, 0.3) - result += float64(dateRecord.CommitCodeSize) * getParaWeightValue("CommitCodeSize", ParaWeight, 0.1) + codeLine := float64(dateRecord.CommitCodeSize) + limitCodeLine := getParaWeightValue("LimitCommitCodeSize", ParaWeight, 1000) + if codeLine >= limitCodeLine { + codeLine = limitCodeLine + } + result += codeLine * getParaWeightValue("CommitCodeSize", ParaWeight, 0.01) result += float64(dateRecord.SolveIssueCount) * getParaWeightValue("SolveIssueCount", ParaWeight, 0.2) result += float64(dateRecord.EncyclopediasCount) * getParaWeightValue("EncyclopediasCount", ParaWeight, 0.1) result += float64(dateRecord.CreateRepoCount) * getParaWeightValue("CreateRepoCount", ParaWeight, 0.05) @@ -839,6 +927,13 @@ func getUserIndex(dateRecord 
UserBusinessAnalysis, ParaWeight map[string]float64 result += float64(dateRecord.CommitModelCount) * getParaWeightValue("CommitModelCount", ParaWeight, 0.2) result += dateRecord.OpenIIndex * getParaWeightValue("OpenIIndex", ParaWeight, 0.1) + result += float64(dateRecord.CollectDataset) * getParaWeightValue("CollectDataset", ParaWeight, 0.1) + result += float64(dateRecord.CollectedDataset) * getParaWeightValue("CollectedDataset", ParaWeight, 0.1) + result += float64(dateRecord.RecommendDataset) * getParaWeightValue("RecommendDataset", ParaWeight, 0.2) + result += float64(dateRecord.CollectImage) * getParaWeightValue("CollectImage", ParaWeight, 0.1) + result += float64(dateRecord.CollectedImage) * getParaWeightValue("CollectedImage", ParaWeight, 0.1) + result += float64(dateRecord.RecommendImage) * getParaWeightValue("RecommendImage", ParaWeight, 0.2) + return result } @@ -1129,17 +1224,18 @@ func queryStar(start_unix int64, end_unix int64) map[int64]int { return resultMap } -func queryFollow(start_unix int64, end_unix int64) map[int64]int { +func queryFollow(start_unix int64, end_unix int64) (map[int64]int, map[int64]int) { sess := x.NewSession() defer sess.Close() resultMap := make(map[int64]int) + resultFocusedByOtherMap := make(map[int64]int) cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix) count, err := sess.Where(cond).Count(new(Follow)) if err != nil { log.Info("query follow error. return.") - return resultMap + return resultMap, resultFocusedByOtherMap } var indexTotal int64 indexTotal = 0 @@ -1155,6 +1251,11 @@ func queryFollow(start_unix int64, end_unix int64) map[int64]int { } else { resultMap[followRecord.FollowID] += 1 } + if _, ok := resultFocusedByOtherMap[followRecord.UserID]; !ok { + resultFocusedByOtherMap[followRecord.UserID] = 1 + } else { + resultFocusedByOtherMap[followRecord.UserID] += 1 + } } indexTotal += PAGE_SIZE @@ -1163,7 +1264,215 @@ func queryFollow(start_unix int64, end_unix int64) map[int64]int { } } - return resultMap + return resultMap, resultFocusedByOtherMap +} + +func queryRecommedDataSet(start_unix int64, end_unix int64) map[int64]int { + sess := x.NewSession() + defer sess.Close() + userIdDdatasetMap := make(map[int64]int) + cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix) + " and recommend=true" + count, err := sess.Where(cond).Count(new(Dataset)) + if err != nil { + log.Info("query recommend dataset error. return.") + return userIdDdatasetMap + } + var indexTotal int64 + indexTotal = 0 + for { + sess.Select("id,user_id,recommend").Where(cond).Table(new(Dataset)).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) + datasetList := make([]*Dataset, 0) + sess.Find(&datasetList) + log.Info("query datasetList size=" + fmt.Sprint(len(datasetList))) + for _, datasetRecord := range datasetList { + if _, ok := userIdDdatasetMap[datasetRecord.UserID]; !ok { + userIdDdatasetMap[datasetRecord.UserID] = 1 + } else { + userIdDdatasetMap[datasetRecord.UserID] += 1 + } + } + indexTotal += PAGE_SIZE + if indexTotal >= count { + break + } + } + return userIdDdatasetMap +} + +func queryAllDataSet() (map[int64]int64, map[int64]int64) { + sess := x.NewSession() + defer sess.Close() + datasetUserIdMap := make(map[int64]int64) + userIdDdatasetMap := make(map[int64]int64) + count, err := sess.Count(new(Dataset)) + if err != nil { + log.Info("query dataset error. 
return.") + return datasetUserIdMap, userIdDdatasetMap + } + var indexTotal int64 + indexTotal = 0 + for { + sess.Select("id,user_id").Table(new(Dataset)).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) + datasetList := make([]*Dataset, 0) + sess.Find(&datasetList) + log.Info("query datasetList size=" + fmt.Sprint(len(datasetList))) + for _, datasetRecord := range datasetList { + datasetUserIdMap[datasetRecord.ID] = datasetRecord.UserID + if _, ok := userIdDdatasetMap[datasetRecord.UserID]; !ok { + userIdDdatasetMap[datasetRecord.UserID] = 1 + } else { + userIdDdatasetMap[datasetRecord.UserID] += 1 + } + } + indexTotal += PAGE_SIZE + if indexTotal >= count { + break + } + } + return datasetUserIdMap, userIdDdatasetMap +} + +func queryRecommedImage(start_unix int64, end_unix int64) map[int64]int { + sess := x.NewSession() + defer sess.Close() + userIdImageMap := make(map[int64]int) + cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix) + " and type=5" + count, err := sess.Where(cond).Count(new(Image)) + if err != nil { + log.Info("query recommend image error. return.") + return userIdImageMap + } + var indexTotal int64 + indexTotal = 0 + for { + sess.Select("id,uid,type").Where(cond).Table(new(Image)).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) + imageList := make([]*Image, 0) + sess.Find(&imageList) + log.Info("query imageList size=" + fmt.Sprint(len(imageList))) + for _, imageRecord := range imageList { + if _, ok := userIdImageMap[imageRecord.UID]; !ok { + userIdImageMap[imageRecord.UID] = 1 + } else { + userIdImageMap[imageRecord.UID] += 1 + } + } + indexTotal += PAGE_SIZE + if indexTotal >= count { + break + } + } + return userIdImageMap +} + +func queryAllImage() (map[int64]int64, map[int64]int64) { + sess := x.NewSession() + defer sess.Close() + imageUserIdMap := make(map[int64]int64) + userIdDImageMap := make(map[int64]int64) + count, err := sess.Count(new(Image)) + if err != nil { + log.Info("query image error. return.") + return imageUserIdMap, userIdDImageMap + } + var indexTotal int64 + indexTotal = 0 + for { + sess.Select("id,uid").Table(new(Image)).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) + imageList := make([]*Image, 0) + sess.Find(&imageList) + log.Info("query imageList size=" + fmt.Sprint(len(imageList))) + for _, imageRecord := range imageList { + imageUserIdMap[imageRecord.ID] = imageRecord.UID + if _, ok := userIdDImageMap[imageRecord.UID]; !ok { + userIdDImageMap[imageRecord.UID] = 1 + } else { + userIdDImageMap[imageRecord.UID] += 1 + } + } + indexTotal += PAGE_SIZE + if indexTotal >= count { + break + } + } + return imageUserIdMap, userIdDImageMap +} + +func queryDatasetStars(start_unix int64, end_unix int64) (map[int64]int, map[int64]int) { + sess := x.NewSession() + defer sess.Close() + datasetCollect := make(map[int64]int) + datasetCollected := make(map[int64]int) + datasetUserIdMap, _ := queryAllDataSet() + cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix) + count, err := sess.Where(cond).Count(new(DatasetStar)) + if err != nil { + log.Info("query follow error. 
return.") + return datasetCollect, datasetCollected + } + var indexTotal int64 + indexTotal = 0 + for { + sess.Select("id,uid,dataset_id").Table(new(DatasetStar)).Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) + datasetStarList := make([]*DatasetStar, 0) + sess.Find(&datasetStarList) + log.Info("query datasetStarList size=" + fmt.Sprint(len(datasetStarList))) + for _, datasetStarRecord := range datasetStarList { + if _, ok := datasetCollect[datasetStarRecord.UID]; !ok { + datasetCollect[datasetStarRecord.UID] = 1 + } else { + datasetCollect[datasetStarRecord.UID] += 1 + } + if _, ok := datasetCollected[datasetUserIdMap[datasetStarRecord.DatasetID]]; !ok { + datasetCollected[datasetUserIdMap[datasetStarRecord.DatasetID]] = 1 + } else { + datasetCollected[datasetUserIdMap[datasetStarRecord.DatasetID]] += 1 + } + } + indexTotal += PAGE_SIZE + if indexTotal >= count { + break + } + } + return datasetCollect, datasetCollected +} + +func queryImageStars(start_unix int64, end_unix int64) (map[int64]int, map[int64]int) { + sess := x.NewSession() + defer sess.Close() + imageCollect := make(map[int64]int) + imageCollected := make(map[int64]int) + imageUserIdMap, _ := queryAllDataSet() + cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix) + count, err := sess.Where(cond).Count(new(ImageStar)) + if err != nil { + log.Info("query follow error. return.") + return imageCollect, imageCollected + } + var indexTotal int64 + indexTotal = 0 + for { + sess.Select("id,uid,dataset_id").Table(new(ImageStar)).Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) + imageStarList := make([]*ImageStar, 0) + sess.Find(&imageStarList) + log.Info("query imageStarList size=" + fmt.Sprint(len(imageStarList))) + for _, imageStarRecord := range imageStarList { + if _, ok := imageCollect[imageStarRecord.UID]; !ok { + imageCollect[imageStarRecord.UID] = 1 + } else { + imageCollect[imageStarRecord.UID] += 1 + } + if _, ok := imageCollected[imageUserIdMap[imageStarRecord.ImageID]]; !ok { + imageCollected[imageUserIdMap[imageStarRecord.ImageID]] = 1 + } else { + imageCollected[imageUserIdMap[imageStarRecord.ImageID]] += 1 + } + } + indexTotal += PAGE_SIZE + if indexTotal >= count { + break + } + } + return imageCollect, imageCollected } func queryDatasetSize(start_unix int64, end_unix int64) (map[int64]int, map[int64]int) { diff --git a/models/user_business_struct.go b/models/user_business_struct.go index 17d9f046f..86aecd545 100644 --- a/models/user_business_struct.go +++ b/models/user_business_struct.go @@ -45,17 +45,26 @@ type UserBusinessAnalysisCurrentYear struct { Name string `xorm:"NOT NULL"` DataDate string `xorm:"NULL"` - CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` - GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` - GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` - CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` - CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` - UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` - UserLocation string `xorm:"NULL"` + CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` + GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` + GpuBenchMarkJob int `xorm:"NOT 
NULL DEFAULT 0"` + CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` + CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` + UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` + UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` + UserLocation string `xorm:"NULL"` + + FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` + CollectDataset int `xorm:"NOT NULL DEFAULT 0"` + CollectedDataset int `xorm:"NOT NULL DEFAULT 0"` + RecommendDataset int `xorm:"NOT NULL DEFAULT 0"` + CollectImage int `xorm:"NOT NULL DEFAULT 0"` + CollectedImage int `xorm:"NOT NULL DEFAULT 0"` + RecommendImage int `xorm:"NOT NULL DEFAULT 0"` } type UserBusinessAnalysisLast30Day struct { @@ -101,17 +110,26 @@ type UserBusinessAnalysisLast30Day struct { Name string `xorm:"NOT NULL"` DataDate string `xorm:"NULL"` - CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` - GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` - GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` - CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` - CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` - UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` - UserLocation string `xorm:"NULL"` + CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` + GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` + GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` + CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` + CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` + UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` + UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` + UserLocation string `xorm:"NULL"` + + FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` + CollectDataset int `xorm:"NOT NULL DEFAULT 0"` + CollectedDataset int `xorm:"NOT NULL DEFAULT 0"` + RecommendDataset int `xorm:"NOT NULL DEFAULT 0"` + CollectImage int `xorm:"NOT NULL DEFAULT 0"` + CollectedImage int `xorm:"NOT NULL DEFAULT 0"` + RecommendImage int `xorm:"NOT NULL DEFAULT 0"` } type UserBusinessAnalysisLastMonth struct { @@ -157,17 +175,26 @@ type UserBusinessAnalysisLastMonth struct { Name string `xorm:"NOT NULL"` DataDate string `xorm:"NULL"` - CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` - GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` - GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` - CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` - CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` - UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` - UserLocation string `xorm:"NULL"` + CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` + GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` + GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` + CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` + CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` + UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` + UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` + UserLocation string `xorm:"NULL"` + + FocusOtherUser int `xorm:"NOT NULL DEFAULT 
0"` + CollectDataset int `xorm:"NOT NULL DEFAULT 0"` + CollectedDataset int `xorm:"NOT NULL DEFAULT 0"` + RecommendDataset int `xorm:"NOT NULL DEFAULT 0"` + CollectImage int `xorm:"NOT NULL DEFAULT 0"` + CollectedImage int `xorm:"NOT NULL DEFAULT 0"` + RecommendImage int `xorm:"NOT NULL DEFAULT 0"` } type UserBusinessAnalysisCurrentMonth struct { @@ -213,17 +240,26 @@ type UserBusinessAnalysisCurrentMonth struct { Name string `xorm:"NOT NULL"` DataDate string `xorm:"NULL"` - CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` - GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` - GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` - CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` - CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` - UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` - UserLocation string `xorm:"NULL"` + CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` + GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` + GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` + CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` + CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` + UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` + UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` + UserLocation string `xorm:"NULL"` + + FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` + CollectDataset int `xorm:"NOT NULL DEFAULT 0"` + CollectedDataset int `xorm:"NOT NULL DEFAULT 0"` + RecommendDataset int `xorm:"NOT NULL DEFAULT 0"` + CollectImage int `xorm:"NOT NULL DEFAULT 0"` + CollectedImage int `xorm:"NOT NULL DEFAULT 0"` + RecommendImage int `xorm:"NOT NULL DEFAULT 0"` } type UserBusinessAnalysisCurrentWeek struct { @@ -269,17 +305,27 @@ type UserBusinessAnalysisCurrentWeek struct { Name string `xorm:"NOT NULL"` DataDate string `xorm:"NULL"` - CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` - GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` - GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` - CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` - CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` - UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` - UserLocation string `xorm:"NULL"` + CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` + GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` + GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` + CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` + CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` + UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` + UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` + + UserLocation string `xorm:"NULL"` + + FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` + CollectDataset int `xorm:"NOT NULL DEFAULT 0"` + CollectedDataset int `xorm:"NOT NULL DEFAULT 0"` + RecommendDataset int `xorm:"NOT NULL DEFAULT 0"` + CollectImage int `xorm:"NOT NULL DEFAULT 0"` + CollectedImage int `xorm:"NOT NULL DEFAULT 0"` + RecommendImage int `xorm:"NOT NULL DEFAULT 
0"` } type UserBusinessAnalysisYesterday struct { @@ -325,17 +371,27 @@ type UserBusinessAnalysisYesterday struct { Name string `xorm:"NOT NULL"` DataDate string `xorm:"NULL"` - CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` - GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` - GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` - NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` - GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` - CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` - CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` - UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` - UserLocation string `xorm:"NULL"` + CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"` + GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"` + GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"` + NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"` + GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"` + CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"` + CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"` + UserIndex float64 `xorm:"NOT NULL DEFAULT 0"` + UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"` + + UserLocation string `xorm:"NULL"` + + FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"` + CollectDataset int `xorm:"NOT NULL DEFAULT 0"` + CollectedDataset int `xorm:"NOT NULL DEFAULT 0"` + RecommendDataset int `xorm:"NOT NULL DEFAULT 0"` + CollectImage int `xorm:"NOT NULL DEFAULT 0"` + CollectedImage int `xorm:"NOT NULL DEFAULT 0"` + RecommendImage int `xorm:"NOT NULL DEFAULT 0"` } type UserAnalysisPara struct { diff --git a/modules/auth/cloudbrain.go b/modules/auth/cloudbrain.go index 85f3a2127..e5be38084 100755 --- a/modules/auth/cloudbrain.go +++ b/modules/auth/cloudbrain.go @@ -33,6 +33,16 @@ type CommitImageCloudBrainForm struct { Topics string `form:"topics"` } +type CommitAdminImageCloudBrainForm struct { + Description string `form:"description" binding:"Required"` + Type int `form:"type" binding:"Required"` + Tag string `form:"tag" binding:"Required;MaxSize(100)" ` + IsPrivate bool `form:"isPrivate" binding:"Required"` + Topics string `form:"topics"` + Place string `form:"place" binding:"Required"` + IsRecommend bool `form:"isRecommend" binding:"Required"` +} + type EditImageCloudBrainForm struct { ID int64 `form:"id" binding:"Required"` Description string `form:"description" binding:"Required"` diff --git a/modules/cloudbrain/cloudbrain.go b/modules/cloudbrain/cloudbrain.go index eaf680c65..f1d3cb344 100755 --- a/modules/cloudbrain/cloudbrain.go +++ b/modules/cloudbrain/cloudbrain.go @@ -475,6 +475,7 @@ func RestartTask(ctx *context.Context, task *models.Cloudbrain, newID *string) e ComputeResource: task.ComputeResource, CreatedUnix: createTime, UpdatedUnix: createTime, + BranchName: task.BranchName, } err = models.RestartCloudbrain(task, newTask) diff --git a/modules/cloudbrain/resty.go b/modules/cloudbrain/resty.go index f1f213bea..1565d3044 100755 --- a/modules/cloudbrain/resty.go +++ b/modules/cloudbrain/resty.go @@ -312,12 +312,51 @@ sendjob: return nil }) if err == nil { - go updateImageStatus(image, isSetCreatedUnix, createTime) } return err } +func CommitAdminImage(params models.CommitImageParams) error { + + exist, err := models.IsImageExist(params.ImageTag) + + if err != nil { + return fmt.Errorf("resty CommitImage: %v", err) + } + if exist { + return models.ErrorImageTagExist{ + Tag: params.ImageTag, + } + } + + image := models.Image{ + 
CloudbrainType: params.CloudBrainType, + UID: params.UID, + IsPrivate: params.IsPrivate, + Tag: params.ImageTag, + Description: params.ImageDescription, + Place: params.Place, + Status: models.IMAGE_STATUS_SUCCESS, + Type: params.Type, + } + + err = models.WithTx(func(ctx models.DBContext) error { + + if err := models.CreateLocalImage(&image); err != nil { + log.Error("Failed to insert image record.", err) + return fmt.Errorf("resty CommitImage: %v", err) + } + + if err := models.SaveImageTopics(image.ID, params.Topics...); err != nil { + log.Error("Failed to insert image record.", err) + return fmt.Errorf("resty CommitImage: %v", err) + } + return nil + }) + return err +} + func updateImageStatus(image models.Image, isSetCreatedUnix bool, createTime time.Time) { attemps := 5 commitSuccess := false diff --git a/modules/repository/hooks.go b/modules/repository/hooks.go index 6050f21f7..7bcc5b550 100644 --- a/modules/repository/hooks.go +++ b/modules/repository/hooks.go @@ -19,7 +19,11 @@ import ( "xorm.io/builder" ) -func getHookTemplates() (hookNames, hookTpls, giteaHookTpls []string) { +const ( + SIZE_LIMIT_SCRIPT_NAME = "size_limit" +) + +func getHookTemplates() (hookNames, hookTpls, giteaHookTpls, sizeLimitTpls []string) { hookNames = []string{"pre-receive", "update", "post-receive"} hookTpls = []string{ fmt.Sprintf("#!/usr/bin/env %s\ndata=$(cat)\nexitcodes=\"\"\nhookname=$(basename $0)\nGIT_DIR=${GIT_DIR:-$(dirname $0)}\n\nfor hook in ${GIT_DIR}/hooks/${hookname}.d/*; do\ntest -x \"${hook}\" && test -f \"${hook}\" || continue\necho \"${data}\" | \"${hook}\"\nexitcodes=\"${exitcodes} $?\"\ndone\n\nfor i in ${exitcodes}; do\n[ ${i} -eq 0 ] || exit ${i}\ndone\n", setting.ScriptType), @@ -31,6 +35,11 @@ func getHookTemplates() (hookNames, hookTpls, giteaHookTpls []string) { fmt.Sprintf("#!/usr/bin/env %s\n\"%s\" hook --config='%s' update $1 $2 $3\n", setting.ScriptType, setting.AppPath, setting.CustomConf), fmt.Sprintf("#!/usr/bin/env %s\n\"%s\" hook --config='%s' post-receive\n", setting.ScriptType, setting.AppPath, setting.CustomConf), } + sizeLimitTpls = []string{ + fmt.Sprintf("#!/usr/bin/env %s\n\n\nset -o pipefail\n\nreadonly DEFAULT_FILE_MAXSIZE_MB=\"30\" \nreadonly CONFIG_NAME=\"hooks.maxfilesize\"\nreadonly NULLSHA=\"0000000000000000000000000000000000000000\"\nreadonly EXIT_SUCCESS=0\nreadonly EXIT_FAILURE=1\nreadonly DEFAULT_REPO_MAXSIZE_MB=\"1024\" \nreadonly CHECK_FLAG_ON=1\n\n\nstatus=\"$EXIT_SUCCESS\"\n\n# skip this hook entirely if shell check is not open\ncheck_flag=${PUSH_SIZE_CHECK_FLAG}\nif [[ $check_flag != $CHECK_FLAG_ON ]]; then\nexit $EXIT_SUCCESS\nfi\n\n\n#######################################\n# check the file max size limit\n#######################################\n\n# get maximum filesize (from repository-specific config)\nmaxsize_mb=\"${REPO_MAX_FILE_SIZE}\"\n\nif [[ \"$?\" != $EXIT_SUCCESS ]]; then\necho \"failed to get ${CONFIG_NAME} from config\"\nexit \"$EXIT_FAILURE\"\nfi\n\npush_size=\"0\"\n# read lines from stdin (format: \" \\n\")\nwhile read oldref newref refname; do\n# skip branch deletions\nif [[ \"$newref\" == \"$NULLSHA\" ]]; then\n continue\nfi\n\n# find large objects\n# check all objects from $oldref (possible $NULLSHA) to $newref, but\n# skip all objects that have already been accepted (i.e. 
are referenced by\n# another branch or tag).\n\nif [[ \"$oldref\" == \"$NULLSHA\" ]]; then\n target=\"$newref\"\nelse\n target=\"${oldref}..${newref}\"\nfi\nmaxsize=`expr $maxsize_mb \\* 1048576` \n\n# find objects in this push_size\n# print like:\n# 08da8e2ab9ae4095bf94dd71ac913132b880b463 commit 214\n# 43e993b768ede5740e8c65de2ed6edec25053ea1 tree 185\n# 4476971d76569039df7569af1b8d03c288f6b193 blob 20167318 b0417e6593a1.zip\nfiles=\"$(git rev-list --objects \"$target\" --tags=\\* | \\\n git cat-file $'--batch-check=%%(objectname) %%(objecttype) %%(objectsize) %%(rest)')\"\n \nif [[ \"$?\" != $EXIT_SUCCESS ]]; then\n echo \"failed to check for large files in ref ${refname}\"\n continue\nfi\n\n# rewrite IFS to separate lines in $files\nIFS=$'\\n'\nfor file in $files; do\n # if we don't unset IFS, temp_array=(${file}) will produce a wrong answer\n unset IFS\n temp_array=(${file})\n # add all commit files size\n push_size=`expr $push_size + ${temp_array[2]}`\n if [[ ${temp_array[2]} -gt $maxsize ]]; then\n\t if [[ \"$status\" == $EXIT_SUCCESS ]]; then\n\t\techo -e \"Error: Your push was rejected because it contains files larger than $(numfmt --to=iec \"$maxsize_mb\") Mb\"\n\t\techo \"oversize files:\"\n\t\tstatus=\"$EXIT_FAILURE\"\n\t fi\n\t echo -e \"\\033[31m- ${temp_array[3]} \\033[0m (ref: ${refname}) \"\n fi\ndone\n\nif [[ \"$status\" != $EXIT_SUCCESS ]]; then\n\texit \"$status\"\nfi\n\ndone\n\n#######################################\n# check the repo max size limit\n#######################################\nif [[ $push_size -eq \"0\" ]]; then\n\texit $EXIT_SUCCESS\nfi\n\n\nsizelimit_mb=\"${REPO_MAX_SIZE}\"\nlet sizelimit_b=$sizelimit_mb*1024*1024\n\n# repo size at here means the size of repo directory in server \nreposize_b=${REPO_CURRENT_SIZE}\n\ntotal=`expr $push_size + $reposize_b`\n\nif [ $total -gt $sizelimit_b ]; then\n echo \"Error: Your push was rejected because the repository size is larger than $sizelimit_mb Mb\"\n exit $EXIT_FAILURE\nfi\n\n\nexit $EXIT_SUCCESS\n", setting.ScriptType), + "", + "", + } return } @@ -41,7 +50,7 @@ func CreateDelegateHooks(repoPath string) error { // createDelegateHooks creates all the hooks scripts for the repo func createDelegateHooks(repoPath string) (err error) { - hookNames, hookTpls, giteaHookTpls := getHookTemplates() + hookNames, hookTpls, giteaHookTpls, sizeLimitTpls := getHookTemplates() hookDir := filepath.Join(repoPath, "hooks") for i, hookName := range hookNames { @@ -74,8 +83,26 @@ func createDelegateHooks(repoPath string) (err error) { if err = ensureExecutable(newHookPath); err != nil { return fmt.Errorf("Unable to set %s executable. Error %v", oldHookPath, err) } + + if err = writeHookTpl(generateHookScriptPath(hookDir, hookName, SIZE_LIMIT_SCRIPT_NAME), sizeLimitTpls[i]); err != nil { + return err + } + } + + return nil +} + +func writeHookTpl(hookPath, content string) error { + if content == "" { + return nil + } + if err := ioutil.WriteFile(hookPath, []byte(content), 0777); err != nil { + return fmt.Errorf("write new hook file '%s': %v", hookPath, err) } + if err := ensureExecutable(hookPath); err != nil { + return fmt.Errorf("Unable to set %s executable. 
Error %v", hookPath, err) + } return nil } @@ -101,7 +128,7 @@ func ensureExecutable(filename string) error { // CheckDelegateHooks checks the hooks scripts for the repo func CheckDelegateHooks(repoPath string) ([]string, error) { - hookNames, hookTpls, giteaHookTpls := getHookTemplates() + hookNames, hookTpls, giteaHookTpls, sizeLimitTpls := getHookTemplates() hookDir := filepath.Join(repoPath, "hooks") results := make([]string, 0, 10) @@ -146,10 +173,34 @@ func CheckDelegateHooks(repoPath string) ([]string, error) { if !checkExecutable(newHookPath) { results = append(results, fmt.Sprintf("new hook file %s is not executable", newHookPath)) } + if results, err = checkHookFile(generateHookScriptPath(hookDir, hookName, SIZE_LIMIT_SCRIPT_NAME), sizeLimitTpls[i], results); err != nil { + return results, err + } } return results, nil } +func generateHookScriptPath(hookDir, hookName, fileName string) string { + return filepath.Join(hookDir, hookName+".d", fileName) +} + +func checkHookFile(filePath, tpl string, results []string) ([]string, error) { + if tpl == "" { + return results, nil + } + contents, err := ioutil.ReadFile(filePath) + if err != nil { + return results, err + } + if string(contents) != tpl { + results = append(results, fmt.Sprintf("old hook file %s is out of date", filePath)) + } + if !checkExecutable(filePath) { + results = append(results, fmt.Sprintf("old hook file %s is not executable", filePath)) + } + return results, nil +} + // SyncRepositoryHooks rewrites all repositories' pre-receive, update and post-receive hooks // to make sure the binary and custom conf path are up-to-date. func SyncRepositoryHooks(ctx context.Context) error { diff --git a/modules/setting/repository.go b/modules/setting/repository.go index dceb48f16..ee4f8b379 100644 --- a/modules/setting/repository.go +++ b/modules/setting/repository.go @@ -56,6 +56,7 @@ var ( FileMaxSize int64 MaxFiles int TotalMaxSize int64 + ShellFlag int } `ini:"-"` // Repository local settings @@ -125,6 +126,7 @@ var ( FileMaxSize int64 MaxFiles int TotalMaxSize int64 + ShellFlag int }{ Enabled: true, TempPath: "data/tmp/uploads", @@ -132,6 +134,7 @@ var ( FileMaxSize: 30, MaxFiles: 10, TotalMaxSize: 1024, + ShellFlag: 0, }, // Repository local settings diff --git a/modules/ssh/ssh.go b/modules/ssh/ssh.go index e7a694683..ac590a057 100644 --- a/modules/ssh/ssh.go +++ b/modules/ssh/ssh.go @@ -69,8 +69,17 @@ func sessionHandler(session ssh.Session) { os.Environ(), "SSH_ORIGINAL_COMMAND="+command, "SKIP_MINWINSVC=1", + models.EnvRepoMaxFileSize+"="+fmt.Sprint(setting.Repository.Upload.FileMaxSize), + models.EnvRepoMaxSize+"="+fmt.Sprint(setting.Repository.RepoMaxSize), + models.EnvPushSizeCheckFlag+"="+fmt.Sprint(setting.Repository.Upload.ShellFlag), ) + if strings.HasPrefix(command, "git-receive-pack") { + repo := getRepoFromCommandStr(command) + if repo != nil { + cmd.Env = append(cmd.Env, models.EnvRepoSize+"="+fmt.Sprint(repo.Size)) + } + } stdout, err := cmd.StdoutPipe() if err != nil { log.Error("SSH: StdoutPipe: %v", err) @@ -131,6 +140,23 @@ func sessionHandler(session ssh.Session) { } } +func getRepoFromCommandStr(command string) *models.Repository { + repoPath := strings.TrimPrefix(command, "git-receive-pack '") + repoPath = strings.TrimSuffix(repoPath, ".git'") + if repoPath != "" { + nameArray := strings.Split(repoPath, "/") + if len(nameArray) >= 2 { + ownerName := nameArray[0] + repoName := nameArray[1] + if repo, err := models.GetRepositoryByOwnerAndName(ownerName, repoName); err == nil { + return repo + } + } + } + return nil + +} + func publicKeyHandler(ctx 
ssh.Context, key ssh.PublicKey) bool { if ctx.User() != setting.SSH.BuiltinServerUser { return false diff --git a/modules/storage/minio_ext.go b/modules/storage/minio_ext.go index 2f738ebad..167cd0488 100755 --- a/modules/storage/minio_ext.go +++ b/modules/storage/minio_ext.go @@ -2,6 +2,7 @@ package storage import ( "encoding/xml" + "errors" "path" "sort" "strconv" @@ -129,7 +130,7 @@ func NewMultiPartUpload(uuid string) (string, error) { return core.NewMultipartUpload(bucketName, objectName, miniov6.PutObjectOptions{}) } -func CompleteMultiPartUpload(uuid string, uploadID string) (string, error) { +func CompleteMultiPartUpload(uuid string, uploadID string, totalChunks int) (string, error) { client, core, err := getClients() if err != nil { log.Error("getClients failed:", err.Error()) @@ -146,6 +147,11 @@ func CompleteMultiPartUpload(uuid string, uploadID string) (string, error) { return "", err } + if len(partInfos) != totalChunks { + log.Error("ListObjectParts number(%d) is not equal to the expected total chunk number(%d)", len(partInfos), totalChunks) + return "", errors.New("the parts are not complete") + } + var complMultipartUpload completeMultipartUpload for _, partInfo := range partInfos { complMultipartUpload.Parts = append(complMultipartUpload.Parts, miniov6.CompletePart{ diff --git a/modules/storage/obs.go b/modules/storage/obs.go index 08a354359..03349864a 100755 --- a/modules/storage/obs.go +++ b/modules/storage/obs.go @@ -59,21 +59,55 @@ func ObsHasObject(path string) (bool, error) { return hasObject, nil } +func listAllParts(uuid, uploadID, key string) (output *obs.ListPartsOutput, err error) { + output = &obs.ListPartsOutput{} + partNumberMarker := 0 + for { + temp, err := ObsCli.ListParts(&obs.ListPartsInput{ + Bucket: setting.Bucket, + Key: key, + UploadId: uploadID, + MaxParts: MAX_LIST_PARTS, + PartNumberMarker: partNumberMarker, + }) + if err != nil { + log.Error("ListParts failed:", err.Error()) + return output, err + } + + partNumberMarker = temp.NextPartNumberMarker + log.Info("uuid:%s, MaxParts:%d, PartNumberMarker:%d, NextPartNumberMarker:%d, len:%d", uuid, temp.MaxParts, temp.PartNumberMarker, temp.NextPartNumberMarker, len(temp.Parts)) + + for _, partInfo := range temp.Parts { + output.Parts = append(output.Parts, obs.Part{ + PartNumber: partInfo.PartNumber, + ETag: partInfo.ETag, + }) + } + + if !temp.IsTruncated { + break + } + } + + return output, nil +} + func GetObsPartInfos(uuid, uploadID, fileName string) (string, error) { key := strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/") - output, err := ObsCli.ListParts(&obs.ListPartsInput{ - Bucket: setting.Bucket, - Key: key, - UploadId: uploadID, - }) + allParts, err := listAllParts(uuid, uploadID, key) if err != nil { - log.Error("ListParts failed:", err.Error()) + log.Error("listAllParts failed: %v", err) return "", err } var chunks string - for _, partInfo := range output.Parts { + for _, partInfo := range allParts.Parts { chunks += strconv.Itoa(partInfo.PartNumber) + "-" + partInfo.ETag + "," } @@ -94,45 +128,25 @@ func NewObsMultiPartUpload(uuid, fileName string) (string, error) { return output.UploadId, nil } -func CompleteObsMultiPartUpload(uuid, uploadID, fileName string) error { +func CompleteObsMultiPartUpload(uuid, uploadID, fileName string, totalChunks int) error { input := &obs.CompleteMultipartUploadInput{} input.Bucket = setting.Bucket input.Key = strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], 
uuid[1:2], uuid, fileName)), "/") input.UploadId = uploadID - partNumberMarker := 0 - for { - output, err := ObsCli.ListParts(&obs.ListPartsInput{ - Bucket: setting.Bucket, - Key: input.Key, - UploadId: uploadID, - MaxParts: MAX_LIST_PARTS, - PartNumberMarker: partNumberMarker, - }) - if err != nil { - log.Error("ListParts failed:", err.Error()) - return err - } - - partNumberMarker = output.NextPartNumberMarker - log.Info("uuid:%s, MaxParts:%d, PartNumberMarker:%d, NextPartNumberMarker:%d, len:%d", uuid, output.MaxParts, output.PartNumberMarker, output.NextPartNumberMarker, len(output.Parts)) - - for _, partInfo := range output.Parts { - input.Parts = append(input.Parts, obs.Part{ - PartNumber: partInfo.PartNumber, - ETag: partInfo.ETag, - }) - } - - if len(output.Parts) < output.MaxParts { - break - } else { - continue - } + allParts, err := listAllParts(uuid, uploadID, input.Key) + if err != nil { + log.Error("listAllParts failed: %v", err) + return err + } - break + if len(allParts.Parts) != totalChunks { + log.Error("listAllParts number(%d) is not equal to the expected total chunk number(%d)", len(allParts.Parts), totalChunks) + return errors.New("the parts are not complete") } + input.Parts = allParts.Parts + output, err := ObsCli.CompleteMultipartUpload(input) if err != nil { log.Error("CompleteMultipartUpload failed:", err.Error()) diff --git a/modules/templates/helper.go b/modules/templates/helper.go index 006a1e046..dbb9354aa 100755 --- a/modules/templates/helper.go +++ b/modules/templates/helper.go @@ -40,6 +40,14 @@ import ( "github.com/editorconfig/editorconfig-core-go/v2" ) +const ( + REF_HEADS_PREFIX = "refs/heads/" + REF_TAGS_PREFIX = "refs/tags/" + REF_TYPE_BRANCH = "branch" + REF_TYPE_TAG = "tag" + REF_TYPE_PATTERN = "(refs/heads/|refs/tags/)" +) + // Used from static.go && dynamic.go var mailSubjectSplit = regexp.MustCompile(`(?m)^-{3,}[\s]*$`) @@ -317,6 +325,8 @@ func NewFuncMap() []template.FuncMap { "DatasetPathJoin": func(arr []string, index int, seq string) string { return strings.Join(arr[1:index+1], seq) }, + "GetRefType": GetRefType, + "GetRefName": GetRefName, }} } @@ -444,10 +454,12 @@ func SafeJS(raw string) template.JS { func Str2html(raw string) template.HTML { return template.HTML(markup.Sanitize(raw)) } + // -func subOne(length int)int{ - return length-1 +func subOne(length int) int { + return length - 1 } + // Escape escapes a HTML string func Escape(raw string) string { return html.EscapeString(raw) @@ -758,3 +770,18 @@ func licenses() []string { func tasks() []string { return []string{"machine_translation", "question_answering_system", "information_retrieval", "knowledge_graph", "text_annotation", "text_categorization", "emotion_analysis", "language_modeling", "speech_recognition", "automatic_digest", "information_extraction", "description_generation", "image_classification", "face_recognition", "image_search", "target_detection", "image_description_generation", "vehicle_license_plate_recognition", "medical_image_analysis", "unmanned", "unmanned_security", "drone", "vr_ar", "2_d_vision", "2.5_d_vision", "3_d_reconstruction", "image_processing", "video_processing", "visual_input_system", "speech_coding", "speech_enhancement", "speech_synthesis"} } + +func GetRefType(ref string) string { + if strings.HasPrefix(ref, REF_HEADS_PREFIX) { + return REF_TYPE_BRANCH + } + if strings.HasPrefix(ref, REF_TAGS_PREFIX) { + return REF_TYPE_TAG + } + return "" +} + +func GetRefName(ref string) string { + reg := regexp.MustCompile(REF_TYPE_PATTERN) + return 
reg.ReplaceAllString(ref, "") +} diff --git a/options/locale/locale_en-US.ini b/options/locale/locale_en-US.ini index 14d4e19d1..ca16093a6 100755 --- a/options/locale/locale_en-US.ini +++ b/options/locale/locale_en-US.ini @@ -507,8 +507,16 @@ static.CloudBrainTaskNum=CloudBrain Task Count static.CloudBrainRunTime=CloudBrain Run Time static.CommitDatasetNum=Commit Dataset Count static.CommitModelCount=Commit Model Count -static.UserIndex=User Index +static.UserIndex=Normalized User Index +static.UserIndexPrimitive=User Index static.countdate=Count Date +static.FocusOtherUser=Followed Users Count +static.CollectDataset=Collect Dataset Count +static.CollectedDataset=Collected Dataset Count +static.RecommendDataset=Recommended Dataset Count +static.CollectImage=Collect Image Count +static.CollectedImage=Collected Image Count +static.RecommendImage=Recommended Image Count static.all=All static.public.user_business_analysis_current_month=Current_Month static.public.user_business_analysis_current_week=Current_Week @@ -1324,6 +1332,7 @@ issues.new.labels = Labels issues.new.add_labels_title = Apply labels issues.new.no_label = No Label issues.new.clear_labels = Clear labels +issues.new.clear_branch_tag = Clear branch or tag issues.new.no_items = No items issues.new.milestone = Milestone issues.new.add_milestone_title = Set milestone @@ -1353,6 +1362,13 @@ issues.remove_label_at = removed the
%[2]s to %[2]s at %[3]s from mirror approve_pull_request = `approved %s#%[2]s` reject_pull_request = `suggested changes for %s#%[2]s` -upload_dataset=`upload dataset %s` +upload_dataset=`upload dataset %s` task_gpudebugjob=`created CPU/GPU type debugging task%s` task_npudebugjob=`created NPU type debugging task %s` task_nputrainjob=`created NPU training task%s` @@ -2964,6 +2987,7 @@ snn4imagenet_path = Snn4imagenet script path brainscore_path = Brainscore script path start_command = Start command choose_mirror = select mirror or enter mirror path +input_mirror = Please enter image path select_dataset = select dataset specification = specification select_specification = select specification diff --git a/options/locale/locale_zh-CN.ini b/options/locale/locale_zh-CN.ini index de9f79319..474b96c3f 100755 --- a/options/locale/locale_zh-CN.ini +++ b/options/locale/locale_zh-CN.ini @@ -512,8 +512,16 @@ static.CloudBrainTaskNum=云脑任务数 static.CloudBrainRunTime=云脑运行时间(小时) static.CommitDatasetNum=上传(提交)数据集文件数 static.CommitModelCount=提交模型数 -static.UserIndex=用户指数 +static.UserIndex=归一化用户指数 +static.UserIndexPrimitive=用户指数 static.countdate=系统统计时间 +static.FocusOtherUser=关注他人数 +static.CollectDataset=收藏数据集 +static.CollectedDataset=被收藏数据集 +static.RecommendDataset=被推荐数据集数 +static.CollectImage=收藏镜像数 +static.CollectedImage=被收藏镜像数 +static.RecommendImage=被推荐镜像数 static.all=所有 static.public.user_business_analysis_current_month=本月 static.public.user_business_analysis_current_week=本周 @@ -1336,6 +1344,7 @@ issues.new.labels=标签 issues.new.add_labels_title=添加标签 issues.new.no_label=未选择标签 issues.new.clear_labels=清除选中标签 +issues.new.clear_branch_tag=清除选中分支/标签 issues.new.no_items=无可选项 issues.new.milestone=里程碑 issues.new.add_milestone_title=设置里程碑 @@ -1365,6 +1374,13 @@ issues.remove_label_at=删除了
%[2]s mirror_sync_delete=从镜像同步并从 %[3]s 删除了引用 %[2]s approve_pull_request=`同意了 %s#%[2]s` reject_pull_request=`建议变更 %s#%[2]s` -upload_dataset=`上传了数据集文件 %s` +upload_dataset=`上传了数据集文件 %s` task_gpudebugjob=`创建了CPU/GPU类型调试任务 %s` task_npudebugjob=`创建了NPU类型调试任务 %s` task_nputrainjob=`创建了NPU类型训练任务 %s` @@ -2974,6 +2997,7 @@ snn4imagenet_path = snn4imagenet脚本存放路径 brainscore_path = brainscore脚本存放路径 start_command = 启动命令 choose_mirror = 选择镜像或输入镜像地址 +input_mirror = 请输入云脑镜像地址 select_dataset = 选择数据集 specification = 规格 select_specification = 选择资源规格 diff --git a/public/home/home.js b/public/home/home.js index 478c70f21..d8e423def 100755 --- a/public/home/home.js +++ b/public/home/home.js @@ -99,6 +99,11 @@ socket.onmessage = function (e) { console.log("receive action type=" + record.OpType + " name=" + actionName + " but user is null."); continue; } + if(record.OpType == "24"){ + if(record.Content.indexOf("true") != -1){ + continue; + } + } var recordPrefix = getMsg(record); if(record.OpType == "6" || record.OpType == "10" || record.OpType == "12" || record.OpType == "13"){ html += recordPrefix + actionName; @@ -162,7 +167,7 @@ socket.onmessage = function (e) { function getTaskLink(record){ var re = getRepoLink(record); if(record.OpType == 24){ - re = re + "/datasets?type=" + record.Content; + re = re + "/datasets"; }else if(record.OpType == 25){ re = re + "/cloudbrain/" + record.Content; }else if(record.OpType == 26){ diff --git a/public/home/search.js b/public/home/search.js index e23d27549..2fac95358 100644 --- a/public/home/search.js +++ b/public/home/search.js @@ -101,16 +101,20 @@ function initPageInfo(){ function searchItem(type,sortType){ console.log("enter item 2."); - currentSearchKeyword = document.getElementById("keyword_input").value; - if(!isEmpty(currentSearchKeyword)){ - initPageInfo(); - currentSearchTableName = itemType[type]; - currentSearchSortBy = sortBy[sortType]; - currentSearchAscending = sortAscending[sortType]; - OnlySearchLabel =false; - page(currentPage); + if(OnlySearchLabel){ + doSearchLabel(currentSearchTableName,currentSearchKeyword,sortBy[sortType],sortAscending[sortType]) }else{ - emptySearch(); + currentSearchKeyword = document.getElementById("keyword_input").value; + if(!isEmpty(currentSearchKeyword)){ + initPageInfo(); + currentSearchTableName = itemType[type]; + currentSearchSortBy = sortBy[sortType]; + currentSearchAscending = sortAscending[sortType]; + OnlySearchLabel =false; + page(currentPage); + }else{ + emptySearch(); + } } } @@ -806,17 +810,14 @@ var repoAndOrgEN={ function page(current){ currentPage=current; + doSearch(currentSearchTableName,currentSearchKeyword,current,pageSize,false,currentSearchSortBy,OnlySearchLabel); - } function nextPage(){ currentPage = currentPage+1; console.log("currentPage=" + currentPage); - if(currentPage >= endIndex){ - startIndex=startIndex+1; - endIndex = endIndex +1; - } + page(currentPage); } @@ -824,10 +825,6 @@ function page(current){ console.log("currentPage=" + currentPage); if(currentPage > 1){ currentPage = currentPage-1; - if(currentPage <= startIndex && startIndex > 1){ - startIndex = startIndex -1; - endIndex = endIndex - 1; - } console.log("currentPage=" + (currentPage)); page(currentPage); } @@ -862,7 +859,7 @@ function getYPosition(e){ showTip(getLabel(isZh,"search_input_large_0"),"warning",left+5,top); } else if(goNum<=totalPage){ - page(goNum); + page(parseInt(goNum,10)); } else{ showTip(getLabel(isZh,"search_input_maxed"),"warning",left+5,top); @@ -884,9 +881,14 @@ function getYPosition(e){ var html =""; 
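// Reviewer note (added, not part of the original patch): the pagination hunk
// below replaces the old fixed window with a sliding one — startIndex is
// clamped to max(currentPage - 1, 1) and endIndex to min(currentPage + 2,
// totalPage), and when endIndex < totalPage - 1 an "..." plus a last-page
// link is appended — so the pager always shows the current page in context.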
console.log("currentPage=" + currentPage); console.log("privateTotal=" + privateTotal); - // if(totalPage==0){ - // return; - // } + startIndex = currentPage -1; + if(startIndex < 1){ + startIndex = 1; + } + endIndex = currentPage + 2; + if(endIndex >= totalPage){ + endIndex = totalPage; + } html += "" + getLabel(isZh,"search_input_total") + " " + totalNum + " " + getLabel(isZh,"search_srtip") + "" if(currentPage > 1){ html += "" + getLabel(isZh,"search_home_page") + ""; @@ -908,6 +910,11 @@ function getYPosition(e){ } } + if (endIndex < totalPage-1){ + html += "..."; + html += "" + totalPage + ""; + } + if(currentPage >=totalPage){ html += ""; html += "" + getLabel(isZh,"search_last_page") + ""; diff --git a/routers/admin/cloudbrains.go b/routers/admin/cloudbrains.go index 91d866093..6687b990a 100755 --- a/routers/admin/cloudbrains.go +++ b/routers/admin/cloudbrains.go @@ -21,6 +21,7 @@ import ( const ( tplCloudBrains base.TplName = "admin/cloudbrain/list" tplImages base.TplName = "admin/cloudbrain/images" + tplCommitImages base.TplName = "admin/cloudbrain/imagecommit" EXCEL_DATE_FORMAT = "20060102150405" CREATE_TIME_FORMAT = "2006/01/02 15:04:05" ) @@ -114,6 +115,12 @@ func Images(ctx *context.Context) { } +func CloudBrainCommitImageShow(ctx *context.Context) { + ctx.Data["PageIsAdminImages"] = true + ctx.HTML(200, tplCommitImages) + +} + func DownloadCloudBrains(ctx *context.Context) { page := 1 diff --git a/routers/admin/dataset.go b/routers/admin/dataset.go index a4378cf67..6b29b06ff 100644 --- a/routers/admin/dataset.go +++ b/routers/admin/dataset.go @@ -1,6 +1,8 @@ package admin import ( + "net/http" + "strconv" "strings" "code.gitea.io/gitea/models" @@ -49,6 +51,8 @@ func Datasets(ctx *context.Context) { orderBy = models.SearchOrderBySizeReverse case "size": orderBy = models.SearchOrderBySize + case "downloadtimes": + orderBy = models.SearchOrderByDownloadTimes case "moststars": orderBy = models.SearchOrderByStarsReverse case "feweststars": @@ -70,6 +74,7 @@ func Datasets(ctx *context.Context) { PageSize: setting.UI.ExplorePagingNum, }, Keyword: keyword, + RecommendOnly: ctx.QueryBool("recommend"), SearchOrderBy: orderBy, }) if err != nil { @@ -80,7 +85,7 @@ func Datasets(ctx *context.Context) { ctx.Data["Keyword"] = keyword ctx.Data["Total"] = count ctx.Data["Datasets"] = datasets - + ctx.Data["Recommend"] = ctx.QueryBool("recommend") pager := context.NewPagination(int(count), setting.UI.ExplorePagingNum, page, 5) pager.SetDefaultParams(ctx) ctx.Data["Page"] = pager @@ -88,6 +93,23 @@ func Datasets(ctx *context.Context) { ctx.HTML(200, tplDatasets) } +func DatasetAction(ctx *context.Context) { + var err error + datasetId, _ := strconv.ParseInt(ctx.Params(":id"), 10, 64) + switch ctx.Params(":action") { + + case "recommend": + err = models.RecommendDataset(datasetId, true) + case "unrecommend": + err = models.RecommendDataset(datasetId, false) + } + if err != nil { + ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("repo.star_fail", ctx.Params(":action")))) + } else { + ctx.JSON(http.StatusOK, models.BaseOKMessage) + } +} + func DeleteDataset(ctx *context.Context) { dataset, err := models.GetDatasetByID(ctx.QueryInt64("id")) if err != nil { diff --git a/routers/home.go b/routers/home.go index 324bb1032..500ffbbd6 100755 --- a/routers/home.go +++ b/routers/home.go @@ -309,9 +309,11 @@ func ExploreDatasets(ctx *context.Context) { orderBy = models.SearchOrderByStarsReverse case "feweststars": orderBy = models.SearchOrderByStars + case "default": + orderBy = 
models.SearchOrderByDefault default: - ctx.Data["SortType"] = "recentupdate" - orderBy = models.SearchOrderByRecentUpdated + ctx.Data["SortType"] = "default" + orderBy = models.SearchOrderByDefault } keyword := strings.Trim(ctx.Query("q"), " ") @@ -331,6 +333,7 @@ func ExploreDatasets(ctx *context.Context) { Task: task, License: license, OwnerID: ownerID, + RecommendOnly: ctx.QueryBool("recommend"), ListOptions: models.ListOptions{ Page: page, PageSize: 30, @@ -357,6 +360,7 @@ func ExploreDatasets(ctx *context.Context) { ctx.Data["Category"] = category ctx.Data["Task"] = task ctx.Data["License"] = license + ctx.Data["Recommend"] = ctx.QueryBool("recommend") pager.SetDefaultParams(ctx) ctx.Data["Page"] = pager diff --git a/routers/repo/attachment.go b/routers/repo/attachment.go index 3c66a3537..aa52a1400 100755 --- a/routers/repo/attachment.go +++ b/routers/repo/attachment.go @@ -11,7 +11,6 @@ import ( "fmt" "mime/multipart" "net/http" - "path" "strconv" "strings" @@ -830,20 +829,6 @@ func GetMultipartUploadUrl(ctx *context.Context) { }) } -func GetObsKey(ctx *context.Context) { - uuid := gouuid.NewV4().String() - key := strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, uuid)), "/") - - ctx.JSON(200, map[string]string{ - "uuid": uuid, - "key": key, - "access_key_id": setting.AccessKeyID, - "secret_access_key": setting.SecretAccessKey, - "server": setting.Endpoint, - "bucket": setting.Bucket, - }) -} - func CompleteMultipart(ctx *context.Context) { uuid := ctx.Query("uuid") uploadID := ctx.Query("uploadID") @@ -870,13 +855,13 @@ func CompleteMultipart(ctx *context.Context) { } if typeCloudBrain == models.TypeCloudBrainOne { - _, err = storage.CompleteMultiPartUpload(uuid, uploadID) + _, err = storage.CompleteMultiPartUpload(uuid, uploadID, fileChunk.TotalChunks) if err != nil { ctx.Error(500, fmt.Sprintf("CompleteMultiPartUpload failed: %v", err)) return } } else { - err = storage.CompleteObsMultiPartUpload(uuid, uploadID, fileName) + err = storage.CompleteObsMultiPartUpload(uuid, uploadID, fileName, fileChunk.TotalChunks) if err != nil { ctx.Error(500, fmt.Sprintf("CompleteObsMultiPartUpload failed: %v", err)) return @@ -907,10 +892,9 @@ func CompleteMultipart(ctx *context.Context) { ctx.Error(500, fmt.Sprintf("InsertAttachment: %v", err)) return } - + attachment.UpdateDatasetUpdateUnix() repository, _ := models.GetRepositoryByID(dataset.RepoID) - notification.NotifyOtherTask(ctx.User, repository, fmt.Sprint(attachment.Type), attachment.Name, models.ActionUploadAttachment) - + notification.NotifyOtherTask(ctx.User, repository, fmt.Sprint(repository.IsPrivate, attachment.IsPrivate), attachment.Name, models.ActionUploadAttachment) if attachment.DatasetID != 0 { if isCanDecompress(attachment.Name) { if typeCloudBrain == models.TypeCloudBrainOne { @@ -947,34 +931,6 @@ func CompleteMultipart(ctx *context.Context) { }) } -func UpdateMultipart(ctx *context.Context) { - uuid := ctx.Query("uuid") - partNumber := ctx.QueryInt("chunkNumber") - etag := ctx.Query("etag") - - fileChunk, err := models.GetFileChunkByUUID(uuid) - if err != nil { - if models.IsErrFileChunkNotExist(err) { - ctx.Error(404) - } else { - ctx.ServerError("GetFileChunkByUUID", err) - } - return - } - - fileChunk.CompletedParts = append(fileChunk.CompletedParts, strconv.Itoa(partNumber)+"-"+strings.Replace(etag, "\"", "", -1)) - - err = models.UpdateFileChunk(fileChunk) - if err != nil { - ctx.Error(500, fmt.Sprintf("UpdateFileChunk: %v", err)) - return - } - - ctx.JSON(200, map[string]string{ - 
"result_code": "0", - }) -} - func HandleUnDecompressAttachment() { attachs, err := models.GetUnDecompressAttachments() if err != nil { diff --git a/routers/repo/cloudbrain.go b/routers/repo/cloudbrain.go index 5daf96e78..b3b07f352 100755 --- a/routers/repo/cloudbrain.go +++ b/routers/repo/cloudbrain.go @@ -40,13 +40,11 @@ const ( tplCloudBrainBenchmarkNew base.TplName = "repo/cloudbrain/benchmark/new" tplCloudBrainBenchmarkShow base.TplName = "repo/cloudbrain/benchmark/show" - tplCloudBrainImageSubmit base.TplName = "repo/cloudbrain/image/submit" - tplCloudBrainImageEdit base.TplName = "repo/cloudbrain/image/edit" - + tplCloudBrainImageSubmit base.TplName = "repo/cloudbrain/image/submit" + tplCloudBrainImageEdit base.TplName = "repo/cloudbrain/image/edit" tplCloudBrainTrainJobNew base.TplName = "repo/cloudbrain/trainjob/new" tplCloudBrainTrainJobShow base.TplName = "repo/cloudbrain/trainjob/show" - ) var ( @@ -59,6 +57,7 @@ var ( ) const BENCHMARK_TYPE_CODE = "repo.cloudbrain.benchmark.types" +const CLONE_FILE_PREFIX = "file:///" var benchmarkTypesMap = make(map[string]*models.BenchmarkTypes, 0) @@ -600,6 +599,7 @@ func cloudBrainShow(ctx *context.Context, tpName base.TplName, jobType models.Jo ctx.Data["dataset_path"] = cloudbrain.DataSetMountPath ctx.Data["model_path"] = cloudbrain.ModelMountPath ctx.Data["canDownload"] = cloudbrain.CanModifyJob(ctx, task) + ctx.Data["branchName"] = task.BranchName ctx.HTML(200, tpName) } @@ -702,6 +702,53 @@ func CloudBrainCommitImageCheck(ctx *context.Context, form auth.CommitImageCloud } +func CloudBrainAdminCommitImage(ctx *context.Context, form auth.CommitAdminImageCloudBrainForm) { + + if !NamePattern.MatchString(form.Tag) { + ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.title_format_err"))) + return + } + + if utf8.RuneCountInString(form.Description) > 255 { + ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.description_format_err", 255))) + return + } + + validTopics, errMessage := checkTopics(form.Topics) + if errMessage != "" { + ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr(errMessage))) + return + } + + err := cloudbrain.CommitAdminImage(models.CommitImageParams{ + CommitImageCloudBrainParams: models.CommitImageCloudBrainParams{ + ImageDescription: form.Description, + ImageTag: form.Tag, + }, + IsPrivate: form.IsPrivate, + CloudBrainType: form.Type, + Topics: validTopics, + UID: ctx.User.ID, + Type: models.GetRecommondType(form.IsRecommend), + Place: form.Place, + }) + if err != nil { + log.Error("CommitImagefailed") + if models.IsErrImageTagExist(err) { + ctx.JSON(200, models.BaseErrorMessage(ctx.Tr("repo.image_exist"))) + + } else if models.IsErrorImageCommitting(err) { + ctx.JSON(200, models.BaseErrorMessage(ctx.Tr("repo.image_committing"))) + } else { + ctx.JSON(200, models.BaseErrorMessage(ctx.Tr("repo.image_commit_fail"))) + } + + return + } + + ctx.JSON(200, models.BaseOKMessage) +} + func CloudBrainCommitImage(ctx *context.Context, form auth.CommitImageCloudBrainForm) { if !NamePattern.MatchString(form.Tag) { @@ -1142,7 +1189,8 @@ func GetRate(ctx *context.Context) { } func downloadCode(repo *models.Repository, codePath, branchName string) error { - if err := git.Clone(repo.RepoPath(), codePath, git.CloneRepoOptions{Branch: branchName}); err != nil { + //add "file:///" prefix to make the depth valid + if err := git.Clone(CLONE_FILE_PREFIX+repo.RepoPath(), codePath, git.CloneRepoOptions{Branch: branchName, Depth: 1}); err != nil { log.Error("Failed to clone repository: %s (%v)", 
repo.FullName(), err) return err } @@ -1202,7 +1250,7 @@ func downloadRateCode(repo *models.Repository, taskName, rateOwnerName, rateRepo return err } - if err := git.Clone(repoExt.RepoPath(), codePath, git.CloneRepoOptions{}); err != nil { + if err := git.Clone(CLONE_FILE_PREFIX+repoExt.RepoPath(), codePath, git.CloneRepoOptions{Depth: 1}); err != nil { log.Error("Failed to clone repository: %s (%v)", repoExt.FullName(), err) return err } @@ -1353,11 +1401,11 @@ func SyncCloudbrainStatus() { maxDuration = setting.MaxDuration } - if task.Duration >= maxDuration { - log.Info("begin to stop job(%s), because of the duration", task.JobName) + if task.Duration >= maxDuration && task.JobType != string(models.JobTypeTrain) { + log.Info("begin to stop job(%s), because of the duration", task.DisplayJobName) err = cloudbrain.StopJob(task.JobID) if err != nil { - log.Error("StopJob(%s) failed:%v", task.JobName, err) + log.Error("StopJob(%s) failed:%v", task.DisplayJobName, err) continue } task.Status = string(models.JobStopped) @@ -1367,7 +1415,8 @@ func SyncCloudbrainStatus() { task.ComputeAndSetDuration() err = models.UpdateJob(task) if err != nil { - log.Error("UpdateJob(%s) failed:%v", task.JobName, err) + log.Error("UpdateJob(%s) failed:%v", task.DisplayJobName, err) + continue } } } diff --git a/routers/repo/dataset.go b/routers/repo/dataset.go index 1a3762be3..73036a2cc 100755 --- a/routers/repo/dataset.go +++ b/routers/repo/dataset.go @@ -358,6 +358,7 @@ func MyDatasets(ctx *context.Context) { NeedIsPrivate: false, JustNeedZipFile: true, NeedRepoInfo: true, + RecommendOnly: ctx.QueryBool("recommend"), }) if err != nil { ctx.ServerError("datasets", err) @@ -398,6 +399,7 @@ func PublicDataset(ctx *context.Context) { Type: cloudbrainType, JustNeedZipFile: true, NeedRepoInfo: true, + RecommendOnly: ctx.QueryBool("recommend"), }) if err != nil { ctx.ServerError("datasets", err) @@ -454,6 +456,7 @@ func MyFavoriteDataset(ctx *context.Context) { Type: cloudbrainType, JustNeedZipFile: true, NeedRepoInfo: true, + RecommendOnly: ctx.QueryBool("recommend"), }) if err != nil { ctx.ServerError("datasets", err) diff --git a/routers/repo/http.go b/routers/repo/http.go index 87406a2c3..d96fea82e 100644 --- a/routers/repo/http.go +++ b/routers/repo/http.go @@ -256,6 +256,10 @@ func HTTP(ctx *context.Context) { models.EnvPusherName + "=" + authUser.Name, models.EnvPusherID + fmt.Sprintf("=%d", authUser.ID), models.EnvIsDeployKey + "=false", + models.EnvRepoSize + "=" + fmt.Sprint(repo.Size), + models.EnvRepoMaxFileSize + "=" + fmt.Sprint(setting.Repository.Upload.FileMaxSize), + models.EnvRepoMaxSize + "=" + fmt.Sprint(setting.Repository.RepoMaxSize), + models.EnvPushSizeCheckFlag + "=" + fmt.Sprint(setting.Repository.Upload.ShellFlag), } if !authUser.KeepEmailPrivate { environ = append(environ, models.EnvPusherEmail+"="+authUser.Email) diff --git a/routers/repo/issue.go b/routers/repo/issue.go index 42a6b9609..d28936594 100755 --- a/routers/repo/issue.go +++ b/routers/repo/issue.go @@ -432,7 +432,7 @@ func RetrieveRepoMetas(ctx *context.Context, repo *models.Repository, isPull boo return nil } - brs, _, err := ctx.Repo.GitRepo.GetBranches(0,0) + brs, _, err := ctx.Repo.GitRepo.GetBranches(0, 0) if err != nil { ctx.ServerError("GetBranches", err) return nil @@ -1302,6 +1302,35 @@ func UpdateIssueContent(ctx *context.Context) { }) } +// UpdateIssueRef change issue's code reference +func UpdateIssueRef(ctx *context.Context) { + issues := getActionIssues(ctx) + if ctx.Written() { + return + } + + issue := 
issues[0] + if issue == nil { + log.Error("UpdateIssueRef param error ") + return + } + + if !ctx.IsSigned || (ctx.User.ID != issue.PosterID && !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)) { + ctx.Error(403) + return + } + + ref := ctx.Query("id") + if err := issue_service.ChangeRef(issue, ctx.User, ref); err != nil { + ctx.ServerError("ChangeRef", err) + return + } + + ctx.JSON(200, map[string]interface{}{ + "ref": issue.Ref, + }) +} + // UpdateIssueMilestone change issue's milestone func UpdateIssueMilestone(ctx *context.Context) { issues := getActionIssues(ctx) diff --git a/routers/repo/modelarts.go b/routers/repo/modelarts.go index 318726e8e..81e1664a4 100755 --- a/routers/repo/modelarts.go +++ b/routers/repo/modelarts.go @@ -51,6 +51,9 @@ const ( func DebugJobIndex(ctx *context.Context) { listType := ctx.Query("debugListType") + if listType == "" { + listType = models.AllResource + } ctx.Data["ListType"] = listType MustEnableCloudbrain(ctx) repo := ctx.Repo.Repository @@ -247,7 +250,9 @@ func Notebook2Create(ctx *context.Context, form auth.CreateModelArtsNotebookForm func NotebookShow(ctx *context.Context) { ctx.Data["PageIsCloudBrain"] = true debugListType := ctx.Query("debugListType") - + if debugListType == "" { + debugListType = "all" + } var ID = ctx.Params(":id") task, err := models.GetCloudbrainByIDWithDeleted(ID) if err != nil { @@ -1027,10 +1032,8 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm) gitRepo, _ := git.OpenRepository(repo.RepoPath()) commitID, _ := gitRepo.GetBranchCommitID(branch_name) - if err := git.Clone(repo.RepoPath(), codeLocalPath, git.CloneRepoOptions{ - Branch: branch_name, - }); err != nil { - log.Error("Create task failed, server timed out: %s (%v)", repo.FullName(), err) + if err := downloadCode(repo, codeLocalPath, branch_name); err != nil { + log.Error("downloadCode failed, server timed out: %s (%v)", repo.FullName(), err) trainJobErrorNewDataPrepare(ctx, form) ctx.RenderWithErr("Create task failed, server timed out", tplModelArtsTrainJobNew, &form) return @@ -1245,9 +1248,7 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ gitRepo, _ := git.OpenRepository(repo.RepoPath()) commitID, _ := gitRepo.GetBranchCommitID(branch_name) - if err := git.Clone(repo.RepoPath(), codeLocalPath, git.CloneRepoOptions{ - Branch: branch_name, - }); err != nil { + if err := downloadCode(repo, codeLocalPath, branch_name); err != nil { log.Error("Failed git clone repo to local(!: %s (%v)", repo.FullName(), err) versionErrorDataPrepare(ctx, form) ctx.RenderWithErr("Failed git clone repo to local!", tplModelArtsTrainJobVersionNew, &form) @@ -1475,9 +1476,9 @@ func paramCheckCreateTrainJob(form auth.CreateModelArtsTrainJobForm) error { return errors.New("启动文件必须是python文件") } - if form.WorkServerNumber > 25 || form.WorkServerNumber < 1 { - log.Error("the WorkServerNumber(%d) must be in (1,25)", form.WorkServerNumber) - return errors.New("计算节点数必须在1-25之间") + if form.WorkServerNumber > 2 || form.WorkServerNumber < 1 { + log.Error("the WorkServerNumber(%d) must be in (1,2)", form.WorkServerNumber) + return errors.New("计算节点数必须在1-2之间") } if form.BranchName == "" { log.Error("the branch must not be null!", form.BranchName) @@ -1493,9 +1494,9 @@ func paramCheckCreateInferenceJob(form auth.CreateModelArtsInferenceJobForm) err return errors.New("启动文件必须是python文件") } - if form.WorkServerNumber > 25 || form.WorkServerNumber < 1 { - log.Error("the WorkServerNumber(%d) must be in (1,25)", form.WorkServerNumber) - return 
errors.New("计算节点数必须在1-25之间") + if form.WorkServerNumber > 2 || form.WorkServerNumber < 1 { + log.Error("the WorkServerNumber(%d) must be in (1,2)", form.WorkServerNumber) + return errors.New("计算节点数必须在1-2之间") } if form.ModelName == "" { @@ -1874,9 +1875,7 @@ func InferenceJobCreate(ctx *context.Context, form auth.CreateModelArtsInference gitRepo, _ := git.OpenRepository(repo.RepoPath()) commitID, _ := gitRepo.GetBranchCommitID(branch_name) - if err := git.Clone(repo.RepoPath(), codeLocalPath, git.CloneRepoOptions{ - Branch: branch_name, - }); err != nil { + if err := downloadCode(repo, codeLocalPath, branch_name); err != nil { log.Error("Create task failed, server timed out: %s (%v)", repo.FullName(), err) inferenceJobErrorNewDataPrepare(ctx, form) ctx.RenderWithErr("Create task failed, server timed out", tplModelArtsInferenceJobNew, &form) diff --git a/routers/repo/user_data_analysis.go b/routers/repo/user_data_analysis.go index 8bc9dc458..2280e8288 100755 --- a/routers/repo/user_data_analysis.go +++ b/routers/repo/user_data_analysis.go @@ -19,6 +19,130 @@ const ( PAGE_SIZE = 2000 ) +func getExcelHeader(ctx *context.Context) map[string]string { + excelHeader := make([]string, 0) + excelHeader = append(excelHeader, ctx.Tr("user.static.id")) + excelHeader = append(excelHeader, ctx.Tr("user.static.name")) + excelHeader = append(excelHeader, ctx.Tr("user.static.UserIndex")) + excelHeader = append(excelHeader, ctx.Tr("user.static.UserIndexPrimitive")) + excelHeader = append(excelHeader, ctx.Tr("user.static.codemergecount")) + excelHeader = append(excelHeader, ctx.Tr("user.static.commitcount")) + excelHeader = append(excelHeader, ctx.Tr("user.static.issuecount")) + excelHeader = append(excelHeader, ctx.Tr("user.static.commentcount")) + excelHeader = append(excelHeader, ctx.Tr("user.static.focusrepocount")) + excelHeader = append(excelHeader, ctx.Tr("user.static.starrepocount")) + excelHeader = append(excelHeader, ctx.Tr("user.static.logincount")) + excelHeader = append(excelHeader, ctx.Tr("user.static.watchedcount")) + excelHeader = append(excelHeader, ctx.Tr("user.static.commitcodesize")) + excelHeader = append(excelHeader, ctx.Tr("user.static.solveissuecount")) + excelHeader = append(excelHeader, ctx.Tr("user.static.encyclopediascount")) + excelHeader = append(excelHeader, ctx.Tr("user.static.createrepocount")) + excelHeader = append(excelHeader, ctx.Tr("user.static.openiindex")) + excelHeader = append(excelHeader, ctx.Tr("user.static.CloudBrainTaskNum")) + excelHeader = append(excelHeader, ctx.Tr("user.static.CloudBrainRunTime")) + excelHeader = append(excelHeader, ctx.Tr("user.static.CommitDatasetNum")) + excelHeader = append(excelHeader, ctx.Tr("user.static.CommitModelCount")) + + excelHeader = append(excelHeader, ctx.Tr("user.static.FocusOtherUser")) + excelHeader = append(excelHeader, ctx.Tr("user.static.CollectDataset")) + excelHeader = append(excelHeader, ctx.Tr("user.static.CollectedDataset")) + excelHeader = append(excelHeader, ctx.Tr("user.static.RecommendDataset")) + excelHeader = append(excelHeader, ctx.Tr("user.static.CollectImage")) + excelHeader = append(excelHeader, ctx.Tr("user.static.CollectedImage")) + excelHeader = append(excelHeader, ctx.Tr("user.static.RecommendImage")) + + excelHeader = append(excelHeader, ctx.Tr("user.static.registdate")) + excelHeader = append(excelHeader, ctx.Tr("user.static.countdate")) + + excelHeaderMap := make(map[string]string, 0) + var i byte + i = 0 + for _, value := range excelHeader { + excelColumn := getColumn(i) + fmt.Sprint(1) + 
log.Info("excelColumn=" + excelColumn) + excelHeaderMap[excelColumn] = value + i++ + } + return excelHeaderMap +} + +func writeExcel(row int, xlsx *excelize.File, sheetName string, userRecord *models.UserBusinessAnalysisAll) { + rows := fmt.Sprint(row) + var tmp byte + tmp = 0 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.ID) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.Name) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", userRecord.UserIndex)) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", userRecord.UserIndexPrimitive)) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CodeMergeCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.IssueCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommentCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.FocusRepoCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.StarRepoCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.LoginCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.WatchedCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitCodeSize) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.SolveIssueCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.EncyclopediasCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CreateRepoCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex)) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CloudBrainTaskNum) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600)) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitDatasetNum) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitModelCount) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.FocusOtherUser) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectDataset) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectedDataset) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.RecommendDataset) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectImage) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectedImage) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.RecommendImage) + tmp = tmp + 1 + formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05") + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, formatTime[0:len(formatTime)-3]) + tmp = tmp + 1 + + formatTime = userRecord.DataDate + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, formatTime) +} +func getColumn(tmp byte) string { + var tmpA byte + tmpA = 'A' + if tmp < 26 { + return string(tmpA + tmp) + } else { + return "A" + string(tmpA+(tmp-26)) + } +} + func queryUserDataPage(ctx *context.Context, tableName string, queryObj interface{}) { page := 
ctx.QueryInt("page") if page <= 0 { @@ -37,30 +161,7 @@ func queryUserDataPage(ctx *context.Context, tableName string, queryObj interfac sheetName := ctx.Tr("user.static.sheetname") index := xlsx.NewSheet(sheetName) xlsx.DeleteSheet("Sheet1") - dataHeader := map[string]string{ - "A1": ctx.Tr("user.static.id"), - "B1": ctx.Tr("user.static.name"), - "C1": ctx.Tr("user.static.codemergecount"), - "D1": ctx.Tr("user.static.UserIndex"), - "E1": ctx.Tr("user.static.commitcount"), - "F1": ctx.Tr("user.static.issuecount"), - "G1": ctx.Tr("user.static.commentcount"), - "H1": ctx.Tr("user.static.focusrepocount"), - "I1": ctx.Tr("user.static.starrepocount"), - "J1": ctx.Tr("user.static.logincount"), - "K1": ctx.Tr("user.static.watchedcount"), - "L1": ctx.Tr("user.static.commitcodesize"), - "M1": ctx.Tr("user.static.solveissuecount"), - "N1": ctx.Tr("user.static.encyclopediascount"), - "O1": ctx.Tr("user.static.createrepocount"), - "P1": ctx.Tr("user.static.openiindex"), - "Q1": ctx.Tr("user.static.CloudBrainTaskNum"), - "R1": ctx.Tr("user.static.CloudBrainRunTime"), - "S1": ctx.Tr("user.static.CommitDatasetNum"), - "T1": ctx.Tr("user.static.CommitModelCount"), - "U1": ctx.Tr("user.static.registdate"), - "V1": ctx.Tr("user.static.countdate"), - } + dataHeader := getExcelHeader(ctx) for k, v := range dataHeader { //设置单元格的值 xlsx.SetCellValue(sheetName, k, v) @@ -74,31 +175,7 @@ func queryUserDataPage(ctx *context.Context, tableName string, queryObj interfac log.Info("return count=" + fmt.Sprint(count)) for _, userRecord := range re { row++ - rows := fmt.Sprint(row) - xlsx.SetCellValue(sheetName, "A"+rows, userRecord.ID) - xlsx.SetCellValue(sheetName, "B"+rows, userRecord.Name) - xlsx.SetCellValue(sheetName, "C"+rows, userRecord.CodeMergeCount) - xlsx.SetCellValue(sheetName, "D"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex)) - xlsx.SetCellValue(sheetName, "E"+rows, userRecord.CommitCount) - xlsx.SetCellValue(sheetName, "F"+rows, userRecord.IssueCount) - xlsx.SetCellValue(sheetName, "G"+rows, userRecord.CommentCount) - xlsx.SetCellValue(sheetName, "H"+rows, userRecord.FocusRepoCount) - xlsx.SetCellValue(sheetName, "I"+rows, userRecord.StarRepoCount) - xlsx.SetCellValue(sheetName, "J"+rows, userRecord.LoginCount) - xlsx.SetCellValue(sheetName, "K"+rows, userRecord.WatchedCount) - xlsx.SetCellValue(sheetName, "L"+rows, userRecord.CommitCodeSize) - xlsx.SetCellValue(sheetName, "M"+rows, userRecord.SolveIssueCount) - xlsx.SetCellValue(sheetName, "N"+rows, userRecord.EncyclopediasCount) - xlsx.SetCellValue(sheetName, "O"+rows, userRecord.CreateRepoCount) - xlsx.SetCellValue(sheetName, "P"+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex)) - xlsx.SetCellValue(sheetName, "Q"+rows, userRecord.CloudBrainTaskNum) - xlsx.SetCellValue(sheetName, "R"+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600)) - xlsx.SetCellValue(sheetName, "S"+rows, userRecord.CommitDatasetNum) - xlsx.SetCellValue(sheetName, "T"+rows, userRecord.CommitModelCount) - formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05") - xlsx.SetCellValue(sheetName, "U"+rows, formatTime[0:len(formatTime)-3]) - formatTime = userRecord.DataDate - xlsx.SetCellValue(sheetName, "V"+rows, formatTime) + writeExcel(row, xlsx, sheetName, userRecord) } indexTotal += PAGE_SIZE @@ -236,62 +313,16 @@ func QueryUserStaticDataPage(ctx *context.Context) { sheetName := ctx.Tr("user.static.sheetname") index := xlsx.NewSheet(sheetName) xlsx.DeleteSheet("Sheet1") - dataHeader := map[string]string{ - "A1": ctx.Tr("user.static.id"), - "B1": 
ctx.Tr("user.static.name"), - "C1": ctx.Tr("user.static.codemergecount"), - "D1": ctx.Tr("user.static.UserIndex"), - "E1": ctx.Tr("user.static.commitcount"), - "F1": ctx.Tr("user.static.issuecount"), - "G1": ctx.Tr("user.static.commentcount"), - "H1": ctx.Tr("user.static.focusrepocount"), - "I1": ctx.Tr("user.static.starrepocount"), - "J1": ctx.Tr("user.static.logincount"), - "K1": ctx.Tr("user.static.watchedcount"), - "L1": ctx.Tr("user.static.commitcodesize"), - "M1": ctx.Tr("user.static.solveissuecount"), - "N1": ctx.Tr("user.static.encyclopediascount"), - "O1": ctx.Tr("user.static.createrepocount"), - "P1": ctx.Tr("user.static.openiindex"), - "Q1": ctx.Tr("user.static.CloudBrainTaskNum"), - "R1": ctx.Tr("user.static.CloudBrainRunTime"), - "S1": ctx.Tr("user.static.CommitDatasetNum"), - "T1": ctx.Tr("user.static.CommitModelCount"), - "U1": ctx.Tr("user.static.registdate"), - "V1": ctx.Tr("user.static.countdate"), - } + + dataHeader := getExcelHeader(ctx) for k, v := range dataHeader { //设置单元格的值 xlsx.SetCellValue(sheetName, k, v) } for i, userRecord := range re { - rows := fmt.Sprint(i + 2) - - xlsx.SetCellValue(sheetName, "A"+rows, userRecord.ID) - xlsx.SetCellValue(sheetName, "B"+rows, userRecord.Name) - xlsx.SetCellValue(sheetName, "C"+rows, userRecord.CodeMergeCount) - xlsx.SetCellValue(sheetName, "D"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex)) - xlsx.SetCellValue(sheetName, "E"+rows, userRecord.CommitCount) - xlsx.SetCellValue(sheetName, "F"+rows, userRecord.IssueCount) - xlsx.SetCellValue(sheetName, "G"+rows, userRecord.CommentCount) - xlsx.SetCellValue(sheetName, "H"+rows, userRecord.FocusRepoCount) - xlsx.SetCellValue(sheetName, "I"+rows, userRecord.StarRepoCount) - xlsx.SetCellValue(sheetName, "J"+rows, userRecord.LoginCount) - xlsx.SetCellValue(sheetName, "K"+rows, userRecord.WatchedCount) - xlsx.SetCellValue(sheetName, "L"+rows, userRecord.CommitCodeSize) - xlsx.SetCellValue(sheetName, "M"+rows, userRecord.SolveIssueCount) - xlsx.SetCellValue(sheetName, "N"+rows, userRecord.EncyclopediasCount) - xlsx.SetCellValue(sheetName, "O"+rows, userRecord.CreateRepoCount) - xlsx.SetCellValue(sheetName, "P"+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex)) - xlsx.SetCellValue(sheetName, "Q"+rows, userRecord.CloudBrainTaskNum) - xlsx.SetCellValue(sheetName, "R"+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600)) - xlsx.SetCellValue(sheetName, "S"+rows, userRecord.CommitDatasetNum) - xlsx.SetCellValue(sheetName, "T"+rows, userRecord.CommitModelCount) - formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05") - xlsx.SetCellValue(sheetName, "U"+rows, formatTime[0:len(formatTime)-3]) - formatTime = userRecord.DataDate - xlsx.SetCellValue(sheetName, "V"+rows, formatTime) + row := i + 2 + writeExcel(row, xlsx, sheetName, userRecord) } //设置默认打开的表单 diff --git a/routers/routes/routes.go b/routers/routes/routes.go index 9df429e8b..912ed6238 100755 --- a/routers/routes/routes.go +++ b/routers/routes/routes.go @@ -525,6 +525,7 @@ func RegisterRoutes(m *macaron.Macaron) { m.Group("/datasets", func() { m.Get("", admin.Datasets) + m.Put("/:id/action/:action", admin.DatasetAction) // m.Post("/delete", admin.DeleteDataset) }) m.Group("/cloudbrains", func() { @@ -534,6 +535,8 @@ func RegisterRoutes(m *macaron.Macaron) { m.Group("/images", func() { m.Get("", admin.Images) m.Get("/data", repo.GetAllImages) + m.Get("/commit_image", admin.CloudBrainCommitImageShow) + m.Post("/commit_image", bindIgnErr(auth.CommitAdminImageCloudBrainForm{}), repo.CloudBrainAdminCommitImage) }) 
m.Put("/image/:id/action/:action", image.Action) @@ -608,12 +611,11 @@ func RegisterRoutes(m *macaron.Macaron) { m.Put("/obs_proxy_multipart", repo.PutOBSProxyUpload) m.Get("/obs_proxy_download", repo.GetOBSProxyDownload) m.Get("/get_multipart_url", repo.GetMultipartUploadUrl) - m.Post("/complete_multipart", repo.CompleteMultipart) - m.Post("/update_chunk", repo.UpdateMultipart) }, reqSignIn) m.Group("/attachments", func() { m.Post("/decompress_done_notify", repo.UpdateAttachmentDecompressState) + m.Post("/complete_multipart", repo.CompleteMultipart) }) m.Group("/attachments", func() { @@ -890,6 +892,7 @@ func RegisterRoutes(m *macaron.Macaron) { m.Post("/labels", reqRepoIssuesOrPullsWriter, repo.UpdateIssueLabel) m.Post("/milestone", reqRepoIssuesOrPullsWriter, repo.UpdateIssueMilestone) m.Post("/assignee", reqRepoIssuesOrPullsWriter, repo.UpdateIssueAssignee) + m.Post("/ref", reqRepoIssuesOrPullsWriter, repo.UpdateIssueRef) m.Post("/request_review", reqRepoIssuesOrPullsReader, repo.UpdatePullReviewRequest) m.Post("/status", reqRepoIssuesOrPullsWriter, repo.UpdateIssueStatus) m.Post("/resolve_conversation", reqRepoIssuesOrPullsReader, repo.UpdateResolveConversation) diff --git a/routers/search.go b/routers/search.go index 1cf78666e..fe1643c80 100644 --- a/routers/search.go +++ b/routers/search.go @@ -183,7 +183,7 @@ func searchRepoByLabel(ctx *context.Context, Key string, Page int, PageSize int) topicsQuery := elastic.NewMatchQuery("topics", Key) boolQ.Should(topicsQuery) - res, err := client.Search("repository-es-index").Query(boolQ).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Highlight(queryHighlight("topics")).Do(ctx.Req.Context()) + res, err := client.Search("repository-es-index").Query(boolQ).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Highlight(queryHighlight("topics")).Do(ctx.Req.Context()) if err == nil { searchJson, _ := json.Marshal(res) log.Info("searchJson=" + string(searchJson)) @@ -200,15 +200,18 @@ func searchRepoByLabel(ctx *context.Context, Key string, Page int, PageSize int) } } -func getSort(SortBy string, ascending bool) elastic.Sorter { - var sort elastic.Sorter - sort = elastic.NewScoreSort() - if SortBy != "" { - if SortBy == "default" { - return sort +func getSort(SortBy string, ascending bool, secondSortBy string, secondAscending bool) []elastic.Sorter { + sort := make([]elastic.Sorter, 0) + if SortBy == "default" || SortBy == "" { + sort = append(sort, elastic.NewScoreSort()) + if secondSortBy != "" { + log.Info("SortBy=" + SortBy + " secondSortBy=" + secondSortBy) + sort = append(sort, elastic.NewFieldSort(secondSortBy).Order(secondAscending)) } - return elastic.NewFieldSort(SortBy).Order(ascending) + } else { + sort = append(sort, elastic.NewFieldSort(SortBy).Order(ascending)) } + log.Info("sort size=" + fmt.Sprint(len(sort))) return sort } @@ -308,7 +311,7 @@ func searchRepo(ctx *context.Context, TableName string, Key string, Page int, Pa topicsQuery := elastic.NewMatchQuery("topics", Key).Boost(1).QueryName("f_third") boolQ.Should(nameQuery, descriptionQuery, topicsQuery) - res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Highlight(queryHighlight("alias", "description", "topics")).Do(ctx.Req.Context()) + res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending, "num_stars", false)...).From(from).Size(Size).Highlight(queryHighlight("alias", "description", "topics")).Do(ctx.Req.Context()) if err == nil { searchJson, _ 
:= json.Marshal(res) log.Info("searchJson=" + string(searchJson)) @@ -330,7 +333,7 @@ func searchRepo(ctx *context.Context, TableName string, Key string, Page int, Pa } else { log.Info("query all content.") //搜索的属性要指定{"timestamp":{"unmapped_type":"date"}} - res, err := client.Search(TableName).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Do(ctx.Req.Context()) + res, err := client.Search(TableName).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Do(ctx.Req.Context()) if err == nil { searchJson, _ := json.Marshal(res) log.Info("searchJson=" + string(searchJson)) @@ -691,7 +694,7 @@ func searchUserOrOrg(ctx *context.Context, TableName string, Key string, Page in boolQ.Must(UserOrOrgQuery) } - res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending)).From((Page - 1) * PageSize).Size(PageSize).Highlight(queryHighlight("name", "full_name", "description")).Do(ctx.Req.Context()) + res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From((Page - 1) * PageSize).Size(PageSize).Highlight(queryHighlight("name", "full_name", "description")).Do(ctx.Req.Context()) if err == nil { searchJson, _ := json.Marshal(res) log.Info("searchJson=" + string(searchJson)) @@ -849,7 +852,7 @@ func searchDataSet(ctx *context.Context, TableName string, Key string, Page int, fileNameQuery := elastic.NewMatchQuery("file_name", Key).Boost(1).QueryName("f_third") categoryQuery := elastic.NewMatchQuery("category", Key).Boost(1).QueryName("f_fourth") boolQ.Should(nameQuery, descQuery, categoryQuery, fileNameQuery) - res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Highlight(queryHighlight("title", "description", "file_name", "category")).Do(ctx.Req.Context()) + res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Highlight(queryHighlight("title", "description", "file_name", "category")).Do(ctx.Req.Context()) if err == nil { searchJson, _ := json.Marshal(res) log.Info("searchJson=" + string(searchJson)) @@ -864,7 +867,7 @@ func searchDataSet(ctx *context.Context, TableName string, Key string, Page int, } else { log.Info("query all datasets.") //搜索的属性要指定{"timestamp":{"unmapped_type":"date"}} - res, err := client.Search(TableName).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Do(ctx.Req.Context()) + res, err := client.Search(TableName).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Do(ctx.Req.Context()) if err == nil { searchJson, _ := json.Marshal(res) log.Info("searchJson=" + string(searchJson)) @@ -1057,7 +1060,7 @@ func searchIssueOrPr(ctx *context.Context, TableName string, Key string, Page in boolQ.Must(isIssueQuery) } - res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Highlight(queryHighlight("name", "content", "comment")).Do(ctx.Req.Context()) + res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Highlight(queryHighlight("name", "content", "comment")).Do(ctx.Req.Context()) if err == nil { searchJson, _ := json.Marshal(res) log.Info("searchJson=" + string(searchJson)) diff --git a/routers/user/profile.go b/routers/user/profile.go index 41d8561d6..f82c03a75 100755 --- a/routers/user/profile.go +++ b/routers/user/profile.go @@ -106,9 
+106,9 @@ func Profile(ctx *context.Context) { for _, org := range orgs { _, repoCount, err := models.SearchRepository(&models.SearchRepoOptions{ - OwnerID: org.ID, - Private: ctx.IsSigned, - Actor: ctx.User, + OwnerID: org.ID, + Private: ctx.IsSigned, + Actor: ctx.User, }) if err != nil { ctx.ServerError("SearchRepository", err) @@ -175,6 +175,8 @@ func Profile(ctx *context.Context) { orderBy = models.SearchOrderByAlphabeticallyReverse case "alphabetically": orderBy = models.SearchOrderByAlphabetically + case "downloadtimes": + orderBy = models.SearchOrderByDownloadTimes case "moststars": orderBy = models.SearchOrderByStarsReverse case "feweststars": diff --git a/services/issue/content.go b/services/issue/content.go index 1081e30b5..387930449 100644 --- a/services/issue/content.go +++ b/services/issue/content.go @@ -21,3 +21,12 @@ func ChangeContent(issue *models.Issue, doer *models.User, content string) (err return nil } + +// ChangeRef changes issue ref, as the given user. +func ChangeRef(issue *models.Issue, doer *models.User, ref string) (err error) { + if err := issue.ChangeRef(doer, ref); err != nil { + return err + } + + return nil +} diff --git a/services/socketwrap/clientManager.go b/services/socketwrap/clientManager.go index 61f356a66..6ffa96933 100755 --- a/services/socketwrap/clientManager.go +++ b/services/socketwrap/clientManager.go @@ -10,7 +10,7 @@ import ( "github.com/elliotchance/orderedmap" ) -var opTypes = []int{1, 2, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 17, 22, 23, 25, 26, 27, 28, 29, 30, 31} +var opTypes = []int{1, 2, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 17, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31} type ClientsManager struct { Clients *orderedmap.OrderedMap diff --git a/templates/admin/cloudbrain/imagecommit.tmpl b/templates/admin/cloudbrain/imagecommit.tmpl new file mode 100644 index 000000000..21ab73104 --- /dev/null +++ b/templates/admin/cloudbrain/imagecommit.tmpl @@ -0,0 +1,128 @@ + +{{template "base/head" .}} +
+
+
+
+
+
+
+
+
+
+ {{template "repo/header" .}} +
+
+
+ + + +

+ {{.i18n.Tr "repo.submit_image"}} +

+ +
+
+ + {{.CsrfTokenHtml}} +
+ +
+ + CPU/GPU +
+ +
+
+ + + {{.i18n.Tr "repo.images.name_rule"}} +
+
+ + + +
+
+ + +
+
+   + +
+ {{.i18n.Tr "repo.image.label_tooltips"}} +
+ +
+
+ + +
+
+
+
+ + +
+
+ +
+
+ +
+
+ + +
+
+
+
+ + +
+
+
+ {{.i18n.Tr "repo.images.public_tooltips"}} +
+
+
+ + + {{.i18n.Tr "repo.cloudbrain.cancel"}} +
+
+
+
+
+
+ + +
+ +
+{{template "base/footer" .}} \ No newline at end of file diff --git a/templates/admin/cloudbrain/images.html b/templates/admin/cloudbrain/images.tmpl similarity index 100% rename from templates/admin/cloudbrain/images.html rename to templates/admin/cloudbrain/images.tmpl diff --git a/templates/admin/dataset/list.tmpl b/templates/admin/dataset/list.tmpl index 1044e4c28..9e4e72b68 100644 --- a/templates/admin/dataset/list.tmpl +++ b/templates/admin/dataset/list.tmpl @@ -3,12 +3,23 @@ {{template "admin/navbar" .}}
{{template "base/alert" .}} +

{{.i18n.Tr "admin.datasets.dataset_manage_panel"}} ({{.i18n.Tr "admin.total" .Total}})

{{template "admin/dataset/search" .}}
+
+
+
+ + +
+
+
+
@@ -24,10 +35,10 @@ {{range .Datasets}} - + - + {{end}} @@ -37,16 +48,4 @@ {{template "base/paginate" .}} - - {{template "base/footer" .}} diff --git a/templates/admin/dataset/search.tmpl b/templates/admin/dataset/search.tmpl index 5d01d836c..6b42fb6cf 100644 --- a/templates/admin/dataset/search.tmpl +++ b/templates/admin/dataset/search.tmpl @@ -6,18 +6,18 @@ @@ -26,4 +26,4 @@ - + \ No newline at end of file diff --git a/templates/custom/select_dataset.tmpl b/templates/custom/select_dataset.tmpl index cd4ce50da..befd186c5 100644 --- a/templates/custom/select_dataset.tmpl +++ b/templates/custom/select_dataset.tmpl @@ -23,7 +23,7 @@
-
${dataset.Repo.OwnerName}/${dataset.Repo.Alias} ${dataset.Name}
+
${dataset.Repo.OwnerName}/${dataset.Repo.Alias}${dataset.Name}
@@ -49,7 +49,7 @@
-
${dataset.Repo.OwnerName}/${dataset.Repo.Alias}${dataset.Name}
+
${dataset.Repo.OwnerName}/${dataset.Repo.Alias}${dataset.Name}
@@ -74,7 +74,7 @@
-
${dataset.Repo.OwnerName}/${dataset.Repo.Alias}${dataset.Name}
+
${dataset.Repo.OwnerName}/${dataset.Repo.Alias}${dataset.Name}
@@ -99,7 +99,7 @@
-
${dataset.Repo.OwnerName}/${dataset.Repo.Alias}${dataset.Name}
+
${dataset.Repo.OwnerName}/${dataset.Repo.Alias}${dataset.Name}
diff --git a/templates/custom/select_dataset_train.tmpl b/templates/custom/select_dataset_train.tmpl index 3da9be9aa..b8260f279 100644 --- a/templates/custom/select_dataset_train.tmpl +++ b/templates/custom/select_dataset_train.tmpl @@ -23,7 +23,7 @@
-
${dataset.Repo.OwnerName}/${dataset.Repo.Alias} ${dataset.Name}
+
${dataset.Repo.OwnerName}/${dataset.Repo.Alias} ${dataset.Name}
@@ -49,7 +49,7 @@
-
${dataset.Repo.OwnerName}/${dataset.Repo.Alias}${dataset.Name}
+
${dataset.Repo.OwnerName}/${dataset.Repo.Alias}${dataset.Name}
@@ -74,7 +74,7 @@
-
${dataset.Repo.OwnerName}/${dataset.Repo.Alias}${dataset.Name}
+
${dataset.Repo.OwnerName}/${dataset.Repo.Alias}${dataset.Name}
@@ -99,7 +99,7 @@
-
${dataset.Repo.OwnerName}/${dataset.Repo.Alias}${dataset.Name}
+
${dataset.Repo.OwnerName}/${dataset.Repo.Alias}${dataset.Name}
@@ -139,6 +141,7 @@ {{end}}
+ 仅显示平台推荐
{{range $k, $v :=.Datasets}}
@@ -147,6 +150,8 @@ {{.Repo.OwnerName}} / {{.Repo.Alias}} {{if $.IsSigned}} + + {{.DownloadTimes}}
@@ -154,6 +159,8 @@
{{else}} + + {{.DownloadTimes}}
@@ -161,7 +168,7 @@
{{end}}
-
{{.Title}}
+
{{.Title}}{{if .Recommend}}{{end}}
{{if or (.Category) (.Task) (.License)}}
{{if .Category}} diff --git a/templates/mail/auth/activate.tmpl b/templates/mail/auth/activate.tmpl old mode 100644 new mode 100755 index 37fdbd7c7..d2996f38d --- a/templates/mail/auth/activate.tmpl +++ b/templates/mail/auth/activate.tmpl @@ -11,5 +11,6 @@

{{AppUrl}}user/activate?code={{.Code}}

Not working? Try copying and pasting it to your browser.

© {{AppName}}

+

退订(TD)

diff --git a/templates/mail/auth/activate_email.tmpl b/templates/mail/auth/activate_email.tmpl old mode 100644 new mode 100755 index ebcaa0ee7..50ef9adcd --- a/templates/mail/auth/activate_email.tmpl +++ b/templates/mail/auth/activate_email.tmpl @@ -11,5 +11,6 @@

{{AppUrl}}user/activate_email?code={{.Code}}&email={{.Email}}

Not working? Try copying and pasting it to your browser.

© {{AppName}}

+

退订(TD)

diff --git a/templates/mail/auth/register_notify.tmpl b/templates/mail/auth/register_notify.tmpl old mode 100644 new mode 100755 index ea1857030..83950c260 --- a/templates/mail/auth/register_notify.tmpl +++ b/templates/mail/auth/register_notify.tmpl @@ -11,5 +11,6 @@

{{AppUrl}}user/login

If this account has been created for you, please set your password first.

© {{AppName}}

+

退订(TD)

diff --git a/templates/mail/auth/reset_passwd.tmpl b/templates/mail/auth/reset_passwd.tmpl old mode 100644 new mode 100755 index e01d57cea..f24c1dc31 --- a/templates/mail/auth/reset_passwd.tmpl +++ b/templates/mail/auth/reset_passwd.tmpl @@ -12,5 +12,6 @@

{{AppUrl}}user/recover_account?code={{.Code}}

Not working? Try copying and pasting it to your browser.

© {{AppName}}

+

退订(TD)

diff --git a/templates/mail/issue/assigned.tmpl b/templates/mail/issue/assigned.tmpl old mode 100644 new mode 100755 index d302a16f2..bd9fd015d --- a/templates/mail/issue/assigned.tmpl +++ b/templates/mail/issue/assigned.tmpl @@ -15,6 +15,8 @@ ---
View it on {{AppName}}. +
+ 退订(TD)

diff --git a/templates/mail/issue/default.tmpl b/templates/mail/issue/default.tmpl old mode 100644 new mode 100755 index 7cd397527..e3b426d40 --- a/templates/mail/issue/default.tmpl +++ b/templates/mail/issue/default.tmpl @@ -53,6 +53,8 @@ ---
View it on {{AppName}}. +
+ 退订(TD)

diff --git a/templates/mail/notify/collaborator.tmpl b/templates/mail/notify/collaborator.tmpl old mode 100644 new mode 100755 index 947b40439..d878a0ff5 --- a/templates/mail/notify/collaborator.tmpl +++ b/templates/mail/notify/collaborator.tmpl @@ -15,6 +15,8 @@ ---
View it on {{AppName}}. +
+ 退订(TD)

diff --git a/templates/repo/attachment/upload.tmpl b/templates/repo/attachment/upload.tmpl index 42aac99df..56dc52417 100644 --- a/templates/repo/attachment/upload.tmpl +++ b/templates/repo/attachment/upload.tmpl @@ -13,17 +13,15 @@ {{.CsrfTokenHtml}} - CPU/GPU - NPU - + CPU/GPU + NPU - - - + + - +
+ + +
+ + + + diff --git a/templates/repo/datasets/index.tmpl b/templates/repo/datasets/index.tmpl index e8d9b65e3..ba4842f57 100755 --- a/templates/repo/datasets/index.tmpl +++ b/templates/repo/datasets/index.tmpl @@ -281,7 +281,7 @@ {{$.i18n.Tr "dataset.copy_url"}} - {{$.i18n.Tr "dataset.copy_md5"}} + {{if and ($.CanWrite) (eq .DecompressState 1) }} {{$.i18n.Tr "dataset.annotation"}} {{end}} diff --git a/templates/repo/issue/branch_selector_field.tmpl b/templates/repo/issue/branch_selector_field.tmpl index 4f80c13e5..265c14765 100644 --- a/templates/repo/issue/branch_selector_field.tmpl +++ b/templates/repo/issue/branch_selector_field.tmpl @@ -14,12 +14,12 @@
- + {{svg "octicon-git-branch" 16}} {{.i18n.Tr "repo.branches"}} - + {{.i18n.Tr "repo.tags"}} diff --git a/templates/repo/issue/view_content/comments.tmpl b/templates/repo/issue/view_content/comments.tmpl index e3c7df674..796054005 100644 --- a/templates/repo/issue/view_content/comments.tmpl +++ b/templates/repo/issue/view_content/comments.tmpl @@ -594,5 +594,40 @@ {{end}}
+ {{else if eq .Type 29}} +
+		{{svg "octicon-git-branch" 16}}
+
+
+
+
+		{{.Poster.GetDisplayName}}
+
+		{{ $refOldName:= GetRefName .OldRef }}
+		{{ $refNewName:= GetRefName .NewRef }}
+
+		{{if .OldRef }}
+			{{if .NewRef }}
+				{{$.i18n.Tr "repo.issues.change_branch_tag_at" ($refOldName|Escape) ($refNewName|Escape) $createdStr | Safe}}
+			{{else}}
+				{{ $getRefOldType:= GetRefType .OldRef }}
+				{{ if eq $getRefOldType "branch"}}
+					{{$.i18n.Tr "repo.issues.remove_branch_at" ($refOldName|Escape) $createdStr | Safe}}
+				{{else}}
+					{{$.i18n.Tr "repo.issues.remove_tag_at" ($refOldName|Escape) $createdStr | Safe}}
+				{{end}}
+			{{end}}
+		{{else}}
+			{{if .NewRef}}
+				{{ $getRefNewType:= GetRefType .NewRef }}
+				{{ if eq $getRefNewType "branch"}}
+					{{$.i18n.Tr "repo.issues.add_branch_at" ($refNewName|Escape) $createdStr | Safe}}
+				{{else}}
+					{{$.i18n.Tr "repo.issues.add_tag_at" ($refNewName|Escape) $createdStr | Safe}}
+				{{end}}
+			{{end}}
+		{{end}}
+
 	{{end}}
{{end}}
diff --git a/templates/repo/issue/view_content/sidebar.tmpl b/templates/repo/issue/view_content/sidebar.tmpl
index bcc69a48b..e6a61a567 100644
--- a/templates/repo/issue/view_content/sidebar.tmpl
+++ b/templates/repo/issue/view_content/sidebar.tmpl
@@ -1,6 +1,52 @@
- {{template "repo/issue/branch_selector_field" .}} + + {{if and (not .Issue.IsPull) (not .PageIsComparePull)}} + + + +
+ {{end}} {{if .Issue.IsPull }} @@ -600,3 +646,4 @@
{{end}} {{end}} + diff --git a/templates/repo/modelarts/notebook/show.tmpl b/templates/repo/modelarts/notebook/show.tmpl index 0d7a01efb..26839345e 100755 --- a/templates/repo/modelarts/notebook/show.tmpl +++ b/templates/repo/modelarts/notebook/show.tmpl @@ -193,7 +193,7 @@ td, th { {{.i18n.Tr "repo.cloudbrain"}}
/
- + {{$.i18n.Tr "repo.modelarts.notebook"}}
/
diff --git a/templates/repo/modelarts/trainjob/new.tmpl b/templates/repo/modelarts/trainjob/new.tmpl index a37c5712a..cff51c524 100755 --- a/templates/repo/modelarts/trainjob/new.tmpl +++ b/templates/repo/modelarts/trainjob/new.tmpl @@ -233,8 +233,13 @@
@@ -233,8 +233,13 @@
-
-
+
+
+
+
+
@@ -263,19 +268,20 @@
     $('.menu .item')
     .tab();
-    let sever_num = $('#trainjob_work_server_num')
-    $('.add').click(function(){
-        sever_num.val(parseInt(sever_num.val())+1)
-        if(sever_num.val()>=26){
-            sever_num.val(parseInt(sever_num.val())-1)
-        }
-    })
-    $('.min').click(function(){
-        sever_num.val(parseInt(sever_num.val())-1)
-        if(sever_num.val()<=0){
-            sever_num.val(parseInt(sever_num.val())+1)
-        }
-    })
+    // let sever_num = $("#trainjob_work_server_num_select .text").text() //$('#trainjob_work_server_num')
+    // console.log("sever_num:",sever_num)
+    // $('.add').click(function(){
+    //     sever_num.val(parseInt(sever_num.val())+1)
+    //     if(sever_num.val()>=26){
+    //         sever_num.val(parseInt(sever_num.val())-1)
+    //     }
+    // })
+    // $('.min').click(function(){
+    //     sever_num.val(parseInt(sever_num.val())-1)
+    //     if(sever_num.val()<=0){
+    //         sever_num.val(parseInt(sever_num.val())+1)
+    //     }
+    // })
     // Add, delete, modify, and save run parameters
     function Add_parameter(i){
         value = '
' +
@@ -349,7 +355,7 @@
                 // $("select[name='pool_id']").val(parameters[i]);
                 // break;
                 case (6):
-                    $("input[name='work_server_number']").val(parameters[i]);
+                    // $("input[name='work_server_number']").val(parameters[i]);
                     break;
             }
         }
@@ -456,6 +462,10 @@
         $("input#ai_engine_name").val(name1)
         $("input#ai_flaver_name").val(name2)
+        let val_server_num_select = $("#trainjob_work_server_num_select .text").text()
+        // console.log("val_server_num_select:",val_server_num_select)
+        $("input#trainjob_work_server_num").val(val_server_num_select)
+
     }
     $('.ui.create_train_job.green.button').click(function(e) {
         get_name()
diff --git a/templates/repo/modelarts/trainjob/show.tmpl b/templates/repo/modelarts/trainjob/show.tmpl
index 0b420c484..3b370b26d 100755
--- a/templates/repo/modelarts/trainjob/show.tmpl
+++ b/templates/repo/modelarts/trainjob/show.tmpl
@@ -249,7 +249,7 @@ td, th {
@@ -420,11 +420,17 @@ td, th {
-
+
+ + + + + + -
+

@@ -830,15 +836,28 @@ td, th {
             html += "
"
             $(`#dir_list${version_name}`).append(html)
         }
+        function debounce(fn,delay){
+            let timer;
+            return (...args) => {
+                // a new call arrived while one was pending: clear the timer and restart the wait
+                if (timer) {
+                    clearTimeout(timer);
+                }
+                // re-arm the setTimeout
+                timer = setTimeout(() => {
+                    fn.apply(this, args);
+                }, delay);
+            };
+        }
+        const fn = debounce(logScroll, 500)
         function logScroll(version_name) {
-
             let container = document.querySelector(`#log${version_name}`)
             let scrollTop = container.scrollTop
             let scrollHeight = container.scrollHeight
             let clientHeight = container.clientHeight
             let scrollLeft = container.scrollLeft
-            if((parseInt(scrollTop) + clientHeight == scrollHeight || parseInt(scrollTop) + clientHeight +1 == scrollHeight || parseInt(scrollTop) + clientHeight - 1 == scrollHeight) && (scrollLeft===0)){
+            if(((parseInt(scrollTop) + clientHeight == scrollHeight || parseInt(scrollTop) + clientHeight +1 == scrollHeight || parseInt(scrollTop) + clientHeight - 1 == scrollHeight)) && parseInt(scrollTop)!==0 && scrollLeft==0){
                 let end_line = $(`#log${version_name} input[name=end_line]`).val()
                 $.get(`/api/v1/repos/${userName}/${repoPath}/modelarts/train-job/${jobID}/log?version_name=${version_name}&base_line=${end_line}&lines=50&order=desc`, (data) => {
                     if (data.Lines == 0){
@@ -861,7 +880,7 @@ td, th {
                     console.log(err);
                 });
             }
-            if(scrollTop == 0 && scrollLeft==0){
+            if([0,1,2,3,4,5,6,7,8,9,10].includes(scrollTop) && scrollLeft==0){
                 let start_line = $(`#log${version_name} input[name=start_line]`).val()
                 $.get(`/api/v1/repos/${userName}/${repoPath}/modelarts/train-job/${jobID}/log?version_name=${version_name}&base_line=${start_line}&lines=50&order=asc`, (data) => {
                     if (data.Lines == 0){
@@ -879,4 +898,78 @@ td, th {
                 });
             }
         }
+    function scrollAnimation(dom, currentY, targetY, currentX) {
+        let needScrollTop = targetY - currentY;
+        let _currentY = currentY;
+        setTimeout(() => {
+            // the step moved per call varies from call to call:
+            // take one tenth of the remaining distance
+            const dist = Math.ceil(needScrollTop / 10);
+            _currentY += dist;
+            // move by that tenth
+            dom.scrollTo(currentX || 0, _currentY,'smooth');
+            // while more than ten pixels remain, recurse to animate;
+            // otherwise jump straight to the target
+            if (needScrollTop > 10 || needScrollTop < -10) {
+                scrollAnimation(dom, _currentY, targetY)
+            } else {
+                dom.scrollTo(0, targetY,'smooth')
+            }
+        }, 1)
+    }
+
+    $('.log_top').click(function(){
+        // let logContentDom = document.querySelector('.log')
+        // if(!logContentDom)
+        //     return
+        // let version_name = $('.log_top').data('version')
+        let version_name = $(this).data('version')
+        let logContentDom = document.querySelector(`#log${version_name}`)
+
+        $(`#log_file${version_name}`).siblings('pre').remove()
+        $.get(`/api/v1/repos/${userName}/${repoPath}/modelarts/train-job/${jobID}/log?version_name=${version_name}&base_line=&lines=50&order=asc`, (data) => {
+
+            $(`#log${version_name} input[name=end_line]`).val(data.EndLine)   // refresh the stored line markers when they change
+            $(`#log${version_name} input[name=start_line]`).val(data.StartLine)
+            $(`#log${version_name}`).prepend('
' + data.Content)
+            $(`.message${version_name} #header`).text('您已翻阅至日志顶部')
+            $(`.message${version_name}`).css('display', 'block')
+            setTimeout(function(){
+                $(`.message${version_name}`).css('display', 'none')
+            }, 1000)
+            scrollAnimation(logContentDom, logContentDom.scrollTop, 0);
+        })
+
+    })
+    $('.log_bottom').click(function(e){
+        let version_name = $(this).data('version')
+        let logContentDom = document.querySelector(`#log${version_name}`)
+        $(`#log_file${version_name}`).siblings('pre').remove()
+        $.get(`/api/v1/repos/${userName}/${repoPath}/modelarts/train-job/${jobID}/log?version_name=${version_name}&base_line=&lines=50&order=desc`, (data) => {
+            
+            $(`#log${version_name} input[name=end_line]`).val(data.EndLine)   // refresh the stored line markers when they change
+            $(`#log${version_name} input[name=start_line]`).val(data.StartLine)
+            $(`#log${version_name}`).append('
' + data.Content)
+            $.get(`/api/v1/repos/${userName}/${repoPath}/modelarts/train-job/${jobID}/log?version_name=${version_name}&base_line=${data.EndLine}&lines=50&order=desc`, (data) => {
+                if (data.Lines == 0){
+                    $(`.message${version_name} #header`).text('您已翻阅至日志底部')
+                    $(`.message${version_name}`).css('display', 'block')
+                    setTimeout(function(){
+                        $(`.message${version_name}`).css('display', 'none')
+                    }, 1000)
+                }else{
+                    if(end_line===data.EndLine){
+                        return
+                    }
+                    else{
+                        $(`#log${version_name} input[name=end_line]`).val(data.EndLine)
+                        $(`#log${version_name}`).append('
' + data.Content)
+                    }
+
+                }
+            }).fail(function(err) {
+                console.log(err);
+            });
+            scrollAnimation(logContentDom, logContentDom.scrollTop+1, logContentDom.scrollHeight - logContentDom.clientHeight);
+        })
+    })
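
Note that the handlers above throttle scrolling through `debounce(logScroll, 500)`, so a burst of scroll events issues at most one log request once the burst settles. A self-contained sketch of the same pattern; the handler body and event target here are illustrative, not part of this patch:

function debounce(fn, delay) {
    let timer;
    return (...args) => {
        // a new call arrived while one was pending: restart the wait
        if (timer) clearTimeout(timer);
        timer = setTimeout(() => fn.apply(this, args), delay);
    };
}

// Illustrative usage: the callback runs once, 500 ms after the last scroll event.
const onScroll = debounce(() => console.log('fetch next 50 log lines'), 500);
window.addEventListener('scroll', onScroll);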
 
diff --git a/templates/user/dashboard/feeds.tmpl b/templates/user/dashboard/feeds.tmpl
index 614c328c6..171287acb 100755
--- a/templates/user/dashboard/feeds.tmpl
+++ b/templates/user/dashboard/feeds.tmpl
@@ -71,7 +71,7 @@
 							{{ $index := index .GetIssueInfos 0}}
 							{{$.i18n.Tr "action.comment_pull" .GetRepoLink $index .ShortRepoPath | Str2html}}
 						{{else if eq .GetOpType 24}}
-							{{$.i18n.Tr "action.upload_dataset" .GetRepoLink .Content .RefName | Str2html}}
+							{{$.i18n.Tr "action.upload_dataset" .GetRepoLink .RefName | Str2html}}
 						{{else if eq .GetOpType 25}}
 							{{$.i18n.Tr "action.task_gpudebugjob" .GetRepoLink .Content .RefName | Str2html}}
 						{{else if eq .GetOpType 26}}
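
Action type 24 is the dataset-upload event that the clientManager.go change earlier in this patch adds to the broadcast whitelist; the feed entry for it now renders without the `.Content` argument. A rough JavaScript analogue of that whitelist check, as a sketch only (the real filter is server-side Go, and the `OpType` field name is assumed here):

const opTypes = [1, 2, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 17, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31];

function shouldDisplay(action) {
    // only action kinds the feed knows how to render are let through
    return opTypes.includes(action.OpType);
}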
diff --git a/web_src/js/components/MinioUploader.vue b/web_src/js/components/MinioUploader.vue
index 8c33608e7..167bb8c5a 100755
--- a/web_src/js/components/MinioUploader.vue
+++ b/web_src/js/components/MinioUploader.vue
@@ -1,25 +1,31 @@
 
 
 
-
-
\ No newline at end of file
diff --git a/web_src/js/components/UserAnalysis.vue b/web_src/js/components/UserAnalysis.vue
index 596ecd602..c0e8c7411 100755
--- a/web_src/js/components/UserAnalysis.vue
+++ b/web_src/js/components/UserAnalysis.vue
@@ -63,20 +63,29 @@
                       
                 
                 
-                
+                    
+                 
                 
                     
                  
                 
+                
+                
@@ -161,6 +170,48 @@
                     width="120px"
                     align="center">
                  
+                
+                 
+                
+                 
+                
+                 
+                
+                 
+                
+                 
+                
+                 
+                
+                 
                  
-
+
+
@@ -11,24 +12,24 @@
- - - 仅显示平台推荐 - - - + +
+ 仅显示平台推荐 + {{dropdownPrivate}} 全部 公开 - 私有 + 私有 - - - - + +
+ +
{{scope.row.numStars}}
- - - + 设为推荐 复制地址
@@ -138,7 +135,7 @@ - +
+
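
The 设为推荐 ("set as recommended") action in this list component issues a PUT to the admin endpoint carried in each button's data-url attribute; the matching handler is added to web_src/js/index.js below. A condensed sketch of that round trip (the redirect target mirrors the handler; the example route in the comment is illustrative only):

$('.set_dataset').on('click', function () {
    $.ajax({
        url: $(this).data('url'),   // supplied by the template; an admin datasets recommend route
        type: 'PUT',
        success: function (res) {
            if (res.Code == 0) {
                window.location.href = '/admin/datasets';   // success: reload the admin list
            } else {
                // surface the server-side error message
                $('.ui.negative.message').text(res.Message).show().delay(1500).fadeOut();
            }
        },
    });
});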
diff --git a/web_src/js/features/images.js b/web_src/js/features/images.js index 25da0aee2..0cafb3901 100644 --- a/web_src/js/features/images.js +++ b/web_src/js/features/images.js @@ -26,6 +26,15 @@ export default async function initImage(){ } ] }, + place:{ + identifier : 'place', + rules: [ + { + type: 'empty', + + } + ] + }, } }) } @@ -75,8 +84,9 @@ export default async function initImage(){ type:'POST', data:formData, success:function(res){ + console.log(res) if(res.Code===1){ - $('.ui.info.message').text(res.Message).show().delay(1500).fadeOut(); + $('.ui.negative.message').text(res.Message).show().delay(2500).fadeOut(); }else if(res.Code==0){ if(location.href.indexOf('imageAdmin')!==-1){ location.href = `${window.config.AppSubUrl}/admin/images` @@ -105,6 +115,11 @@ export default async function initImage(){ $("textarea[name='description']").parent().addClass('error') return false } + if($("input[name='place']").length>0&&!$("input[name='place']").val()){ + console.log("1111111",$("input[name='place']")) + $("input[name='place']").parent().addClass('error') + return false + } const postData = { _csrf:$("input[name='_csrf']").val(), @@ -115,6 +130,10 @@ export default async function initImage(){ topics:$("input[name='topics']").val(), id:$("input[name='id']").val() } + if($("input[name='place']").val()&&$("input[name='isRecommend']:checked").val()){ + postData.isRecommend = $("input[name='isRecommend']:checked").val() + postData.place = $("input[name='place']").val() + } let formData = $params(postData) if($("input[name='edit']").val()=="edit"){ postImage(formData) @@ -143,15 +162,16 @@ export default async function initImage(){ } }) } - + return false }) $('#cancel_submit_image').click(()=>{ + console.log(pageform) if(link.includes('cloudbrain')){ let repoLink = link.split('cloudbrain')[0] location.href = `${window.config.AppSubUrl}${repoLink}debugjob?debugListType=all` }else if(pageform=='imageSquare'){ location.href = `${window.config.AppSubUrl}/explore/images?type=myimage` - }else if(pageform=='imageAdmin'){ + }else if(pageform){ location.href = `${window.config.AppSubUrl}/admin/images` } }) diff --git a/web_src/js/index.js b/web_src/js/index.js index 6fdd68d8d..88529bf8a 100755 --- a/web_src/js/index.js +++ b/web_src/js/index.js @@ -34,7 +34,6 @@ import { } from './features/notification.js'; import {createCodeEditor} from './features/codeeditor.js'; import MinioUploader from './components/MinioUploader.vue'; -import ObsUploader from './components/ObsUploader.vue'; import EditAboutInfo from './components/EditAboutInfo.vue'; // import Images from './components/Images.vue'; import EditTopics from './components/EditTopics.vue'; @@ -184,11 +183,11 @@ function initBranchSelector() { }); $selectBranch.find('.reference.column').on('click', function () { $selectBranch.find('.scrolling.reference-list-menu').css('display', 'none'); - $selectBranch.find('.reference .text').removeClass('black'); + $selectBranch.find('.reference .text').addClass('black'); $($(this).data('target')).css('display', 'block'); $(this) - .find('.text') - .addClass('black'); + .find('.text.black') + .removeClass('black'); return false; }); } @@ -231,7 +230,7 @@ function initLabelEdit() { }); } -function updateIssuesMeta(url, action, issueIds, elementId, isAdd) { +function updateIssuesMeta(url, action, issueIds, elementId,isAdd) { return new Promise((resolve) => { $.ajax({ type: 'POST', @@ -241,13 +240,14 @@ function updateIssuesMeta(url, action, issueIds, elementId, isAdd) { action, issue_ids: issueIds, id: 
elementId, - is_add: isAdd + is_add: isAdd, }, success: resolve }); }); } + function initRepoStatusChecker() { const migrating = $('#repo_migrating'); $('#repo_migrating_failed').hide(); @@ -487,12 +487,13 @@ function initCommentForm() { const promises = []; Object.keys(labels).forEach((elementId) => { const label = labels[elementId]; + console.log("label:",label) const promise = updateIssuesMeta( label['update-url'], label.action, label['issue-id'], elementId, - label['is-checked'] + label['is-checked'], ); promises.push(promise); }); @@ -532,7 +533,7 @@ function initCommentForm() { '', $listMenu.data('issue-id'), $(this).data('id'), - $(this).data('is-checked') + $(this).data('is-checked'), ); $listMenu.data('action', 'update'); // Update to reload the page when we updated items return false; @@ -604,6 +605,7 @@ function initCommentForm() { $listMenu.data('issue-id'), '', '' + ).then(reload); } @@ -637,10 +639,16 @@ function initCommentForm() { initListSubmits('select-reviewers-modify', 'assignees'); function selectItem(select_id, input_id) { - const $menu = $(`${select_id} .menu`); + let $menu; + if (select_id=='.select-branch'){ + $menu = $(`${select_id} .menu`).eq(1); + }else{ + $menu = $(`${select_id} .menu`); + } + const $list = $(`.ui${select_id}.list`); const hasUpdateAction = $menu.data('action') === 'update'; - + $menu.find('.item:not(.no-select)').on('click', function () { $(this) .parent() @@ -651,12 +659,17 @@ function initCommentForm() { $(this).addClass('selected active'); if (hasUpdateAction) { + //let ref = '' + //if (select_id=='.select-branch'){ + // ref = $(this).data('name'); + // } + updateIssuesMeta( $menu.data('update-url'), '', $menu.data('issue-id'), $(this).data('id'), - $(this).data('is-checked') + $(this).data('is-checked'), ).then(reload); } switch (input_id) { @@ -709,6 +722,7 @@ function initCommentForm() { // Milestone and assignee selectItem('.select-milestone', '#milestone_id'); selectItem('.select-assignee', '#assignee_id'); + selectItem('.select-branch', ''); } function initInstall() { @@ -811,7 +825,7 @@ function initIssueComments() { const issueId = $(this).data('issue-id'); const id = $(this).data('id'); const isChecked = $(this).data('is-checked'); - + //const ref = $(this).data('name'); event.preventDefault(); updateIssuesMeta(url, '', issueId, id, isChecked).then(reload); }); @@ -2900,6 +2914,7 @@ $(document).ready(async () => { }) .get() .join(); + console.log("this:",this) const {url} = this.dataset; if (elementId === '0' && url.substr(-9) === '/assignee') { elementId = ''; @@ -2958,7 +2973,6 @@ $(document).ready(async () => { initCodeView(); initVueApp(); initVueUploader(); - initObsUploader(); initVueDataset(); initVueEditAbout(); initVueEditTopic(); @@ -3702,6 +3716,63 @@ function initVueEditAbout() { } function initVueDataset() { + if($('#dataset_check').length){ + if(location.search.indexOf('recommend=true')!==-1){ + $('#dataset_check').checkbox('set checked') + }else{ + $('#dataset_check').checkbox('set unchecked') + } + $('#dataset_check').checkbox({ + onChecked: function() { + if(location.search){ + const params = new URLSearchParams(location.search) + if(params.has('recommend')){ + params.delete('recommend') + location.href = AppSubUrl + location.pathname + '?' 
+ params.toString() + '&recommend=true' + }else{ + location.href = `${window.config.AppSubUrl}/admin/datasets${location.search}&recommend=true` + } + }else{ + location.href = `${window.config.AppSubUrl}/admin/datasets?recommend=true` + } + }, + onUnchecked: function() { + if(location.search=='?recommend=true'){ + location.href = AppSubUrl + location.pathname + }else{ + const params = new URLSearchParams(location.search) + params.delete('recommend') + location.href = AppSubUrl + location.pathname + '?' + params.toString() + } + }, + }) + } + $('.set_dataset').on('click', function(){ + const $this = $(this); + let link = $this.data('url') + $.ajax({ + url:link, + type:'PUT', + success:function(res){ + console.log(res) + if(res.Code==0){ + window.location.href = '/admin/datasets' + }else{ + $('.ui.negative.message').text(res.Message).show().delay(1500).fadeOut(); + } + }, + error: function(xhr){ + // 隐藏 loading + // 只有请求不正常(状态码不为200)才会执行 + $('.ui.negative.message').html(xhr.responseText).show().delay(1500).fadeOut(); + console.log(xhr) + }, + complete:function(xhr){ + // $("#mask").css({"display":"none","z-index":"1"}) + } + }) + + }); const el = document.getElementById('dataset-base'); if (!el) { return; @@ -3759,24 +3830,14 @@ function initVueDataset() { if(document.getElementById('dataset-file-desc')){ dataset_file_desc = document.getElementById('dataset-file-desc').value } - - // getEditInit(){ - // if($('#dataset-edit-value')){ - // $this = $('#dataset-edit-value') - // this.ruleForm.title = $this.data('edit-title') || '' - // this.ruleForm.description = $this.data('edit-description') || '' - // this.ruleForm.category = $this.data('edit-category') || '' - // this.ruleForm.task = $this.data('edit-task') || '' - // this.ruleForm.license = $this.data('edit-license') || '' - // this.ruleForm.id = $this.data('edit-id')|| '' - // } - // }, new Vue({ delimiters: ['${', '}'], el, data: { suburl: AppSubUrl, url:'', + checked:false, + clusterFlag:false, type:0, desc:'', descfile:'', @@ -3854,8 +3915,7 @@ function initVueDataset() { }, }, components: { - MinioUploader, - ObsUploader + MinioUploader }, mounted(){ // if(document.getElementById('postPath')){ @@ -3874,6 +3934,12 @@ function initVueDataset() { this.getCurrentRepoDataset(this.repolink,this.cloudbrainType) } + const params = new URLSearchParams(location.search) + if (params.has('recommend') && params.get('recommend')=='true'){ + this.checked = true + }else{ + this.checked = false + } }, created(){ if(document.getElementById('postPath')){ @@ -3914,6 +3980,30 @@ function initVueDataset() { } }, + handleCheckedChange(val){ + if(val){ + if(location.search){ + const params = new URLSearchParams(location.search) + if(params.has('recommend')){ + params.delete('recommend') + let search = params.toString() + location.href = `${AppSubUrl}/explore/datasets?${search}&recommend=${val}` + }else{ + location.href = `${AppSubUrl}/explore/datasets${location.search}&recommend=${val}` + } + }else{ + location.href = `${AppSubUrl}/explore/datasets?recommend=${val}` + } + }else{ + if(location.search=='?recommend=true'){ + location.href = AppSubUrl + location.pathname + }else{ + const params = new URLSearchParams(location.search) + params.delete('recommend') + location.href = AppSubUrl + location.pathname + '?' 
+ params.toString() + } + } + }, createDataset(formName){ let _this = this this.$refs[formName].validate((valid)=>{ @@ -3953,7 +4043,8 @@ function initVueDataset() { }, gotoUpload(repolink,datsetId){ - location.href = `${AppSubUrl}${repolink}/datasets/attachments/upload?datasetId=${datsetId}` + // location.href = `${AppSubUrl}${repolink}/datasets/attachments/upload?datasetId=${datsetId}` + window.open(`${AppSubUrl}${repolink}/datasets/attachments/upload?datasetId=${datsetId}`,'_blank') }, gotoDataset(datsetUrl){ location.href = datsetUrl @@ -3961,6 +4052,9 @@ function initVueDataset() { gotoAnnotate(repolink,uuid,type){ location.href = `${AppSubUrl}${repolink}/datasets/label/${uuid}?type=${type}` }, + setcluster(val){ + this.clusterFlag = val + }, uploadGpu(){ this.type=0 }, @@ -4380,19 +4474,6 @@ function initVueDataAnalysis() { render: h => h(DataAnalysis) }); } -// 新增 -function initObsUploader() { - const el = document.getElementById('obsUploader'); - if (!el) { - return; - } - - new Vue({ - el: '#obsUploader', - components: {ObsUploader}, - template: '' - }); -} function initVueWxAutorize() { const el = document.getElementById('WxAutorize'); if (!el) { diff --git a/web_src/less/openi.less b/web_src/less/openi.less index c195bac38..2a7d86665 100644 --- a/web_src/less/openi.less +++ b/web_src/less/openi.less @@ -248,7 +248,22 @@ footer .column{margin-bottom:0!important; padding-bottom:0!important;} .icon-bind{background-position: -550px -52px;} .icon-unbind{background-position: -568px -52px;} .CREATING, .STOPPING, .DELETING, .STARTING, i.WAITING ,.INIT,.KILLING{display:inline-block;background-image:url('/img/loading.gif');background-repeat:no-repeat;width:16px;height:16px;background-size:16px 16px;margin-right:5px;} - +.icon-to-top{ + background:url("/img/icons.svg"); + background-position: -540px -208px; + width: 30px; + height: 30px; + display: inline-block; + cursor: pointer; +} +.icon-to-bottom{ + background:url("/img/icons.svg"); + background-position: -574px -208px; + width: 30px; + height: 30px; + display: inline-block; + cursor: pointer; +} i.COMPLETED,i.SUCCEEDED{display:inline-block;width:18px;height:18px;background:url("/img/icons.svg");background-position: -496px -52px;background-position: -441px -52px;} .text_over{ overflow: hidden;
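
Both the admin checkbox handler and the explore page's handleCheckedChange above rebuild location.search by hand to flip the recommend flag while preserving the other filters. The underlying pattern as a self-contained sketch (the helper name is illustrative):

// Set or clear one query parameter, keep the rest, then reload.
function toggleParam(name, on) {
    const params = new URLSearchParams(location.search);
    params.delete(name);                    // drop any stale value first
    if (on) params.set(name, 'true');
    const query = params.toString();
    location.href = location.pathname + (query ? `?${query}` : '');
}

// toggleParam('recommend', checked) yields e.g. ?q=cv&recommend=true without clobbering ?q=cv.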
+ {{$.i18n.Tr "repo.modelarts.code_version"}} + +
+ {{.BranchName}} +
+
{{$.i18n.Tr "cloudbrain.gpu_type"}}