diff --git a/models/attachment.go b/models/attachment.go
index ea8f1645f..bb99ba467 100755
--- a/models/attachment.go
+++ b/models/attachment.go
@@ -43,6 +43,7 @@ type Attachment struct {
 	Name            string
 	Description     string `xorm:"TEXT"`
 	DownloadCount   int64  `xorm:"DEFAULT 0"`
+	UseNumber       int64  `xorm:"DEFAULT 0"`
 	Size            int64  `xorm:"DEFAULT 0"`
 	IsPrivate       bool   `xorm:"DEFAULT false"`
 	DecompressState int32  `xorm:"DEFAULT 0"`
@@ -107,6 +108,15 @@ func (a *Attachment) IncreaseDownloadCount() error {
 	return nil
 }
 
+func IncreaseAttachmentUseNumber(uuid string) error {
+	// Update use number.
+	if _, err := x.Exec("UPDATE `attachment` SET use_number=use_number+1 WHERE uuid=?", uuid); err != nil {
+		return fmt.Errorf("increase attachment use count: %v", err)
+	}
+
+	return nil
+}
+
 func (a *Attachment) UpdateDatasetUpdateUnix() error {
 	// Update download count.
 	if _, err := x.Exec("UPDATE `dataset` SET updated_unix="+fmt.Sprint(time.Now().Unix())+" WHERE id=?", a.DatasetID); err != nil {
diff --git a/models/cloudbrain.go b/models/cloudbrain.go
index 810e68d30..21d9768f7 100755
--- a/models/cloudbrain.go
+++ b/models/cloudbrain.go
@@ -1150,6 +1150,17 @@ type LogFile struct {
 	Name string
 }
 
+type GetTrainJobMetricStatisticResult struct {
+	TrainJobResult
+	Interval    int       `json:"interval"` // query interval, in minutes
+	MetricsInfo []Metrics `json:"metrics"`  // monitoring details
+}
+
+type Metrics struct {
+	Metric string   `json:"metric"` // monitored metric item
+	Value  []string `json:"value"`  // series of collected metric values, each element a string
+}
+
 func Cloudbrains(opts *CloudbrainsOptions) ([]*CloudbrainInfo, int64, error) {
 	sess := x.NewSession()
 	defer sess.Close()
@@ -1395,6 +1406,8 @@ func CreateCloudbrain(cloudbrain *Cloudbrain) (err error) {
 	if _, err = x.NoAutoTime().Insert(cloudbrain); err != nil {
 		return err
 	}
+
+	go IncreaseDatasetUseCount(cloudbrain.Uuid)
 	return nil
 }
 
@@ -1629,6 +1642,8 @@ func RestartCloudbrain(old *Cloudbrain, new *Cloudbrain) (err error) {
 		return err
 	}
 
+	go IncreaseDatasetUseCount(new.Uuid)
+
 	return nil
 }
 func CloudbrainAll(opts *CloudbrainsOptions) ([]*CloudbrainInfo, int64, error) {
diff --git a/models/dataset.go b/models/dataset.go
index d4a7748d3..726a5010f 100755
--- a/models/dataset.go
+++ b/models/dataset.go
@@ -25,8 +25,9 @@ type Dataset struct {
 	Category      string
 	Description   string `xorm:"TEXT"`
 	DownloadTimes int64
-	NumStars      int   `xorm:"INDEX NOT NULL DEFAULT 0"`
-	Recommend     bool  `xorm:"INDEX NOT NULL DEFAULT false"`
+	UseCount      int64 `xorm:"DEFAULT 0"`
+	NumStars      int   `xorm:"INDEX NOT NULL DEFAULT 0"`
+	Recommend     bool  `xorm:"INDEX NOT NULL DEFAULT false"`
 	License       string
 	Task          string
 	ReleaseID     int64 `xorm:"INDEX"`
@@ -212,7 +213,7 @@ func SearchDatasetByCondition(opts *SearchDatasetOptions, cond builder.Cond) (Da
 	defer sess.Close()
 
 	datasets := make(DatasetList, 0, opts.PageSize)
-	selectColumnsSql := "distinct dataset.id,dataset.title, dataset.status, dataset.category, dataset.description, dataset.download_times, dataset.license, dataset.task, dataset.release_id, dataset.user_id, dataset.repo_id, dataset.created_unix,dataset.updated_unix,dataset.num_stars,dataset.recommend"
+	selectColumnsSql := "distinct dataset.id,dataset.title, dataset.status, dataset.category, dataset.description, dataset.download_times, dataset.license, dataset.task, dataset.release_id, dataset.user_id, dataset.repo_id, dataset.created_unix,dataset.updated_unix,dataset.num_stars,dataset.recommend,dataset.use_count"
 
 	count, err := sess.Distinct("dataset.id").Join("INNER", "repository", "repository.id = dataset.repo_id").
Join("INNER", "attachment", "attachment.dataset_id=dataset.id"). @@ -350,6 +351,17 @@ func UpdateDataset(ctx DBContext, rel *Dataset) error { return err } +func IncreaseDatasetUseCount(uuid string) { + + IncreaseAttachmentUseNumber(uuid) + + attachment, _ := GetAttachmentByUUID(uuid) + if attachment != nil { + x.Exec("UPDATE `dataset` SET use_count=use_count+1 WHERE id=?", attachment.DatasetID) + } + +} + // GetDatasetByID returns Dataset with given ID. func GetDatasetByID(id int64) (*Dataset, error) { rel := new(Dataset) diff --git a/models/repo_list.go b/models/repo_list.go index 253cc968c..92654c11c 100755 --- a/models/repo_list.go +++ b/models/repo_list.go @@ -218,6 +218,8 @@ const ( SearchOrderByForks SearchOrderBy = "num_forks ASC" SearchOrderByForksReverse SearchOrderBy = "num_forks DESC" SearchOrderByDownloadTimes SearchOrderBy = "download_times DESC" + SearchOrderByUseCount SearchOrderBy = "use_count ASC" + SearchOrderByUseCountReverse SearchOrderBy = "use_count DESC" SearchOrderByHot SearchOrderBy = "(num_watches + num_stars + num_forks + clone_cnt) DESC" SearchOrderByActive SearchOrderBy = "(num_issues + num_pulls + num_commit) DESC" SearchOrderByWatches SearchOrderBy = "num_watches DESC" diff --git a/modules/modelarts/resty.go b/modules/modelarts/resty.go index 2f7d08c35..961e02538 100755 --- a/modules/modelarts/resty.go +++ b/modules/modelarts/resty.go @@ -1119,3 +1119,44 @@ sendjob: return &result, nil } + +func GetTrainJobMetricStatistic(jobID, versionID, podName string) (*models.GetTrainJobMetricStatisticResult, error) { + checkSetting() + client := getRestyClient() + var result models.GetTrainJobMetricStatisticResult + + retry := 0 + +sendjob: + res, err := client.R(). + SetAuthToken(TOKEN). + SetResult(&result). + Get(HOST + "/v1/" + setting.ProjectID + urlTrainJob + "/" + jobID + "/versions/" + versionID + "/pod/" + podName + "/metric-statistic") + + if err != nil { + return nil, fmt.Errorf("resty GetTrainJobMetricStatistic: %v", err) + } + + if res.StatusCode() == http.StatusUnauthorized && retry < 1 { + retry++ + _ = getToken() + goto sendjob + } + + if res.StatusCode() != http.StatusOK { + var temp models.ErrorResult + if err = json.Unmarshal([]byte(res.String()), &temp); err != nil { + log.Error("json.Unmarshal failed(%s): %v", res.String(), err.Error()) + return &result, fmt.Errorf("json.Unmarshal failed(%s): %v", res.String(), err.Error()) + } + log.Error("GetTrainJobMetricStatistic failed(%d):%s(%s)", res.StatusCode(), temp.ErrorCode, temp.ErrorMsg) + return &result, fmt.Errorf("GetTrainJobMetricStatistic failed(%d):%s(%s)", res.StatusCode(), temp.ErrorCode, temp.ErrorMsg) + } + + if !result.IsSuccess { + log.Error("GetTrainJobMetricStatistic(%s) failed", jobID) + return &result, fmt.Errorf("获取任务资源占用情况失败:%s", result.ErrorMsg) + } + + return &result, nil +} diff --git a/modules/storage/obs.go b/modules/storage/obs.go index 03349864a..33730b72c 100755 --- a/modules/storage/obs.go +++ b/modules/storage/obs.go @@ -564,3 +564,17 @@ func ObsCreateObject(path string) error { return nil } + +func GetObsLogFileName(prefix string) (string, error) { + input := &obs.ListObjectsInput{} + input.Bucket = setting.Bucket + input.Prefix = prefix + + output, err := ObsCli.ListObjects(input) + if err != nil { + log.Error("PutObject failed:", err.Error()) + return "", err + } + + return output.Contents[0].Key, nil +} diff --git a/options/locale/locale_en-US.ini b/options/locale/locale_en-US.ini index e38fbe09f..016aa4951 100755 --- a/options/locale/locale_en-US.ini +++ 
@@ -898,6 +898,10 @@ search_dataset = Search Dataset Files
 unzip_tooltips = If it has not been decompressed for a long time, please check whether the compressed package has encrypted files or file errors
 zip_failed = Decompression failed, please check whether the compressed package is encrypted or contact technical support
 dataset_desc = The description should not exceed 1024 characters
+unzip_successed=Unzip Succeeded
+unzip_failed=Unzip Failed
+unzip_stared=Unzipping
+unzip_status=Unzip Status
 [repo]
 owner = Owner
 repo_name = Repository Name
@@ -1141,6 +1145,7 @@ modelarts.infer_job.model_version = Model/Version
 modelarts.infer_job.select_model = Select Model
 modelarts.infer_job.boot_file_helper=The startup file is the entry file for your program execution and must end in.py.Such as inference.py, main.py, example/inference. Py, case/main.py.
 modelarts.infer_job.tooltip = The model has been deleted and cannot be viewed.
+modelarts.download_log=Download log file
 
 debug_task_not_created = Debug task has not been created
 
@@ -1447,7 +1452,8 @@ issues.filter_sort.feweststars = Fewest stars
 issues.filter_sort.mostforks = Most forks
 issues.filter_sort.fewestforks = Fewest forks
 issues.filter_sort.downloadtimes = Most downloaded
-issues.filter_sort.moststars = Most star
+issues.filter_sort.mostusecount = Most cited
+issues.filter_sort.fewestusecount=Fewest cited
 issues.action_open = Open
 issues.action_close = Close
 issues.action_label = Label
diff --git a/options/locale/locale_zh-CN.ini b/options/locale/locale_zh-CN.ini
index e5fb97c8e..440f6746a 100755
--- a/options/locale/locale_zh-CN.ini
+++ b/options/locale/locale_zh-CN.ini
@@ -903,6 +903,10 @@ search_dataset = 搜索数据集文件
 unzip_tooltips = 如果长时间未解压,请检查压缩包是否有加密文件或者文件错误
 zip_failed = 解压失败,请检查压缩包是否有加密或者联系技术支持人员。
 dataset_desc = 描述字数不超过1024个字符
+unzip_successed=解压成功
+unzip_failed=解压失败
+unzip_stared=解压中
+unzip_status=解压状态
 
 [repo]
 owner=拥有者
@@ -1151,6 +1155,7 @@ modelarts.infer_job.model_version = 模型/版本
 modelarts.infer_job.select_model = 选择模型
 modelarts.infer_job.boot_file_helper=启动文件是您程序执行的入口文件,必须是以.py结尾的文件。比如inference.py、main.py、example/inference.py、case/main.py。
 modelarts.infer_job.tooltip = 该模型已删除,无法查看。
+modelarts.download_log=下载日志文件
 
 debug_task_not_created = 未创建过调试任务
 
@@ -1453,12 +1458,13 @@ issues.filter_sort.mostcomment=最多评论
 issues.filter_sort.leastcomment=最少评论
 issues.filter_sort.nearduedate=到期日从近到远
 issues.filter_sort.farduedate=到期日从远到近
-issues.filter_sort.moststars=点赞由多到少
 issues.filter_sort.feweststars=点赞由少到多
 issues.filter_sort.mostforks=派生由多到少
 issues.filter_sort.fewestforks=派生由少到多
 issues.filter_sort.downloadtimes=下载次数
 issues.filter_sort.moststars=收藏数量
+issues.filter_sort.mostusecount=最多引用
+issues.filter_sort.fewestusecount=最少引用
 issues.action_open=开启
 issues.action_close=关闭
 issues.action_label=标签
diff --git a/routers/admin/dataset.go b/routers/admin/dataset.go
index 6b29b06ff..d1a8f2780 100644
--- a/routers/admin/dataset.go
+++ b/routers/admin/dataset.go
@@ -61,6 +61,10 @@ func Datasets(ctx *context.Context) {
 		orderBy = models.SearchOrderByForksReverse
 	case "fewestforks":
 		orderBy = models.SearchOrderByForks
+	case "mostusecount":
+		orderBy = models.SearchOrderByUseCountReverse
+	case "fewestusecount":
+		orderBy = models.SearchOrderByUseCount
 	default:
 		ctx.Data["SortType"] = "recentupdate"
 		orderBy = models.SearchOrderByRecentUpdated
diff --git a/routers/api/v1/api.go b/routers/api/v1/api.go
index 9a05aa8ae..d6d3b001a 100755
--- a/routers/api/v1/api.go
+++ b/routers/api/v1/api.go
@@ -922,6 +922,7 @@ func RegisterRoutes(m *macaron.Macaron) {
 					m.Post("/del_version", repo.DelTrainJobVersion)
 					m.Post("/stop_version", repo.StopTrainJobVersion)
 					m.Get("/model_list", repo.ModelList)
+					m.Get("/metric_statistics", repo.TrainJobGetMetricStatistic)
 				})
 			})
 			m.Group("/inference-job", func() {
diff --git a/routers/api/v1/repo/modelarts.go b/routers/api/v1/repo/modelarts.go
index 9e4edea03..c14976282 100755
--- a/routers/api/v1/repo/modelarts.go
+++ b/routers/api/v1/repo/modelarts.go
@@ -462,3 +462,46 @@ func ResultList(ctx *context.APIContext) {
 		"PageIsCloudBrain": true,
 	})
 }
+
+func TrainJobGetMetricStatistic(ctx *context.APIContext) {
+	var (
+		err error
+	)
+
+	var jobID = ctx.Params(":jobid")
+	var versionName = ctx.Query("version_name")
+
+	result, err := trainJobGetMetricStatistic(jobID, versionName)
+	if err != nil {
+		log.Error("trainJobGetMetricStatistic(%s) failed:%v", jobID, err.Error())
+		return
+	}
+
+	ctx.JSON(http.StatusOK, map[string]interface{}{
+		"JobID":       jobID,
+		"Interval":    result.Interval,
+		"MetricsInfo": result.MetricsInfo,
+	})
+}
+
+func trainJobGetMetricStatistic(jobID string, versionName string) (*models.GetTrainJobMetricStatisticResult, error) {
+	task, err := models.GetCloudbrainByJobIDAndVersionName(jobID, versionName)
+	if err != nil {
+		log.Error("GetCloudbrainByJobIDAndVersionName(%s) failed:%v", jobID, err.Error())
+		return nil, err
+	}
+
+	resultLogFile, err := modelarts.GetTrainJobLogFileNames(jobID, strconv.FormatInt(task.VersionID, 10))
+	if err != nil {
+		log.Error("GetTrainJobLogFileNames(%s) failed:%v", jobID, err.Error())
+		return nil, err
+	}
+
+	result, err := modelarts.GetTrainJobMetricStatistic(jobID, strconv.FormatInt(task.VersionID, 10), resultLogFile.LogFileList[0])
+	if err != nil {
+		log.Error("GetTrainJobMetricStatistic(%s) failed:%v", jobID, err.Error())
+		return nil, err
+	}
+
+	return result, err
+}
diff --git a/routers/home.go b/routers/home.go
index 38acffb2f..a34302876 100755
--- a/routers/home.go
+++ b/routers/home.go
@@ -322,6 +322,10 @@ func ExploreDatasets(ctx *context.Context) {
 		orderBy = models.SearchOrderByStarsReverse
 	case "feweststars":
 		orderBy = models.SearchOrderByStars
+	case "mostusecount":
+		orderBy = models.SearchOrderByUseCountReverse
+	case "fewestusecount":
+		orderBy = models.SearchOrderByUseCount
 	case "default":
 		orderBy = models.SearchOrderByDefault
 	default:
diff --git a/routers/repo/cloudbrain.go b/routers/repo/cloudbrain.go
index a5dd52956..b034d3d12 100755
--- a/routers/repo/cloudbrain.go
+++ b/routers/repo/cloudbrain.go
@@ -1150,6 +1150,7 @@ func CloudBrainDownloadModel(ctx *context.Context) {
 		ctx.ServerError("PresignedGetURL", err)
 		return
 	}
+	ctx.Resp.Header().Set("Cache-Control", "max-age=0")
 	http.Redirect(ctx.Resp, ctx.Req.Request, url, http.StatusMovedPermanently)
 }
 
diff --git a/routers/repo/dataset.go b/routers/repo/dataset.go
index 0e57fe1a0..9d7f4a4d7 100755
--- a/routers/repo/dataset.go
+++ b/routers/repo/dataset.go
@@ -172,6 +172,10 @@ func DatasetIndex(ctx *context.Context) {
 	for _, attachment := range pageAttachments {
 		uploader, _ := models.GetUserByID(attachment.UploaderID)
 		attachment.Uploader = uploader
+		if !strings.HasSuffix(attachment.Name, ".zip") {
+			attachment.DecompressState = 3 // not a zip file
+		}
+
 	}
 
 	ctx.Data["Page"] = pager
diff --git a/routers/repo/modelarts.go b/routers/repo/modelarts.go
index 95ca8df62..44476ab1e 100755
--- a/routers/repo/modelarts.go
+++ b/routers/repo/modelarts.go
@@ -2251,7 +2251,6 @@ func ModelDownload(ctx *context.Context) {
 	versionName := ctx.Query("version_name")
 	parentDir := ctx.Query("parent_dir")
fileName := ctx.Query("file_name") - log.Info("DownloadSingleModelFile start.") task, err := models.GetCloudbrainByJobIDAndVersionName(jobID, versionName) if err != nil { log.Error("GetCloudbrainByJobID(%s) failed:%v", task.JobName, err.Error()) @@ -2259,7 +2258,6 @@ func ModelDownload(ctx *context.Context) { } path := strings.TrimPrefix(path.Join(setting.TrainJobModelPath, task.JobName, setting.OutPutPath, versionName, parentDir, fileName), "/") - log.Info("Download path is:%s", path) url, err := storage.GetObsCreateSignedUrlByBucketAndKey(setting.Bucket, path) if err != nil { @@ -2267,6 +2265,7 @@ func ModelDownload(ctx *context.Context) { ctx.ServerError("GetObsCreateSignedUrl", err) return } + ctx.Resp.Header().Set("Cache-Control", "max-age=0") http.Redirect(ctx.Resp, ctx.Req.Request, url, http.StatusMovedPermanently) } @@ -2278,13 +2277,11 @@ func ResultDownload(ctx *context.Context) { versionName := ctx.Query("version_name") parentDir := ctx.Query("parent_dir") fileName := ctx.Query("file_name") - log.Info("DownloadResult start.") task := ctx.Cloudbrain if err != nil { ctx.Data["error"] = err.Error() } path := strings.TrimPrefix(path.Join(setting.TrainJobModelPath, task.JobName, "result/", versionName, parentDir, fileName), "/") - log.Info("Download path is:%s", path) url, err := storage.GetObsCreateSignedUrlByBucketAndKey(setting.Bucket, path) if err != nil { @@ -2292,6 +2289,7 @@ func ResultDownload(ctx *context.Context) { ctx.ServerError("GetObsCreateSignedUrl", err) return } + ctx.Resp.Header().Set("Cache-Control", "max-age=0") http.Redirect(ctx.Resp, ctx.Req.Request, url, http.StatusMovedPermanently) } func DeleteJobStorage(jobName string) error { @@ -2390,3 +2388,35 @@ func SetJobCount(ctx *context.Context) { } ctx.Data["jobCount"] = jobCount } + +func TrainJobDownloadLogFile(ctx *context.Context) { + var ( + err error + ) + + var jobID = ctx.Params(":jobid") + versionName := ctx.Query("version_name") + task, err := models.GetCloudbrainByJobIDAndVersionName(jobID, versionName) + if err != nil { + log.Error("GetCloudbrainByJobIDAndVersionName(%s) failed:%v", task.JobName, err.Error(), ctx.Data["msgID"]) + ctx.ServerError("GetCloudbrainByJobIDAndVersionName", err) + return + } + + prefix := strings.TrimPrefix(path.Join(setting.TrainJobModelPath, task.JobName, modelarts.LogPath, versionName), "/") + "/job" + key, err := storage.GetObsLogFileName(prefix) + if err != nil { + log.Error("GetObsLogFileName(%s) failed:%v", jobID, err.Error(), ctx.Data["msgID"]) + ctx.ServerError("GetObsLogFileName", err) + return + } + + url, err := storage.GetObsCreateSignedUrlByBucketAndKey(setting.Bucket, key) + if err != nil { + log.Error("GetObsCreateSignedUrlByBucketAndKey failed: %v", err.Error(), ctx.Data["msgID"]) + ctx.ServerError("GetObsCreateSignedUrlByBucketAndKey", err) + return + } + ctx.Resp.Header().Set("Cache-Control", "max-age=0") + http.Redirect(ctx.Resp, ctx.Req.Request, url, http.StatusMovedPermanently) +} diff --git a/routers/routes/routes.go b/routers/routes/routes.go index 7ba8fe61a..278c40204 100755 --- a/routers/routes/routes.go +++ b/routers/routes/routes.go @@ -1136,6 +1136,7 @@ func RegisterRoutes(m *macaron.Macaron) { m.Post("/stop", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo.TrainJobStop) m.Post("/del", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo.TrainJobDel) m.Get("/model_download", cloudbrain.AdminOrJobCreaterRightForTrain, repo.ModelDownload) + m.Get("/download_log_file", cloudbrain.AdminOrJobCreaterRightForTrain, repo.TrainJobDownloadLogFile) 
m.Get("/create_version", reqWechatBind, cloudbrain.AdminOrJobCreaterRightForTrain, repo.TrainJobNewVersion) m.Post("/create_version", reqWechatBind, cloudbrain.AdminOrJobCreaterRightForTrain, bindIgnErr(auth.CreateModelArtsTrainJobForm{}), repo.TrainJobCreateVersion) }) diff --git a/templates/explore/datasets.tmpl b/templates/explore/datasets.tmpl index 5739ebb10..57942c24a 100644 --- a/templates/explore/datasets.tmpl +++ b/templates/explore/datasets.tmpl @@ -1,133 +1,164 @@ {{template "base/head" .}}