@@ -60,7 +60,7 @@ type Cloudbrain struct {
 	ContainerIp string
 	CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
 	UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`
-	Duration int `xorm:"INDEX duration"`
+	Duration int64 `xorm:"INDEX duration"`
 	TrainJobDuration string
 	DeletedAt time.Time `xorm:"deleted"`
 	CanDebug bool `xorm:"-"`
@@ -933,7 +933,7 @@ func SetCloudbrainStatusByJobID(jobID string, status CloudbrainStatus) (err erro
 	return
 }

-func SetTrainJobStatusByJobID(jobID string, status string, duration int, trainjobduration string) (err error) {
+func SetTrainJobStatusByJobID(jobID string, status string, duration int64, trainjobduration string) (err error) {
 	cb := &Cloudbrain{JobID: jobID, Status: string(status), Duration: duration, TrainJobDuration: trainjobduration}
 	_, err = x.Cols("status", "duration", "train_job_duration").Where("cloudbrain.job_id=?", jobID).Update(cb)
 	return
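For reference, the duration stored here is the raw ModelArts figure in milliseconds, while trainjobduration is its pre-formatted HH:MM:SS form. A minimal standalone sketch of that relationship; the millisecond unit is inferred from the /3600000 arithmetic in the TrainJobIndex/TrainJobShow handlers further down, and formatDuration is a hypothetical helper, not part of this change:

package main

import "fmt"

// formatDuration renders a millisecond duration as HH:MM:SS, mirroring the
// addZero(Duration/3600000) + ":" + ... arithmetic used by the handlers below.
func formatDuration(ms int64) string {
	return fmt.Sprintf("%02d:%02d:%02d", ms/3600000, ms%3600000/60000, ms%60000/1000)
}

func main() {
	var duration int64 = 3725000          // 1h 2m 5s as reported by ModelArts
	fmt.Println(formatDuration(duration)) // prints 01:02:05
	// A caller would then persist both values, e.g.:
	// err := models.SetTrainJobStatusByJobID(jobID, "RUNNING", duration, formatDuration(duration))
}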
@@ -9,8 +9,10 @@ import (
 // RepoStatistic statistic info of all repository
 type RepoStatistic struct {
-	ID int64 `xorm:"pk autoincr"`
-	RepoID int64 `xorm:"unique(s) NOT NULL"`
+	ID int64 `xorm:"pk autoincr"`
+	RepoID int64 `xorm:"unique(s) NOT NULL"`
+	Name string `xorm:"INDEX"`
+	IsPrivate bool
 	Date string `xorm:"unique(s) NOT NULL"`
 	NumWatches int64 `xorm:"NOT NULL DEFAULT 0"`
 	NumWatchesAdded int64 `xorm:"NOT NULL DEFAULT 0"`
@@ -221,7 +221,7 @@ func TransTrainJobStatus(status int) string {
 	case 0:
 		return "UNKNOWN"
 	case 1:
-		return "CREATING"
+		return "INIT"
 	case 2:
 		return "IMAGE_CREATING"
 	case 3:
@@ -237,13 +237,13 @@ func TransTrainJobStatus(status int) string {
 	case 8:
 		return "RUNNING"
 	case 9:
-		return "STOPPED"
+		return "KILLING"
 	case 10:
 		return "COMPLETED"
 	case 11:
 		return "FAILED"
 	case 12:
-		return "STOPPED"
+		return "KILLED"
 	case 13:
 		return "CANCELED"
 	case 14:
@@ -1295,7 +1295,7 @@ func SetRadarMapConfig() {
 	RadarMap.Impact = sec.Key("impact").MustFloat64(0.3)
 	RadarMap.ImpactWatch = sec.Key("impact_watch").MustFloat64(0.1)
-	RadarMap.ImpactStar = sec.Key("impact_star").MustFloat64(0.3)
+	RadarMap.ImpactStar = sec.Key("impact_star").MustFloat64(0.2)
 	RadarMap.ImpactFork = sec.Key("impact_fork").MustFloat64(0.3)
 	RadarMap.ImpactCodeDownload = sec.Key("impact_code_download").MustFloat64(0.2)
 	RadarMap.ImpactComments = sec.Key("impact_comments").MustFloat64(0.1)
@@ -5,7 +5,6 @@
 package storage

 import (
-	"fmt"
 	"io"
 	"path"
 	"strconv"
@@ -18,6 +17,15 @@ import (
 	"code.gitea.io/gitea/modules/setting"
 )

+type FileInfo struct {
+	FileName string `json:"FileName"`
+	ModTime  string `json:"ModTime"`
+	IsDir    bool   `json:"IsDir"`
+	Size     int64  `json:"Size"`
+	ParenDir string `json:"ParenDir"`
+	UUID     string `json:"UUID"`
+}
+
 //check if has the object
 //todo: change the query method
 func ObsHasObject(path string) (bool, error) {
@@ -141,8 +149,7 @@ func ObsDownload(uuid string, fileName string) (io.ReadCloser, error) {
 			output.StorageClass, output.ETag, output.ContentType, output.ContentLength, output.LastModified)
 		return output.Body, nil
 	} else if obsError, ok := err.(obs.ObsError); ok {
-		fmt.Printf("Code:%s\n", obsError.Code)
-		fmt.Printf("Message:%s\n", obsError.Message)
+		log.Error("Code:%s, Message:%s", obsError.Code, obsError.Message)
 		return nil, obsError
 	} else {
 		return nil, err
@@ -160,40 +167,49 @@ func ObsModelDownload(JobName string, fileName string) (io.ReadCloser, error) {
 			output.StorageClass, output.ETag, output.ContentType, output.ContentLength, output.LastModified)
 		return output.Body, nil
 	} else if obsError, ok := err.(obs.ObsError); ok {
-		fmt.Printf("Code:%s\n", obsError.Code)
-		fmt.Printf("Message:%s\n", obsError.Message)
+		log.Error("Code:%s, Message:%s", obsError.Code, obsError.Message)
 		return nil, obsError
 	} else {
 		return nil, err
 	}
 }

-func GetObsListObject(jobName string) ([]string, error) {
-	// jobName = "liuzx202110271830856"
+func GetObsListObject(jobName, parentDir string) ([]FileInfo, error) {
 	input := &obs.ListObjectsInput{}
 	input.Bucket = setting.Bucket
-	input.Prefix = strings.TrimPrefix(path.Join(setting.TrainJobModelPath, jobName, setting.OutPutPath), "/")
-	log.Info("input.Prefix:", input.Prefix)
+	input.Prefix = strings.TrimPrefix(path.Join(setting.TrainJobModelPath, jobName, setting.OutPutPath, parentDir), "/")
 	output, err := ObsCli.ListObjects(input)
-	log.Info("output.Prefix:", output)
-	ModelListArr := make([]string, 0)
+	fileInfos := make([]FileInfo, 0)
 	if err == nil {
-		fmt.Printf("RequestId:%s\n", output.RequestId)
-		for index, val := range output.Contents {
-			fmt.Printf("Content[%d]-OwnerId:%s, ETag:%s, Key:%s, LastModified:%s, Size:%d\n",
-				index, val.Owner.ID, val.ETag, val.Key, val.LastModified, val.Size)
+		for _, val := range output.Contents {
 			str1 := strings.Split(val.Key, "/")
-			ModelList := str1[len(str1)-1]
-			ModelListArr = append(ModelListArr, ModelList)
-			log.Info("ModelListArr.Prefix:", ModelListArr)
+			var isDir bool
+			var fileName, nextParentDir string
+			if strings.HasSuffix(val.Key, "/") {
+				fileName = str1[len(str1)-2]
+				isDir = true
+				nextParentDir = fileName
+				if fileName == parentDir || (fileName + "/") == setting.OutPutPath {
+					continue
+				}
+			} else {
+				fileName = str1[len(str1)-1]
+				isDir = false
+			}
+
+			fileInfo := FileInfo{
+				ModTime:  val.LastModified.Format("2006-01-02 15:04:05"),
+				FileName: fileName,
+				Size:     val.Size,
+				IsDir:    isDir,
+				ParenDir: nextParentDir,
+			}
+			fileInfos = append(fileInfos, fileInfo)
 		}
-		return ModelListArr, err
+		return fileInfos, err
 	} else {
 		if obsError, ok := err.(obs.ObsError); ok {
-			fmt.Println(obsError.Code)
-			fmt.Println(obsError.Message)
-		} else {
-			fmt.Println(err)
+			log.Error("Code:%s, Message:%s", obsError.Code, obsError.Message)
 		}
 		return nil, err
 	}
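A minimal sketch of how the reworked listing can be consumed; listJobOutput is a hypothetical helper, but the fields match the FileInfo struct introduced above and the drill-down pattern used by the TrainJobShowModels handler later in this change:

package repo

import (
	"fmt"

	"code.gitea.io/gitea/modules/log"
	"code.gitea.io/gitea/modules/storage"
)

// listJobOutput walks one level of a train job's model output. Directory entries
// carry ParenDir, so a follow-up call with that value descends into the subdirectory.
func listJobOutput(jobName, parentDir string) {
	infos, err := storage.GetObsListObject(jobName, parentDir)
	if err != nil {
		log.Error("GetObsListObject failed: %v", err)
		return
	}
	for _, fi := range infos {
		if fi.IsDir {
			fmt.Printf("dir  %s (%s)\n", fi.FileName, fi.ModTime)
			// e.g. listJobOutput(jobName, fi.ParenDir) lists this subdirectory
		} else {
			fmt.Printf("file %s, %d bytes (%s)\n", fi.FileName, fi.Size, fi.ModTime)
		}
	}
}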
@@ -222,20 +238,17 @@ func ObsGenMultiPartSignedUrl(uuid string, uploadId string, partNumber int, file
 	return output.SignedUrl, nil
 }

-func GetObsCreateSignedUrl(uuid string, uploadId string, partNumber int, fileName string) (string, error) {
+func GetObsCreateSignedUrl(jobName, parentDir, fileName string) (string, error) {
 	input := &obs.CreateSignedUrlInput{}
 	input.Bucket = setting.Bucket
-	input.Key = strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")
+	input.Key = strings.TrimPrefix(path.Join(setting.TrainJobModelPath, jobName, setting.OutPutPath, parentDir, fileName), "/")
 	input.Expires = 60 * 60
-	input.Method = obs.HttpMethodPut
-	input.QueryParams = map[string]string{
-		"partNumber": com.ToStr(partNumber, 10),
-		"uploadId":   uploadId,
-		//"partSize": com.ToStr(partSize,10),
-	}
+	input.Method = obs.HttpMethodGet
+	reqParams := make(map[string]string)
+	reqParams["response-content-disposition"] = "attachment; filename=\"" + fileName + "\""
+	input.QueryParams = reqParams
 	output, err := ObsCli.CreateSignedUrl(input)
 	if err != nil {
 		log.Error("CreateSignedUrl failed:", err.Error())
@@ -64,6 +64,8 @@ func GetModelArtsTrainJob(ctx *context.APIContext) {
 	}

 	job.Status = modelarts.TransTrainJobStatus(result.IntStatus)
+	job.Duration = result.Duration
+	job.TrainJobDuration = result.TrainJobDuration
 	err = models.UpdateJob(job)
 	if err != nil {
 		log.Error("UpdateJob failed:", err)
@@ -510,7 +510,7 @@ func CloudBrainShowModels(ctx *context.Context) {
 		return
 	}

-	var fileInfos []FileInfo
+	var fileInfos []storage.FileInfo
 	err = json.Unmarshal([]byte(dirs), &fileInfos)
 	if err != nil {
 		log.Error("json.Unmarshal failed:%v", err.Error(), ctx.Data["msgID"])
@@ -583,74 +583,6 @@ func CloudBrainDownloadModel(ctx *context.Context) {
 	http.Redirect(ctx.Resp, ctx.Req.Request, url, http.StatusMovedPermanently)
 }

-// func TrainJobloadModel(ctx *context.Context) {
-// 	parentDir := ctx.Query("parentDir")
-// 	fileName := ctx.Query("fileName")
-// 	jobName := ctx.Query("jobName")
-// 	filePath := "jobs/" + jobName + "/model/" + parentDir
-// 	url, err := storage.Attachments.PresignedGetURL(filePath, fileName)
-// 	if err != nil {
-// 		log.Error("PresignedGetURL failed: %v", err.Error(), ctx.Data["msgID"])
-// 		ctx.ServerError("PresignedGetURL", err)
-// 		return
-// 	}
-// 	http.Redirect(ctx.Resp, ctx.Req.Request, url, http.StatusMovedPermanently)
-// }
-
-func TrainJobListModel(ctx *context.Context) {
-	ctx.Data["PageIsCloudBrain"] = true
-	jobID := ctx.Params(":jobid")
-	task, err := models.GetCloudbrainByJobID(jobID)
-	if err != nil {
-		log.Error("no such job!", ctx.Data["err"])
-		ctx.ServerError("no such job:", err)
-		return
-	}
-
-	TrainJobListModel, err := storage.GetObsListObject(task.JobName)
-	log.Info("TrainJobListModel", TrainJobListModel)
-	fmt.Println("TrainJobListModel:", TrainJobListModel)
-	if err != nil {
-		log.Info("get TrainJobListModel failed:", err)
-		return
-	}
-
-	ctx.Data["task"] = task
-	ctx.Data["JobID"] = jobID
-	ctx.Data["ListModel"] = TrainJobListModel
-	ctx.HTML(200, tplModelArtsTrainJobListModel)
-}
-
-func TrainJobDownloadModel(ctx *context.Context) {
-	JobName := ctx.Query("JobName")
-	fileName := ctx.Query("file_name")
-	// JobName = "liuzx202110271830856"
-	// fileName = "Untitled.ipynb"
-
-	body, err := storage.ObsModelDownload(JobName, fileName)
-	if err != nil {
-		log.Info("download error.")
-	} else {
-		defer body.Close()
-		ctx.Resp.Header().Set("Content-Disposition", "attachment; filename="+fileName)
-		ctx.Resp.Header().Set("Content-Type", "application/octet-stream")
-		p := make([]byte, 1024)
-		var readErr error
-		var readCount int
-		// read the object content
-		for {
-			readCount, readErr = body.Read(p)
-			if readCount > 0 {
-				ctx.Resp.Write(p[:readCount])
-				//fmt.Printf("%s", p[:readCount])
-			}
-			if readErr != nil {
-				break
-			}
-		}
-	}
-}
-
 func GetRate(ctx *context.Context) {
 	var jobID = ctx.Params(":jobid")
 	job, err := models.GetCloudbrainByJobID(jobID)
@@ -21,15 +21,6 @@ const (
 	tplDirIndex base.TplName = "repo/datasets/dirs/index"
 )

-type FileInfo struct {
-	FileName string `json:"FileName"`
-	ModTime  string `json:"ModTime"`
-	IsDir    bool   `json:"IsDir"`
-	Size     int64  `json:"Size"`
-	ParenDir string `json:"ParenDir"`
-	UUID     string `json:"UUID"`
-}
-
 type RespGetDirs struct {
 	ResultCode string `json:"resultCode"`
 	FileInfos  string `json:"fileInfos"`
@@ -59,7 +50,7 @@ func DeleteAllUnzipFile(attachment *models.Attachment, parentDir string) {
 		return
 	}

-	var fileInfos []FileInfo
+	var fileInfos []storage.FileInfo
 	err = json.Unmarshal([]byte(dirs), &fileInfos)
 	if err != nil {
 		log.Error("json.Unmarshal failed:", err.Error())
@@ -3,7 +3,6 @@ package repo
 import (
 	"encoding/json"
 	"errors"
-	"fmt"
 	"io"
 	"net/http"
 	"os"
@@ -12,18 +11,18 @@ import (
 	"strings"
 	"time"

-	"code.gitea.io/gitea/modules/git"
-	"code.gitea.io/gitea/modules/modelarts"
-	"code.gitea.io/gitea/modules/obs"
-	"code.gitea.io/gitea/modules/storage"
-	"github.com/unknwon/com"
-
 	"code.gitea.io/gitea/models"
 	"code.gitea.io/gitea/modules/auth"
 	"code.gitea.io/gitea/modules/base"
 	"code.gitea.io/gitea/modules/context"
+	"code.gitea.io/gitea/modules/git"
 	"code.gitea.io/gitea/modules/log"
+	"code.gitea.io/gitea/modules/modelarts"
+	"code.gitea.io/gitea/modules/obs"
 	"code.gitea.io/gitea/modules/setting"
+	"code.gitea.io/gitea/modules/storage"
+	"github.com/unknwon/com"
 )

 const (
@@ -39,7 +38,7 @@ const (
 	tplModelArtsTrainJobIndex base.TplName = "repo/modelarts/trainjob/index"
 	tplModelArtsTrainJobNew   base.TplName = "repo/modelarts/trainjob/new"
 	tplModelArtsTrainJobShow  base.TplName = "repo/modelarts/trainjob/show"
-	tplModelArtsTrainJobListModel base.TplName = "repo/modelarts/trainjob/list_model"
+	tplModelArtsTrainJobShowModels base.TplName = "repo/modelarts/trainjob/models/index"
 )

 // MustEnableDataset check if repository enable internal cb
@@ -492,13 +491,13 @@ func NotebookDel(ctx *context.Context) {
 func TrainJobIndex(ctx *context.Context) {
 	MustEnableModelArts(ctx)

-	can, err := canUserCreateTrainJob(ctx.User.ID)
-	if err != nil {
-		ctx.ServerError("canUserCreateTrainJob", err)
-		return
-	}
-	ctx.Data["CanCreate"] = can
+	//can, err := canUserCreateTrainJob(ctx.User.ID)
+	//if err != nil {
+	//	ctx.ServerError("canUserCreateTrainJob", err)
+	//	return
+	//}
+	//
+	//ctx.Data["CanCreate"] = can

 	repo := ctx.Repo.Repository
 	page := ctx.QueryInt("page")
@@ -506,43 +505,7 @@ func TrainJobIndex(ctx *context.Context) {
 		page = 1
 	}

-	tasks, _, err := models.Cloudbrains(&models.CloudbrainsOptions{
-		ListOptions: models.ListOptions{
-			Page:     page,
-			PageSize: setting.UI.IssuePagingNum,
-		},
-		RepoID: repo.ID,
-		Type:   models.TypeCloudBrainTrainJob,
-	})
-	if err != nil {
-		ctx.ServerError("Cloudbrain", err)
-		return
-	}
-
-	for i := range tasks {
-		TrainJobDetail, err := modelarts.GetTrainJob(tasks[i].Cloudbrain.JobID, strconv.FormatInt(tasks[i].Cloudbrain.VersionID, 10))
-		if TrainJobDetail != nil {
-			TrainJobDetail.CreateTime = time.Unix(int64(TrainJobDetail.LongCreateTime/1000), 0).Format("2006-01-02 15:04:05")
-			if TrainJobDetail.Duration != 0 {
-				TrainJobDetail.TrainJobDuration = addZero(TrainJobDetail.Duration/3600000) + ":" + addZero(TrainJobDetail.Duration%3600000/60000) + ":" + addZero(TrainJobDetail.Duration%60000/1000)
-			} else {
-				TrainJobDetail.TrainJobDuration = "00:00:00"
-			}
-		}
-		if err != nil {
-			log.Error("GetJob(%s) failed:%v", tasks[i].Cloudbrain.JobID, err.Error())
-			return
-		}
-
-		err = models.SetTrainJobStatusByJobID(tasks[i].Cloudbrain.JobID, modelarts.TransTrainJobStatus(TrainJobDetail.IntStatus), int(TrainJobDetail.Duration), string(TrainJobDetail.TrainJobDuration))
-		// err = models.UpdateJob(tasks[i].Cloudbrain)
-		if err != nil {
-			ctx.ServerError("UpdateJob failed", err)
-			return
-		}
-	}
-
-	trainTasks, count, err := models.Cloudbrains(&models.CloudbrainsOptions{
+	tasks, count, err := models.Cloudbrains(&models.CloudbrainsOptions{
 		ListOptions: models.ListOptions{
 			Page:     page,
 			PageSize: setting.UI.IssuePagingNum,
@@ -560,24 +523,24 @@ func TrainJobIndex(ctx *context.Context) {
 	ctx.Data["Page"] = pager
 	ctx.Data["PageIsCloudBrain"] = true
-	ctx.Data["Tasks"] = trainTasks
+	ctx.Data["Tasks"] = tasks
 	ctx.HTML(200, tplModelArtsTrainJobIndex)
 }

 func TrainJobNew(ctx *context.Context) {
 	ctx.Data["PageIsCloudBrain"] = true
-	can, err := canUserCreateTrainJob(ctx.User.ID)
-	if err != nil {
-		ctx.ServerError("canUserCreateTrainJob", err)
-		return
-	}
-
-	if !can {
-		log.Error("the user can not create train-job")
-		ctx.ServerError("the user can not create train-job", fmt.Errorf("the user can not create train-job"))
-		return
-	}
+	//can, err := canUserCreateTrainJob(ctx.User.ID)
+	//if err != nil {
+	//	ctx.ServerError("canUserCreateTrainJob", err)
+	//	return
+	//}
+	//
+	//if !can {
+	//	log.Error("the user can not create train-job")
+	//	ctx.ServerError("the user can not create train-job", fmt.Errorf("the user can not create train-job"))
+	//	return
+	//}
 	t := time.Now()
 	var jobName = cutString(ctx.User.Name, 5) + t.Format("2006010215") + strconv.Itoa(int(t.Unix()))[5:]
@@ -901,12 +864,13 @@ func TrainJobShow(ctx *context.Context) {
 		} else {
 			result.TrainJobDuration = "00:00:00"
 		}
-		err = models.SetTrainJobStatusByJobID(jobID, modelarts.TransTrainJobStatus(result.IntStatus), int(result.Duration), string(result.TrainJobDuration))
+		result.Status = modelarts.TransTrainJobStatus(result.IntStatus)
+		err = models.SetTrainJobStatusByJobID(jobID, result.Status, result.Duration, string(result.TrainJobDuration))
 		if err != nil {
 			ctx.ServerError("UpdateJob failed", err)
 			return
 		}
-		result.Status = modelarts.TransTrainJobStatus(result.IntStatus)
 		result.DatasetName = attach.Name
 	}
@@ -1095,3 +1059,43 @@ func getConfigList(perPage, page int, sortBy, order, searchContent, configType s
 	return list, nil
 }

+func TrainJobShowModels(ctx *context.Context) {
+	ctx.Data["PageIsCloudBrain"] = true
+	jobID := ctx.Params(":jobid")
+	parentDir := ctx.Query("parentDir")
+	dirArray := strings.Split(parentDir, "/")
+
+	task, err := models.GetCloudbrainByJobID(jobID)
+	if err != nil {
+		log.Error("no such job!", ctx.Data["msgID"])
+		ctx.ServerError("no such job:", err)
+		return
+	}
+
+	models, err := storage.GetObsListObject(task.JobName, parentDir)
+	if err != nil {
+		log.Info("get TrainJobListModel failed:", err)
+		ctx.ServerError("GetObsListObject:", err)
+		return
+	}
+
+	ctx.Data["Path"] = dirArray
+	ctx.Data["Dirs"] = models
+	ctx.Data["task"] = task
+	ctx.Data["JobID"] = jobID
+	ctx.HTML(200, tplModelArtsTrainJobShowModels)
+}
+
+func TrainJobDownloadModel(ctx *context.Context) {
+	parentDir := ctx.Query("parentDir")
+	fileName := ctx.Query("fileName")
+	jobName := ctx.Query("jobName")
+
+	url, err := storage.GetObsCreateSignedUrl(jobName, parentDir, fileName)
+	if err != nil {
+		log.Error("GetObsCreateSignedUrl failed: %v", err.Error(), ctx.Data["msgID"])
+		ctx.ServerError("GetObsCreateSignedUrl", err)
+		return
+	}
+
+	http.Redirect(ctx.Resp, ctx.Req.Request, url, http.StatusMovedPermanently)
+}
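Taken together with the /models and /download_model routes registered further down and the dir_list template at the end of this change, the download flow is: the models page links each file to download_model with jobName, parentDir and fileName; TrainJobDownloadModel asks OBS for a one-hour signed GET URL whose response-content-disposition forces a download under the original file name, then redirects the browser to it. A rough sketch of the request URL a client would hit; the repository path and parameter values are illustrative only:

package main

import (
	"fmt"
	"net/url"
)

func main() {
	// Query parameters match what TrainJobDownloadModel reads via ctx.Query.
	q := url.Values{}
	q.Set("jobName", "user12021110112345") // placeholder job name
	q.Set("parentDir", "")                 // "" means the top level of the model output
	q.Set("fileName", "model.ckpt")        // placeholder file name
	fmt.Printf("/some-owner/some-repo/modelarts/train-job/jobid123/download_model?%s\n", q.Encode())
	// The handler answers with a 301 redirect to the OBS signed URL, which expires after 60*60 seconds.
}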
@@ -531,6 +531,8 @@ func Download(ctx *context.Context) {
 		}
 	}

+	ctx.Repo.Repository.IncreaseCloneCnt()
+
 	ctx.ServeFile(archivePath, ctx.Repo.Repository.Name+"-"+refName+ext)
 }
@@ -23,7 +23,7 @@ func RepoStatisticAuto() {
 func RepoStatisticDaily(date string) {
 	log.Info("%s", date)
 	log.Info("begin Repo Statistic")
-	t, _ := time.Parse("2006-01-02", "date")
+	t, _ := time.Parse("2006-01-02", date)
 	if err := models.DeleteRepoStatDaily(date); err != nil {
 		log.Error("DeleteRepoStatDaily failed: %v", err.Error())
 		return
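The fix above is subtle: the old code parsed the literal string "date" rather than the date parameter, so t was always the zero time and the parse error was discarded. A small standalone illustration:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Buggy form: tries to parse the literal "date", fails, and yields the zero time.
	t1, err1 := time.Parse("2006-01-02", "date")
	fmt.Println(t1, err1) // 0001-01-01 00:00:00 +0000 UTC and a parse error

	// Fixed form: parses the value held by the date variable.
	date := "2021-11-01" // example value in the layout the statistic job uses
	t2, err2 := time.Parse("2006-01-02", date)
	fmt.Println(t2, err2) // 2021-11-01 00:00:00 +0000 UTC <nil>
}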
@@ -89,6 +89,8 @@ func RepoStatisticDaily(date string) {
 		repoStat := models.RepoStatistic{
 			RepoID: repo.ID,
 			Date: date,
+			Name: repo.Name,
+			IsPrivate: repo.IsPrivate,
 			NumWatches: int64(repo.NumWatches),
 			NumStars: int64(repo.NumStars),
 			NumDownloads: repo.CloneCnt,
@@ -991,7 +991,7 @@ func RegisterRoutes(m *macaron.Macaron) {
 				m.Post("/stop", reqRepoCloudBrainWriter, repo.TrainJobStop)
 				m.Post("/del", reqRepoCloudBrainWriter, repo.TrainJobDel)
 				m.Get("/log", reqRepoCloudBrainReader, repo.TrainJobGetLog)
-				m.Get("/models", reqRepoCloudBrainReader, repo.TrainJobListModel)
+				m.Get("/models", reqRepoCloudBrainReader, repo.TrainJobShowModels)
 				m.Get("/download_model", reqRepoCloudBrainReader, repo.TrainJobDownloadModel)
 			})
 			m.Get("/create", reqRepoCloudBrainReader, repo.TrainJobNew)
@@ -415,7 +415,7 @@
 	$(".job-status").each((index, job) => {
 		const jobID = job.dataset.jobid;
 		const repoPath = job.dataset.repopath;
-		if (job.textContent.trim() == 'STOPPED') {
+		if (job.textContent.trim() == 'STOPPED' || job.textContent.trim() == 'START_FAILED' || job.textContent.trim() == 'CREATE_FAILED') {
 			return
 		}
@@ -428,7 +428,7 @@
 	$(".job-status").each((index, job) => {
 		const jobID = job.dataset.jobid;
 		const repoPath = job.dataset.repopath;
-		if (job.textContent.trim() == 'STOPPED') {
+		if (job.textContent.trim() == 'STOPPED' || job.textContent.trim() == 'START_FAILED' || job.textContent.trim() == 'CREATE_FAILED') {
 			return
 		}
@@ -436,7 +436,7 @@
 		const jobID = data.JobID
 		const status = data.JobStatus
 		if (status != job.textContent.trim()) {
-			console.log("---------")
 			$('#' + jobID+'-icon').removeClass().addClass(status)
 			$('#' + jobID+ '-text').text(status)
@@ -464,7 +464,9 @@
 	$(".job-status").each((index, job) => {
 		const jobID = job.dataset.jobid;
 		const repoPath = job.dataset.repopath;
-		if (job.textContent.trim() == 'STOPPED') {
+		if (job.textContent.trim() == 'IMAGE_FAILED' || job.textContent.trim() == 'SUBMIT_FAILED' || job.textContent.trim() == 'DELETE_FAILED'
+			|| job.textContent.trim() == 'KILLED' || job.textContent.trim() == 'COMPLETED' || job.textContent.trim() == 'FAILED'
+			|| job.textContent.trim() == 'CANCELED' || job.textContent.trim() == 'LOST') {
 			return
 		}
@@ -493,8 +495,8 @@
 	if(status==="RUNNING"){
 		$('#model-debug').removeClass('disabled')
 		$('#model-debug').addClass('blue')
-		let TrainDuration = runtime(time)
-		$('#model-duration').text(TrainDuration)
+		// let TrainDuration = runtime(time)
+		// $('#model-duration').text(TrainDuration)
@@ -0,0 +1,27 @@
+{{if .Dirs}}
+<table id="repo-files-table" class="ui single line table">
+	<tbody>
+		{{range .Dirs}}
+		<tr>
+			<td class="name four wide">
+				<span class="truncate">
+					<span class="octicon octicon-file-directory"></span>
+					<a class="title" href="{{if .IsDir}}{{$.RepoLink}}/modelarts/train-job/{{$.JobID}}/models?parentDir={{.ParenDir}}{{else}}{{$.RepoLink}}/modelarts/train-job/{{$.JobID}}/download_model?parentDir={{.ParenDir}}&fileName={{.FileName}}&jobName={{$.task.JobName}}{{end}}">
+						<span class="fitted">{{if .IsDir}} {{svg "octicon-file-directory" 16}}{{else}}{{svg "octicon-file" 16}}{{end}}</span> {{.FileName}}
+					</a>
+				</span>
+			</td>
+			<td class="message nine wide">
+				<span class="truncate has-emoji">
+					{{.Size | FileSize}}
+				</span>
+			</td>
+			<td class="text right age three wide">
+				<span class="time-since poping up">{{.ModTime}}</span>
+			</td>
+		</tr>
+		{{end}}
+	</tbody>
+</table>
+{{end}}
@@ -0,0 +1,29 @@
+{{template "base/head" .}}
+<div class="repository dataset dir-list view">
+	{{template "repo/header" .}}
+	<form class="ui container">
+		<div class="ui stackable grid {{if .Error}}hide{{end}}" id="dir-content">
+			<div class="row">
+				<div class="column sixteen wide">
+					<p>
+						{{ range $index, $item := .Path }}<a href='{{$.Link}}/?parentDir={{if gt $index 0}}{{DatasetPathJoin $.Path $index "/"}}{{else}}{{end}}'>{{ $item }}</a><span class="directory-seperator">/</span>{{ end }}
+					</p>
+				</div>
+			</div>
+		</div>
+		<div class="ui grid">
+			<div class="row">
+				<div class="ui sixteen wide column">
+					<div class="dir list">
+						{{template "repo/modelarts/trainjob/models/dir_list" .}}
+					</div>
+				</div>
+			</div>
+		</div>
+	</form>
+</div>
+{{template "base/footer" .}}