@@ -2,6 +2,7 @@ package models | |||
import ( | |||
"encoding/json" | |||
"errors" | |||
"fmt" | |||
"strconv" | |||
"strings" | |||
@@ -1944,3 +1945,51 @@ func CloudbrainAllStatic(opts *CloudbrainsOptions) ([]*CloudbrainInfo, int64, er | |||
} | |||
return cloudbrains, count, nil | |||
} | |||
// DatasetInfo describes one dataset attachment resolved for a cloudbrain
// task: where the data lives on the Minio-backed local storage, and the
// name used for its mount point.
type DatasetInfo struct {
	// DataLocalPath is the host path of the dataset object, built from the
	// Minio RealPath/Bucket/BasePath plus the attachment's relative path.
	DataLocalPath string
	// Name is the attachment name with a trailing archive suffix
	// (.zip/.tar.gz/.tgz) stripped; used as the per-dataset mount directory.
	Name string
}
func GetDatasetInfo(uuidStr string) (map[string]DatasetInfo, string, error) { | |||
var datasetNames string | |||
uuids := strings.Split(uuidStr, ";") | |||
if len(uuids) > setting.MaxDatasetNum { | |||
log.Error("the dataset count(%d) exceed the limit", len(uuids)) | |||
return nil, datasetNames, errors.New("the dataset count exceed the limit") | |||
} | |||
datasetInfos := make(map[string]DatasetInfo) | |||
for i, uuid := range uuids { | |||
attach, err := GetAttachmentByUUID(uuid) | |||
if err != nil { | |||
log.Error("GetAttachmentByUUID failed: %v", err) | |||
return nil, datasetNames, err | |||
} | |||
for _, datasetInfo := range datasetInfos { | |||
if attach.Name == datasetInfo.Name { | |||
log.Error("the dataset name is same: %v", attach.Name) | |||
return nil, datasetNames, errors.New("the dataset name is same") | |||
} | |||
} | |||
dataLocalPath := setting.Attachment.Minio.RealPath + | |||
setting.Attachment.Minio.Bucket + "/" + | |||
setting.Attachment.Minio.BasePath + | |||
AttachmentRelativePath(uuid) + | |||
uuid | |||
datasetInfos[uuid] = DatasetInfo{ | |||
DataLocalPath: dataLocalPath, | |||
Name: strings.TrimSuffix(strings.TrimSuffix(strings.TrimSuffix(attach.Name, ".zip"), ".tar.gz"), ".tgz"), | |||
} | |||
if i == 0 { | |||
datasetNames = attach.Name | |||
} else { | |||
datasetNames += ";" + attach.Name | |||
} | |||
} | |||
return datasetInfos, datasetNames, nil | |||
} |
@@ -44,11 +44,6 @@ var ( | |||
TrainResourceSpecs *models.ResourceSpecs | |||
) | |||
// DatasetInfo holds the locally-mounted path and mount-point name of one
// dataset attachment.
//
// Deprecated: superseded by models.DatasetInfo (see models.GetDatasetInfo);
// kept here only until remaining callers are migrated.
type DatasetInfo struct {
	// DataLocalPath is the host path of the dataset on Minio-backed storage.
	DataLocalPath string
	// Name is the attachment name without its archive suffix.
	Name string
}
type GenerateCloudBrainTaskReq struct { | |||
Ctx *context.Context | |||
DisplayJobName string | |||
@@ -69,7 +64,7 @@ type GenerateCloudBrainTaskReq struct { | |||
CommitID string | |||
Uuids string | |||
DatasetNames string | |||
DatasetInfos map[string]DatasetInfo | |||
DatasetInfos map[string]models.DatasetInfo | |||
BenchmarkTypeID int | |||
BenchmarkChildTypeID int | |||
ResourceSpecId int | |||
@@ -394,11 +389,6 @@ func IsBenchmarkJob(jobType string) bool { | |||
} | |||
func RestartTask(ctx *context.Context, task *models.Cloudbrain, newID *string) error { | |||
dataActualPath := setting.Attachment.Minio.RealPath + | |||
setting.Attachment.Minio.Bucket + "/" + | |||
setting.Attachment.Minio.BasePath + | |||
models.AttachmentRelativePath(task.Uuid) + | |||
task.Uuid | |||
jobName := task.JobName | |||
var resourceSpec *models.ResourceSpec | |||
@@ -416,6 +406,70 @@ func RestartTask(ctx *context.Context, task *models.Cloudbrain, newID *string) e | |||
return errors.New("no such resourceSpec") | |||
} | |||
datasetInfos, _, err := models.GetDatasetInfo(task.Uuid) | |||
if err != nil { | |||
log.Error("GetDatasetInfo failed:%v", err, ctx.Data["MsgID"]) | |||
return err | |||
} | |||
volumes := []models.Volume{ | |||
{ | |||
HostPath: models.StHostPath{ | |||
Path: storage.GetMinioPath(jobName, CodeMountPath+"/"), | |||
MountPath: CodeMountPath, | |||
ReadOnly: false, | |||
}, | |||
}, | |||
{ | |||
HostPath: models.StHostPath{ | |||
Path: storage.GetMinioPath(jobName, ModelMountPath+"/"), | |||
MountPath: ModelMountPath, | |||
ReadOnly: false, | |||
}, | |||
}, | |||
{ | |||
HostPath: models.StHostPath{ | |||
Path: storage.GetMinioPath(jobName, BenchMarkMountPath+"/"), | |||
MountPath: BenchMarkMountPath, | |||
ReadOnly: true, | |||
}, | |||
}, | |||
{ | |||
HostPath: models.StHostPath{ | |||
Path: storage.GetMinioPath(jobName, Snn4imagenetMountPath+"/"), | |||
MountPath: Snn4imagenetMountPath, | |||
ReadOnly: true, | |||
}, | |||
}, | |||
{ | |||
HostPath: models.StHostPath{ | |||
Path: storage.GetMinioPath(jobName, BrainScoreMountPath+"/"), | |||
MountPath: BrainScoreMountPath, | |||
ReadOnly: true, | |||
}, | |||
}, | |||
} | |||
if len(datasetInfos) == 1 { | |||
volumes = append(volumes, models.Volume{ | |||
HostPath: models.StHostPath{ | |||
Path: datasetInfos[task.Uuid].DataLocalPath, | |||
MountPath: DataSetMountPath, | |||
ReadOnly: true, | |||
}, | |||
}) | |||
} else { | |||
for _, dataset := range datasetInfos { | |||
volumes = append(volumes, models.Volume{ | |||
HostPath: models.StHostPath{ | |||
Path: dataset.DataLocalPath, | |||
MountPath: DataSetMountPath + "/" + dataset.Name, | |||
ReadOnly: true, | |||
}, | |||
}) | |||
} | |||
} | |||
createTime := timeutil.TimeStampNow() | |||
jobResult, err := CreateJob(jobName, models.CreateJobParams{ | |||
JobName: jobName, | |||
@@ -438,50 +492,7 @@ func RestartTask(ctx *context.Context, task *models.Cloudbrain, newID *string) e | |||
UseNNI: false, | |||
}, | |||
}, | |||
Volumes: []models.Volume{ | |||
{ | |||
HostPath: models.StHostPath{ | |||
Path: storage.GetMinioPath(jobName, CodeMountPath+"/"), | |||
MountPath: CodeMountPath, | |||
ReadOnly: false, | |||
}, | |||
}, | |||
{ | |||
HostPath: models.StHostPath{ | |||
Path: dataActualPath, | |||
MountPath: DataSetMountPath, | |||
ReadOnly: true, | |||
}, | |||
}, | |||
{ | |||
HostPath: models.StHostPath{ | |||
Path: storage.GetMinioPath(jobName, ModelMountPath+"/"), | |||
MountPath: ModelMountPath, | |||
ReadOnly: false, | |||
}, | |||
}, | |||
{ | |||
HostPath: models.StHostPath{ | |||
Path: storage.GetMinioPath(jobName, BenchMarkMountPath+"/"), | |||
MountPath: BenchMarkMountPath, | |||
ReadOnly: true, | |||
}, | |||
}, | |||
{ | |||
HostPath: models.StHostPath{ | |||
Path: storage.GetMinioPath(jobName, Snn4imagenetMountPath+"/"), | |||
MountPath: Snn4imagenetMountPath, | |||
ReadOnly: true, | |||
}, | |||
}, | |||
{ | |||
HostPath: models.StHostPath{ | |||
Path: storage.GetMinioPath(jobName, BrainScoreMountPath+"/"), | |||
MountPath: BrainScoreMountPath, | |||
ReadOnly: true, | |||
}, | |||
}, | |||
}, | |||
Volumes: volumes, | |||
}) | |||
if err != nil { | |||
log.Error("CreateJob failed:%v", err.Error(), ctx.Data["MsgID"]) | |||
@@ -504,6 +515,7 @@ func RestartTask(ctx *context.Context, task *models.Cloudbrain, newID *string) e | |||
JobType: task.JobType, | |||
Type: task.Type, | |||
Uuid: task.Uuid, | |||
DatasetName: task.DatasetName, | |||
Image: task.Image, | |||
GpuQueue: task.GpuQueue, | |||
ResourceSpecId: task.ResourceSpecId, | |||
@@ -51,6 +51,7 @@ const ( | |||
Lines = 500 | |||
TrainUrl = "train_url" | |||
DataUrl = "data_url" | |||
DatasUrl = "datas_url" | |||
ResultUrl = "result_url" | |||
CkptUrl = "ckpt_url" | |||
DeviceTarget = "device_target" | |||
@@ -274,11 +274,11 @@ func CloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) { | |||
} | |||
} | |||
datasetInfos, datasetNames, err := getDatasetInfo(uuids) | |||
datasetInfos, datasetNames, err := models.GetDatasetInfo(uuids) | |||
if err != nil { | |||
log.Error("getDatasetInfo failed: %v", err, ctx.Data["MsgID"]) | |||
log.Error("GetDatasetInfo failed: %v", err, ctx.Data["MsgID"]) | |||
cloudBrainNewDataPrepare(ctx) | |||
ctx.RenderWithErr("getDatasetInfo failed", tpl, &form) | |||
ctx.RenderWithErr("GetDatasetInfo failed", tpl, &form) | |||
return | |||
} | |||
@@ -605,12 +605,6 @@ func cloudBrainShow(ctx *context.Context, tpName base.TplName, jobType models.Jo | |||
} | |||
} | |||
attachment, err := models.GetAttachmentByUUID(task.Uuid) | |||
if err == nil { | |||
ctx.Data["datasetname"] = attachment.Name | |||
} else { | |||
ctx.Data["datasetname"] = "" | |||
} | |||
ctx.Data["task"] = task | |||
ctx.Data["jobName"] = task.JobName | |||
@@ -2042,11 +2036,11 @@ func BenchMarkAlgorithmCreate(ctx *context.Context, form auth.CreateCloudBrainFo | |||
} | |||
uuid := childInfo.Attachment | |||
datasetInfos, datasetNames, err := getDatasetInfo(uuid) | |||
datasetInfos, datasetNames, err := models.GetDatasetInfo(uuid) | |||
if err != nil { | |||
log.Error("getDatasetInfo failed: %v", err, ctx.Data["MsgID"]) | |||
log.Error("GetDatasetInfo failed: %v", err, ctx.Data["MsgID"]) | |||
cloudBrainNewDataPrepare(ctx) | |||
ctx.RenderWithErr("getDatasetInfo failed", tplCloudBrainBenchmarkNew, &form) | |||
ctx.RenderWithErr("GetDatasetInfo failed", tplCloudBrainBenchmarkNew, &form) | |||
return | |||
} | |||
@@ -2170,11 +2164,11 @@ func ModelBenchmarkCreate(ctx *context.Context, form auth.CreateCloudBrainForm) | |||
command = fmt.Sprintf(cloudbrain.BrainScoreCommand, getBrainRegion(benchmarkChildTypeID), displayJobName, trimSpaceNewlineInString(form.Description)) | |||
} | |||
datasetInfos, datasetNames, err := getDatasetInfo(uuid) | |||
datasetInfos, datasetNames, err := models.GetDatasetInfo(uuid) | |||
if err != nil { | |||
log.Error("getDatasetInfo failed: %v", err, ctx.Data["MsgID"]) | |||
log.Error("GetDatasetInfo failed: %v", err, ctx.Data["MsgID"]) | |||
cloudBrainNewDataPrepare(ctx) | |||
ctx.RenderWithErr("getDatasetInfo failed", tpl, &form) | |||
ctx.RenderWithErr("GetDatasetInfo failed", tpl, &form) | |||
return | |||
} | |||
@@ -2315,46 +2309,3 @@ func GetBenchmarkTypes(ctx *context.Context) *models.BenchmarkTypes { | |||
} | |||
return benchmarkTypesMap[lang] | |||
} | |||
func getDatasetInfo(uuidStr string) (map[string]cloudbrain.DatasetInfo, string, error) { | |||
var datasetNames string | |||
uuids := strings.Split(uuidStr, ";") | |||
if len(uuids) > setting.MaxDatasetNum { | |||
log.Error("the dataset count(%d) exceed the limit", len(uuids)) | |||
return nil, datasetNames, errors.New("the dataset count exceed the limit") | |||
} | |||
datasetInfos := make(map[string]cloudbrain.DatasetInfo) | |||
for i, uuid := range uuids { | |||
attach, err := models.GetAttachmentByUUID(uuid) | |||
if err != nil { | |||
log.Error("GetAttachmentByUUID failed: %v", err) | |||
return nil, datasetNames, err | |||
} | |||
for _, datasetInfo := range datasetInfos { | |||
if attach.Name == datasetInfo.Name { | |||
log.Error("the dataset name is same: %v", attach.Name) | |||
return nil, datasetNames, errors.New("the dataset name is same") | |||
} | |||
} | |||
dataLocalPath := setting.Attachment.Minio.RealPath + | |||
setting.Attachment.Minio.Bucket + "/" + | |||
setting.Attachment.Minio.BasePath + | |||
models.AttachmentRelativePath(uuid) + | |||
uuid | |||
datasetInfos[uuid] = cloudbrain.DatasetInfo{ | |||
DataLocalPath: dataLocalPath, | |||
Name: strings.TrimSuffix(strings.TrimSuffix(strings.TrimSuffix(attach.Name, ".zip"), ".tar.gz"), ".tgz"), | |||
} | |||
if i == 0 { | |||
datasetNames = attach.Name | |||
} else { | |||
datasetNames += ";" + attach.Name | |||
} | |||
} | |||
return datasetInfos, datasetNames, nil | |||
} |
@@ -288,10 +288,16 @@ func NotebookShow(ctx *context.Context) { | |||
datasetDownloadLink := "" | |||
if ctx.IsSigned { | |||
if task.Uuid != "" && task.UserID == ctx.User.ID { | |||
attachment, err := models.GetAttachmentByUUID(task.Uuid) | |||
uuidList := strings.Split(task.Uuid, ";") | |||
for _, uuidStr := range uuidList { | |||
attachment, err := models.GetAttachmentByUUID(uuidStr) | |||
if err == nil { | |||
datasetDownloadLink = datasetDownloadLink + attachment.S3DownloadURL() + ";" | |||
} | |||
} | |||
datasetName, err := GetDatasetNameByUUID(task.Uuid) | |||
if err == nil { | |||
task.DatasetName = attachment.Name | |||
datasetDownloadLink = attachment.S3DownloadURL() | |||
task.DatasetName = datasetName | |||
} | |||
} | |||
} | |||
@@ -981,7 +987,6 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm) | |||
ctx.RenderWithErr("GetDatasetNameByUUID error", tplModelArtsTrainJobNew, &form) | |||
return | |||
} | |||
dataPath := GetObsDataPathByUUID(uuid) | |||
count, err := models.GetCloudbrainTrainJobCountByUserID(ctx.User.ID) | |||
if err != nil { | |||
@@ -1092,6 +1097,14 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm) | |||
Value: modelarts.Ascend, | |||
}) | |||
} | |||
dataUrl, datasUrl, isMultiDataset := GetObsDataPathByUUID(uuid) | |||
dataPath := dataUrl | |||
if isMultiDataset { | |||
param = append(param, models.Parameter{ | |||
Label: modelarts.DatasUrl, | |||
Value: datasUrl, | |||
}) | |||
} | |||
//save param config | |||
if isSaveParam == "on" { | |||
@@ -1240,7 +1253,6 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ | |||
ctx.RenderWithErr("GetDatasetNameByUUID error", tplModelArtsTrainJobVersionNew, &form) | |||
return | |||
} | |||
dataPath := GetObsDataPathByUUID(uuid) | |||
canNewJob, _ := canUserCreateTrainJobVersion(ctx, latestTask.UserID) | |||
if !canNewJob { | |||
@@ -1326,6 +1338,14 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ | |||
Value: modelarts.Ascend, | |||
}) | |||
} | |||
dataUrl, datasUrl, isMultiDataset := GetObsDataPathByUUID(uuid) | |||
dataPath := dataUrl | |||
if isMultiDataset { | |||
param = append(param, models.Parameter{ | |||
Label: modelarts.DatasUrl, | |||
Value: datasUrl, | |||
}) | |||
} | |||
//save param config | |||
if isSaveParam == "on" { | |||
@@ -2445,20 +2465,27 @@ func TrainJobDownloadLogFile(ctx *context.Context) { | |||
ctx.Resp.Header().Set("Cache-Control", "max-age=0") | |||
http.Redirect(ctx.Resp, ctx.Req.Request, url, http.StatusMovedPermanently) | |||
} | |||
func GetObsDataPathByUUID(uuid string) string { | |||
var obsDataPath string | |||
uuidList := strings.Split(uuid, ";") | |||
for k, _ := range uuidList { | |||
if k <= 0 { | |||
obsDataPath = "/" + setting.Bucket + "/" + setting.BasePath + path.Join(uuid[0:1], uuid[1:2]) + "/" + uuid + uuid + "/" | |||
} | |||
if k > 0 { | |||
obsDataPathNext := ";" + "s3://" + setting.Bucket + "/" + setting.BasePath + path.Join(uuid[0:1], uuid[1:2]) + "/" + uuid + uuid + "/" | |||
obsDataPath = obsDataPath + obsDataPathNext | |||
func GetObsDataPathByUUID(uuidStr string) (string, string, bool) { | |||
var dataUrl string | |||
var datasUrl string | |||
uuidList := strings.Split(uuidStr, ";") | |||
if len(uuidList) <= 1 { | |||
dataUrl = "/" + setting.Bucket + "/" + setting.BasePath + path.Join(uuidStr[0:1], uuidStr[1:2]) + "/" + uuidStr + uuidStr + "/" | |||
datasUrl = "s3://" + setting.Bucket + "/" + setting.BasePath + path.Join(uuidStr[0:1], uuidStr[1:2]) + "/" + uuidStr + uuidStr + "/" | |||
isMultiDataset := false | |||
return dataUrl, datasUrl, isMultiDataset | |||
} else { | |||
for k, uuid := range uuidList { | |||
if k > 0 { | |||
datasUrlNext := ";" + "s3://" + setting.Bucket + "/" + setting.BasePath + path.Join(uuid[0:1], uuid[1:2]) + "/" + uuid + uuid + "/" | |||
datasUrl = datasUrl + datasUrlNext | |||
} | |||
} | |||
firstDataset := uuidList[0] | |||
dataUrl = "/" + setting.Bucket + "/" + setting.BasePath + path.Join(firstDataset[0:1], firstDataset[1:2]) + "/" + firstDataset + firstDataset + "/" | |||
isMultiDataset := true | |||
return dataUrl, datasUrl, isMultiDataset | |||
} | |||
return obsDataPath | |||
} | |||
func GetDatasetNameByUUID(uuid string) (string, error) { | |||
uuidList := strings.Split(uuid, ";") | |||
@@ -474,7 +474,7 @@ | |||
<td class="ti-text-form-content"> | |||
<div class="text-span text-span-w" id="{{.VersionName}}-mirror"> | |||
{{$.datasetname}} | |||
{{.DatasetName}} | |||
</div> | |||
</td> | |||
</tr> | |||
@@ -409,7 +409,7 @@ | |||
<td class="ti-text-form-content"> | |||
<div class="text-span text-span-w" | |||
id="{{.VersionName}}-BenchmarkTypeName"> | |||
{{$.datasetname}} | |||
{{.DatasetName}} | |||
</div> | |||
</td> | |||
</tr> | |||