@@ -576,7 +576,7 @@ func AttachmentsByDatasetOption(datasets []int64, opts *SearchDatasetOptions) ([
 		cond = cond.And(builder.Gt{"attachment.decompress_state": 0})
 	}
 	if opts.PublicOnly {
-		cond = cond.And(builder.Gt{"attachment.decompress_state": 0})
+		cond = cond.And(builder.Eq{"attachment.is_private": false})
 	}
 	if opts.CloudBrainType >= 0 {
 		cond = cond.And(builder.Eq{"attachment.type": opts.CloudBrainType})
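A hedged usage sketch of the corrected filter (the caller variable datasetIDs and the slice return type are assumptions; PublicOnly and CloudBrainType are the fields exercised in the hunk above):

	// Hypothetical call site: list only public attachments of the given datasets for CloudBrain type 0.
	attachments, err := models.AttachmentsByDatasetOption(datasetIDs, &models.SearchDatasetOptions{
		PublicOnly:     true, // matches attachment.is_private = false after this change
		CloudBrainType: 0,
	})
	if err != nil {
		log.Error("AttachmentsByDatasetOption failed: %v", err)
	}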
@@ -1971,48 +1971,46 @@ type DatasetInfo struct {
 	Name string
 }
 
-func GetDatasetInfo(uuidStr string) (map[string]DatasetInfo, string, string, error) {
-	var datasetNames, fileNames string
+func GetDatasetInfo(uuidStr string) (map[string]DatasetInfo, string, error) {
+	var datasetNames string
 	uuids := strings.Split(uuidStr, ";")
 	if len(uuids) > setting.MaxDatasetNum {
 		log.Error("the dataset count(%d) exceed the limit", len(uuids))
-		return nil, datasetNames, fileNames, errors.New("the dataset count exceed the limit")
+		return nil, datasetNames, errors.New("the dataset count exceed the limit")
 	}
 	datasetInfos := make(map[string]DatasetInfo)
-	for i, uuid := range uuids {
-		attach, err := GetAttachmentByUUID(uuid)
-		if err != nil {
-			log.Error("GetAttachmentByUUID failed: %v", err)
-			return nil, datasetNames, fileNames, err
-		}
+	attachs, err := GetAttachmentsByUUIDs(uuids)
+	if err != nil {
+		log.Error("GetAttachmentsByUUIDs failed: %v", err)
+		return nil, datasetNames, err
+	}
+	for i, attach := range attachs {
+		//todo: check same name
 		for _, datasetInfo := range datasetInfos {
 			if attach.Name == datasetInfo.Name {
 				log.Error("the dataset name is same: %v", attach.Name)
-				return nil, datasetNames, fileNames, errors.New("the dataset name is same")
+				return nil, datasetNames, errors.New("the dataset name is same")
 			}
 		}
 		dataLocalPath := setting.Attachment.Minio.RealPath +
 			setting.Attachment.Minio.Bucket + "/" +
 			setting.Attachment.Minio.BasePath +
-			AttachmentRelativePath(uuid) +
-			uuid
+			AttachmentRelativePath(attach.UUID) +
+			attach.UUID
 		fileName := strings.TrimSuffix(strings.TrimSuffix(strings.TrimSuffix(attach.Name, ".zip"), ".tar.gz"), ".tgz")
-		datasetInfos[uuid] = DatasetInfo{
+		datasetInfos[attach.UUID] = DatasetInfo{
 			DataLocalPath: dataLocalPath,
 			Name:          fileName,
 		}
 		if i == 0 {
 			datasetNames = attach.Name
-			fileNames = fileName
 		} else {
 			datasetNames += ";" + attach.Name
-			fileNames += "|" + fileName
 		}
 	}
-	return datasetInfos, datasetNames, fileNames, nil
+	return datasetInfos, datasetNames, nil
 }
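GetAttachmentsByUUIDs itself is not shown in this change; a minimal sketch of such a batch lookup in a xorm-backed Gitea fork, assuming the package-level engine x and an Attachment model with a uuid column (the real helper may differ):

	// Sketch only: one IN query instead of one GetAttachmentByUUID call per UUID.
	func GetAttachmentsByUUIDs(uuids []string) ([]*Attachment, error) {
		if len(uuids) == 0 {
			return []*Attachment{}, nil
		}
		attachments := make([]*Attachment, 0, len(uuids))
		return attachments, x.In("uuid", uuids).Find(&attachments)
	}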
@@ -406,7 +406,7 @@ func RestartTask(ctx *context.Context, task *models.Cloudbrain, newID *string) e
 		return errors.New("no such resourceSpec")
 	}
-	datasetInfos, _, _, err := models.GetDatasetInfo(task.Uuid)
+	datasetInfos, _, err := models.GetDatasetInfo(task.Uuid)
 	if err != nil {
 		log.Error("GetDatasetInfo failed:%v", err, ctx.Data["MsgID"])
 		return err
@@ -261,7 +261,7 @@ func CloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) {
 		}
 	}
-	datasetInfos, datasetNames, fileNames, err := models.GetDatasetInfo(uuids)
+	datasetInfos, datasetNames, err := models.GetDatasetInfo(uuids)
 	if err != nil {
 		log.Error("GetDatasetInfo failed: %v", err, ctx.Data["MsgID"])
 		cloudBrainNewDataPrepare(ctx)
@@ -272,7 +272,7 @@ func CloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) {
 	command := cloudbrain.Command
 	if jobType == string(models.JobTypeTrain) {
 		tpl = tplCloudBrainTrainJobNew
-		commandTrain, err := getTrainJobCommand(form, fileNames)
+		commandTrain, err := getTrainJobCommand(form)
 		if err != nil {
 			log.Error("getTrainJobCommand failed: %v", err)
 			ctx.RenderWithErr(err.Error(), tpl, &form)
@@ -2036,7 +2036,7 @@ func BenchMarkAlgorithmCreate(ctx *context.Context, form auth.CreateCloudBrainFo
 	}
 	uuid := childInfo.Attachment
-	datasetInfos, datasetNames, _, err := models.GetDatasetInfo(uuid)
+	datasetInfos, datasetNames, err := models.GetDatasetInfo(uuid)
 	if err != nil {
 		log.Error("GetDatasetInfo failed: %v", err, ctx.Data["MsgID"])
 		cloudBrainNewDataPrepare(ctx)
@@ -2164,7 +2164,7 @@ func ModelBenchmarkCreate(ctx *context.Context, form auth.CreateCloudBrainForm)
 		command = fmt.Sprintf(cloudbrain.BrainScoreCommand, getBrainRegion(benchmarkChildTypeID), displayJobName, trimSpaceNewlineInString(form.Description))
 	}
-	datasetInfos, datasetNames, _, err := models.GetDatasetInfo(uuid)
+	datasetInfos, datasetNames, err := models.GetDatasetInfo(uuid)
 	if err != nil {
 		log.Error("GetDatasetInfo failed: %v", err, ctx.Data["MsgID"])
 		cloudBrainNewDataPrepare(ctx)
@@ -2246,7 +2246,7 @@ func CloudBrainTrainJobNew(ctx *context.Context) {
 	ctx.HTML(http.StatusOK, tplCloudBrainTrainJobNew)
 }
 
-func getTrainJobCommand(form auth.CreateCloudBrainForm, fileNames string) (string, error) {
+func getTrainJobCommand(form auth.CreateCloudBrainForm) (string, error) {
 	var command string
 	bootFile := strings.TrimSpace(form.BootFile)
 	params := form.Params
@@ -2270,8 +2270,6 @@ func getTrainJobCommand(form auth.CreateCloudBrainForm, fileNames string) (strin
 		}
 	}
-	param += " --dataset_list='" + fileNames + "'"
 	command += "python /code/" + bootFile + param + " > " + cloudbrain.ModelMountPath + "/" + form.DisplayJobName + "-" + cloudbrain.LogFile
 	return command, nil
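With the dataset_list flag dropped, the command assembled above reduces to the boot file invocation plus any user parameters, redirected to the job log; schematically (angle-bracket names are placeholders for the variables in the line above, not literal values):

	python /code/<bootFile><params> > <ModelMountPath>/<DisplayJobName>-<LogFile>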