{{$.i18n.Tr "cloudbrain.task_delete_confirm"}}
diff --git a/models/action.go b/models/action.go
index d0a763e08..4b6f1dbad 100755
--- a/models/action.go
+++ b/models/action.go
@@ -65,7 +65,6 @@ const (
ActionCreateImage //36
ActionImageRecommend //37
ActionChangeUserAvatar //38
-
)
// Action represents user operation type and other information to
diff --git a/models/cloudbrain.go b/models/cloudbrain.go
index 6797fe332..6135dac40 100755
--- a/models/cloudbrain.go
+++ b/models/cloudbrain.go
@@ -53,8 +53,11 @@ const (
JobFailed CloudbrainStatus = "FAILED"
JobRunning CloudbrainStatus = "RUNNING"
+ ModelSafetyTesting CloudbrainStatus = "TESTING"
+
JobTypeDebug JobType = "DEBUG"
JobTypeBenchmark JobType = "BENCHMARK"
+ JobTypeModelSafety JobType = "MODELSAFETY"
JobTypeSnn4imagenet JobType = "SNN4IMAGENET"
JobTypeBrainScore JobType = "BRAINSCORE"
JobTypeTrain JobType = "TRAIN"
@@ -172,26 +175,26 @@ type Cloudbrain struct {
ImageID string //grampus image_id
AiCenter string //grampus ai center: center_id+center_name
- TrainUrl string //输出模型的obs路径
- BranchName string //分支名称
- Parameters string //传给modelarts的param参数
- BootFile string //启动文件
- DataUrl string //数据集的obs路径
- LogUrl string //日志输出的obs路径
- PreVersionId int64 //父版本的版本id
- FlavorCode string //modelarts上的规格id
- Description string `xorm:"varchar(256)"` //描述
- WorkServerNumber int //节点数
- FlavorName string //规格名称
- EngineName string //引擎名称
- TotalVersionCount int //任务的所有版本数量,包括删除的
- LabelName string //标签名称
- ModelName string //模型名称
- ModelVersion string //模型版本
- CkptName string //权重文件名称
- PreTrainModelUrl string //预训练模型地址
- ResultUrl string //推理结果的obs路径
-
+ TrainUrl string //输出模型的obs路径
+ BranchName string //分支名称
+ Parameters string //传给modelarts的param参数
+ BootFile string //启动文件
+ DataUrl string //数据集的obs路径
+ LogUrl string //日志输出的obs路径
+ PreVersionId int64 //父版本的版本id
+ FlavorCode string //modelarts上的规格id
+ Description string `xorm:"varchar(256)"` //描述
+ WorkServerNumber int //节点数
+ FlavorName string //规格名称
+ EngineName string //引擎名称
+ TotalVersionCount int //任务的所有版本数量,包括删除的
+ LabelName string //标签名称
+ ModelName string //模型名称
+ ModelVersion string //模型版本
+ CkptName string //权重文件名称
+ PreTrainModelUrl string //预训练模型地址
+ ResultUrl string //推理结果的obs路径
+ ResultJson string `xorm:"varchar(4000)"`
User *User `xorm:"-"`
Repo *Repository `xorm:"-"`
BenchmarkType string `xorm:"-"` //算法评测,模型评测
@@ -2005,6 +2008,13 @@ func GetStoppedJobWithNoStartTimeEndTime() ([]*Cloudbrain, error) {
return cloudbrains, x.SQL("select * from cloudbrain where status in (?,?,?,?,?,?,?) and (start_time is null or end_time is null) limit 100", ModelArtsTrainJobCompleted, ModelArtsTrainJobFailed, ModelArtsTrainJobKilled, ModelArtsStopped, JobStopped, JobFailed, JobSucceeded).Find(&cloudbrains)
}
+func GetModelSafetyTestTask() ([]*Cloudbrain, error) {
+ cloudbrains := make([]*Cloudbrain, 0)
+ sess := x.Where("job_type = ?", string(JobTypeModelSafety))
+ err := sess.Find(&cloudbrains)
+ return cloudbrains, err
+}
+
func GetCloudbrainCountByUserID(userID int64, jobType string) (int, error) {
count, err := x.In("status", JobWaiting, JobRunning).And("job_type = ? and user_id = ? and type = ?", jobType, userID, TypeCloudBrainOne).Count(new(Cloudbrain))
return int(count), err
@@ -2019,7 +2029,12 @@ func GetCloudbrainRunCountByRepoID(repoID int64) (int, error) {
}
func GetBenchmarkCountByUserID(userID int64) (int, error) {
- count, err := x.In("status", JobWaiting, JobRunning).And("(job_type = ? or job_type = ? or job_type = ?) and user_id = ? and type = ?", string(JobTypeBenchmark), string(JobTypeBrainScore), string(JobTypeSnn4imagenet), userID, TypeCloudBrainOne).Count(new(Cloudbrain))
+ count, err := x.In("status", JobWaiting, JobRunning).And("(job_type = ? or job_type = ? or job_type = ?) and user_id = ? and type = ?", string(JobTypeBenchmark), string(JobTypeModelSafety), string(JobTypeBrainScore), string(JobTypeSnn4imagenet), userID, TypeCloudBrainOne).Count(new(Cloudbrain))
+ return int(count), err
+}
+
+func GetModelSafetyCountByUserID(userID int64) (int, error) {
+ count, err := x.In("status", JobWaiting, JobRunning).And("job_type = ? and user_id = ?", string(JobTypeModelSafety), userID).Count(new(Cloudbrain))
return int(count), err
}
diff --git a/modules/aisafety/resty.go b/modules/aisafety/resty.go
new file mode 100644
index 000000000..be6468529
--- /dev/null
+++ b/modules/aisafety/resty.go
@@ -0,0 +1,249 @@
+package aisafety
+
+import (
+ "crypto/md5"
+ "encoding/hex"
+ "encoding/json"
+ "fmt"
+ "net/url"
+ "sort"
+ "strings"
+
+ "code.gitea.io/gitea/modules/log"
+
+ "github.com/go-resty/resty/v2"
+)
+
+var (
+ restyClient *resty.Client
+ HOST string
+ KEY string
+)
+
+type TaskReq struct {
+ UnionId string //评测任务ID,唯一标识,由后端生成UUID
+ EvalName string //评测任务名称
+ EvalContent string //评测任务描述
+ TLPath string //
+ Indicators []string //评测指标,由GetAlgorithmList接口返回的指标列表中的title属性值
+ CDPath string
+ CDName string //对抗数据集名称
+ BDPath string
+ BDName string //原数据集名称
+}
+
+type ReturnMsg struct {
+ Code string `json:"code"`
+ Msg string `json:"msg"`
+ Data ReturnData `json:"data"`
+ Times int64 `json:"times"`
+}
+
+type ReturnData struct {
+ ID int `json:"id"`
+ No string `json:"no"`
+ StandardJson string `json:"standardJson"`
+ Code int `json:"code"`
+ Msg string `json:"msg"`
+ Status int `json:"status"`
+}
+
+const (
+ APPID = "1"
+ LogPageSize = 500
+ LogPageTokenExpired = "5m"
+ pageSize = 15
+ Success = "S000"
+)
+
+func getRestyClient() *resty.Client {
+ if restyClient == nil {
+ restyClient = resty.New()
+ }
+ return restyClient
+}
+
+func checkSetting() {
+ if len(HOST) != 0 && len(KEY) != 0 {
+ return
+ }
+ _ = loginCloudbrain()
+}
+
+func loginCloudbrain() error {
+ HOST = "http://221.122.70.196:8081/atp-api"
+ KEY = "1"
+ return nil
+}
+
+func createSign(params map[string]interface{}, signKey string) string {
+ var sceneList []string
+ for k := range params {
+ sceneList = append(sceneList, k)
+ }
+ sort.Strings(sceneList)
+ re := ""
+ for _, key := range sceneList {
+ if params[key] != nil {
+ re += key + "=" + fmt.Sprint(params[key]) + "&"
+ }
+ }
+ re += "key=" + signKey
+ log.Info("sign key:" + re)
+ h := md5.New()
+ h.Write([]byte(re))
+ return strings.ToUpper(hex.EncodeToString(h.Sum(nil)))
+}
+
+func getParams(req TaskReq) (map[string]interface{}, string) {
+ params := make(map[string]interface{})
+ reStr := ""
+
+ params["unionId"] = req.UnionId
+ reStr += "unionId=" + req.UnionId
+ params["evalName"] = req.EvalName
+ reStr += "&evalName=" + req.EvalName
+ params["evalContent"] = req.EvalContent
+ reStr += "&evalContent=" + url.QueryEscape(req.EvalContent)
+ params["TLPath"] = req.TLPath
+ reStr += "&TLPath=" + url.QueryEscape(req.TLPath)
+
+ params["CDName"] = req.CDName
+ reStr += "&CDName=" + url.QueryEscape(req.CDName)
+
+ params["BDName"] = req.BDName
+ reStr += "&BDName=" + url.QueryEscape(req.BDName)
+
+ if req.CDPath != "" {
+ params["CDPath"] = req.CDPath
+ reStr += "&CDPath=" + url.QueryEscape(req.CDPath)
+ }
+ if req.BDPath != "" {
+ params["BDPath"] = req.BDPath
+ reStr += "&BDPath=" + url.QueryEscape(req.BDPath)
+ }
+ indicators := ""
+ if len(req.Indicators) > 0 {
+ for _, tmp := range req.Indicators {
+ indicators += tmp + "|"
+ }
+ }
+ if len(indicators) > 0 {
+ indicators = indicators[0 : len(indicators)-1]
+ }
+
+ params["Indicators"] = indicators
+ log.Info("indicators=" + indicators)
+ reStr += "&Indicators=" + url.QueryEscape(indicators)
+
+ return params, reStr
+}
+
+func CreateSafetyTask(req TaskReq, jsonstr string) (string, error) {
+ checkSetting()
+ client := getRestyClient()
+
+ //reqPara, _ := json.Marshal(body)
+ //log.Warn("job req:", jsonstr)
+
+ params, urlQuerys := getParams(req)
+
+ bodyMap := make(map[string]interface{})
+ //reJsonMap := make(map[string]interface{})
+ bodyMap["resJson"] = jsonstr
+ //bodyMap["externalEvalParam"] = reJsonMap
+
+ //reqPara, _ := json.Marshal(bodyMap)
+ //log.Warn("job req json:", string(reqPara))
+
+ res, err := client.R().
+ SetHeader("Content-Type", "application/json").
+ SetHeader("appId", APPID).
+ SetHeader("sign", createSign(params, KEY)).
+ //SetAuthToken(TOKEN).
+ SetBody(bodyMap).
+ Post(HOST + "/v1/external/eval-standard/create?" + urlQuerys)
+ log.Info("url=" + HOST + "/v1/external/eval-standard/create?" + urlQuerys)
+
+ responseStr := string(res.Body())
+ log.Info("CreateSafetyTask responseStr=" + responseStr + " res code=" + fmt.Sprint(res.StatusCode()))
+
+ if err != nil {
+ return "", fmt.Errorf("resty create job: %s", err)
+ } else {
+ log.Info("result string=" + " res code=" + fmt.Sprint(res.StatusCode()))
+ }
+
+ reMap := make(map[string]interface{})
+
+ err = json.Unmarshal(res.Body(), &reMap)
+ if err == nil && reMap["code"] == "0" {
+ dataMap := (reMap["data"]).(map[string]interface{})
+ return fmt.Sprint(dataMap["serialNo"]), nil
+ }
+ return "", nil
+}
+
+func GetAlgorithmList() (map[string]interface{}, error) {
+ checkSetting()
+ client := getRestyClient()
+ params := make(map[string]interface{})
+
+ jsonResult := make(map[string]interface{})
+ sign := createSign(params, KEY)
+
+ res, err := client.R().
+ SetHeader("Content-Type", "application/json").
+ SetHeader("appId", APPID).
+ SetHeader("sign", sign).
+ Get(HOST + "/v1/external/eval-standard/algorithmList")
+
+ log.Info("url=" + HOST + "/v1/external/eval-standard/algorithmList" + " sign=" + sign + " appId=" + APPID)
+
+ jsonerr := json.Unmarshal(res.Body(), &jsonResult)
+ if jsonerr == nil {
+ log.Info("jsonResult code=" + fmt.Sprint(jsonResult["msg"]))
+ }
+ responseStr := string(res.Body())
+ log.Info("GetAlgorithmList responseStr=" + responseStr + " res code=" + fmt.Sprint(res.StatusCode()))
+
+ if err != nil {
+ log.Info("error =" + err.Error())
+ return nil, fmt.Errorf("resty GetJob: %v", err)
+ } else {
+ reMap := make(map[string]interface{})
+ err = json.Unmarshal(res.Body(), &reMap)
+ if err == nil && reMap["code"] == "0" {
+ return reMap, nil
+ } else {
+ return nil, fmt.Errorf("get error,code not 0")
+ }
+ }
+
+}
+
+func GetTaskStatus(jobID string) (*ReturnMsg, error) {
+ checkSetting()
+ client := getRestyClient()
+ var reMap ReturnMsg
+ params := make(map[string]interface{})
+ params["serialNo"] = jobID
+
+ res, err := client.R().
+ SetHeader("Content-Type", "application/json").
+ SetHeader("appId", APPID).
+ SetHeader("sign", createSign(params, KEY)).
+ SetResult(&reMap).
+ Get(HOST + "/v1/external/eval-standard/query?serialNo=" + jobID)
+
+ log.Info("url=" + HOST + "/v1/external/eval-standard/query?serialNo=" + jobID)
+ responseStr := string(res.Body())
+ log.Info("GetTaskStatus responseStr=" + responseStr + " res code=" + fmt.Sprint(res.StatusCode()))
+
+ if err != nil {
+ log.Info("error =" + err.Error())
+ return nil, fmt.Errorf("Get task status error: %v", err)
+ } else {
+ return &reMap, nil
+ }
+}
diff --git a/modules/cloudbrain/cloudbrain.go b/modules/cloudbrain/cloudbrain.go
index 4e527b6bf..e57bd8d7e 100755
--- a/modules/cloudbrain/cloudbrain.go
+++ b/modules/cloudbrain/cloudbrain.go
@@ -20,19 +20,19 @@ import (
const (
//Command = `pip3 install jupyterlab==2.2.5 -i https://pypi.tuna.tsinghua.edu.cn/simple;service ssh stop;jupyter lab --no-browser --ip=0.0.0.0 --allow-root --notebook-dir="/code" --port=80 --LabApp.token="" --LabApp.allow_origin="self https://cloudbrain.pcl.ac.cn"`
//CommandBenchmark = `echo "start benchmark";python /code/test.py;echo "end benchmark"`
- CommandBenchmark = `cd /benchmark && bash run_bk.sh >/model/benchmark-log.txt`
- CodeMountPath = "/code"
- DataSetMountPath = "/dataset"
- ModelMountPath = "/model"
+ CommandBenchmark = `cd /benchmark && bash run_bk.sh >/model/benchmark-log.txt`
+ CodeMountPath = "/code"
+ DataSetMountPath = "/dataset"
+ ModelMountPath = "/model"
PretrainModelMountPath = "/pretrainmodel"
- LogFile = "log.txt"
- BenchMarkMountPath = "/benchmark"
- BenchMarkResourceID = 1
- Snn4imagenetMountPath = "/snn4imagenet"
- BrainScoreMountPath = "/brainscore"
- TaskInfoName = "/taskInfo"
- Snn4imagenetCommand = `/opt/conda/bin/python /snn4imagenet/testSNN_script.py --modelname '%s' --modelpath '/dataset' --modeldescription '%s' >/model/benchmark-log.txt`
- BrainScoreCommand = `bash /brainscore/brainscore_test_par4shSrcipt.sh -b '%s' -n '%s' -p '/dataset' -d '%s' >/model/benchmark-log.txt`
+ LogFile = "log.txt"
+ BenchMarkMountPath = "/benchmark"
+ BenchMarkResourceID = 1
+ Snn4imagenetMountPath = "/snn4imagenet"
+ BrainScoreMountPath = "/brainscore"
+ TaskInfoName = "/taskInfo"
+ Snn4imagenetCommand = `/opt/conda/bin/python /snn4imagenet/testSNN_script.py --modelname '%s' --modelpath '/dataset' --modeldescription '%s' >/model/benchmark-log.txt`
+ BrainScoreCommand = `bash /brainscore/brainscore_test_par4shSrcipt.sh -b '%s' -n '%s' -p '/dataset' -d '%s' >/model/benchmark-log.txt`
SubTaskName = "task1"
@@ -405,7 +405,7 @@ func GenerateTask(req GenerateCloudBrainTaskReq) error {
}
func IsBenchmarkJob(jobType string) bool {
- return string(models.JobTypeBenchmark) == jobType || string(models.JobTypeBrainScore) == jobType || string(models.JobTypeSnn4imagenet) == jobType
+ return string(models.JobTypeModelSafety) == jobType || string(models.JobTypeBenchmark) == jobType || string(models.JobTypeBrainScore) == jobType || string(models.JobTypeSnn4imagenet) == jobType
}
func GetWaitingCloudbrainCount(cloudbrainType int, computeResource string, jobTypes ...models.JobType) int64 {
diff --git a/modules/cron/tasks_basic.go b/modules/cron/tasks_basic.go
index 04cd7fe41..8dbc8d1ed 100755
--- a/modules/cron/tasks_basic.go
+++ b/modules/cron/tasks_basic.go
@@ -5,12 +5,13 @@
package cron
import (
- "code.gitea.io/gitea/services/reward"
- "code.gitea.io/gitea/services/cloudbrain/resource"
- "code.gitea.io/gitea/modules/modelarts"
"context"
"time"
+ "code.gitea.io/gitea/modules/modelarts"
+ "code.gitea.io/gitea/services/cloudbrain/resource"
+ "code.gitea.io/gitea/services/reward"
+
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/migrations"
repository_service "code.gitea.io/gitea/modules/repository"
@@ -122,6 +123,17 @@ func registerHandleUnDecompressAttachment() {
})
}
+func registerHandleModelSafetyTask() {
+ RegisterTaskFatal("handle_modelsafety_task", &BaseConfig{
+ Enabled: true,
+ RunAtStart: true,
+ Schedule: "@every 5m",
+ }, func(ctx context.Context, _ *models.User, _ Config) error {
+ repo.TimerHandleModelSafetyTestTask()
+ return nil
+ })
+}
+
func registerHandleBlockChainUnSuccessUsers() {
RegisterTaskFatal("handle_blockchain_unsuccess_users", &BaseConfig{
Enabled: true,
@@ -279,4 +291,6 @@ func initBasicTasks() {
//registerRewardPeriodTask()
registerCloudbrainPointDeductTask()
+
+ registerHandleModelSafetyTask()
}
diff --git a/modules/modelarts/modelarts.go b/modules/modelarts/modelarts.go
index 6b3d1f128..06521993e 100755
--- a/modules/modelarts/modelarts.go
+++ b/modules/modelarts/modelarts.go
@@ -1,7 +1,6 @@
package modelarts
import (
- "code.gitea.io/gitea/modules/modelarts_cd"
"encoding/json"
"errors"
"fmt"
@@ -9,6 +8,8 @@ import (
"strconv"
"strings"
+ "code.gitea.io/gitea/modules/modelarts_cd"
+
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/log"
@@ -70,8 +71,8 @@ const (
var (
poolInfos *models.PoolInfos
TrainFlavorInfos *Flavor
- SpecialPools *models.SpecialPools
- MultiNodeConfig *MultiNodes
+ SpecialPools *models.SpecialPools
+ MultiNodeConfig *MultiNodes
)
type GenerateTrainJobReq struct {
@@ -141,6 +142,7 @@ type GenerateInferenceJobReq struct {
ResultUrl string
Spec *models.Specification
DatasetName string
+ JobType string
}
type VersionInfo struct {
@@ -173,12 +175,12 @@ type ResourcePool struct {
} `json:"resource_pool"`
}
-type MultiNodes struct{
+type MultiNodes struct {
Info []OrgMultiNode `json:"multinode"`
}
-type OrgMultiNode struct{
+type OrgMultiNode struct {
Org string `json:"org"`
- Node []int `json:"node"`
+ Node []int `json:"node"`
}
// type Parameter struct {
@@ -709,7 +711,7 @@ func GenerateInferenceJob(ctx *context.Context, req *GenerateInferenceJobReq) (e
Status: models.TempJobStatus,
Type: models.TypeCloudBrainTwo,
JobName: req.JobName,
- JobType: string(models.JobTypeInference),
+ JobType: req.JobType,
})
if err != nil {
log.Error("InsertCloudbrainTemp failed: %v", err.Error())
@@ -732,7 +734,7 @@ func GenerateInferenceJob(ctx *context.Context, req *GenerateInferenceJobReq) (e
JobID: jobID,
JobName: req.JobName,
DisplayJobName: req.DisplayJobName,
- JobType: string(models.JobTypeInference),
+ JobType: req.JobType,
Type: models.TypeCloudBrainTwo,
VersionID: jobResult.VersionID,
VersionName: jobResult.VersionName,
@@ -769,7 +771,15 @@ func GenerateInferenceJob(ctx *context.Context, req *GenerateInferenceJobReq) (e
log.Error("CreateCloudbrain(%s) failed:%v", req.JobName, err.Error())
return err
}
- notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, jobID, req.DisplayJobName, models.ActionCreateInferenceTask)
+ if req.JobType == string(models.JobTypeModelSafety) {
+ task, err := models.GetCloudbrainByJobID(jobID)
+ if err == nil {
+ notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, fmt.Sprint(task.ID), req.DisplayJobName, models.ActionCreateBenchMarkTask)
+ }
+ } else {
+ notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, jobID, req.DisplayJobName, models.ActionCreateInferenceTask)
+ }
+
return nil
}
@@ -798,8 +808,8 @@ func InitSpecialPool() {
}
}
-func InitMultiNode(){
- if MultiNodeConfig ==nil && setting.ModelArtsMultiNode!=""{
+func InitMultiNode() {
+ if MultiNodeConfig == nil && setting.ModelArtsMultiNode != "" {
json.Unmarshal([]byte(setting.ModelArtsMultiNode), &MultiNodeConfig)
}
diff --git a/modules/setting/setting.go b/modules/setting/setting.go
index 7332be5e9..c6afae05a 100755
--- a/modules/setting/setting.go
+++ b/modules/setting/setting.go
@@ -707,6 +707,19 @@ var (
NPU_MINDSPORE_IMAGE_ID int
NPU_TENSORFLOW_IMAGE_ID int
}{}
+
+ ModelSafetyTest = struct {
+ NPUBaseDataSetName string
+ NPUBaseDataSetUUID string
+ NPUCombatDataSetName string
+ NPUCombatDataSetUUID string
+
+ GPUBaseDataSetName string
+ GPUBaseDataSetUUID string
+ GPUCombatDataSetName string
+ GPUCombatDataSetUUID string
+ }{}
+
ModelApp = struct {
DesensitizationUrl string
}{}
@@ -1530,9 +1543,22 @@ func NewContext() {
getGrampusConfig()
getModelartsCDConfig()
getModelConvertConfig()
+ getModelSafetyConfig()
getModelAppConfig()
}
+func getModelSafetyConfig() {
+ sec := Cfg.Section("model_safety_test")
+ ModelSafetyTest.GPUBaseDataSetName = sec.Key("GPUBaseDataSetName").MustString("")
+ ModelSafetyTest.GPUBaseDataSetUUID = sec.Key("GPUBaseDataSetUUID").MustString("")
+ ModelSafetyTest.GPUCombatDataSetName = sec.Key("GPUCombatDataSetName").MustString("")
+ ModelSafetyTest.GPUCombatDataSetUUID = sec.Key("GPUCombatDataSetUUID").MustString("")
+ ModelSafetyTest.NPUBaseDataSetName = sec.Key("NPUBaseDataSetName").MustString("")
+ ModelSafetyTest.NPUBaseDataSetUUID = sec.Key("NPUBaseDataSetUUID").MustString("")
+ ModelSafetyTest.NPUCombatDataSetName = sec.Key("NPUCombatDataSetName").MustString("")
+ ModelSafetyTest.NPUCombatDataSetUUID = sec.Key("NPUCombatDataSetUUID").MustString("")
+}
+
func getModelConvertConfig() {
sec := Cfg.Section("model_convert")
ModelConvert.GPU_PYTORCH_IMAGE = sec.Key("GPU_PYTORCH_IMAGE").MustString("dockerhub.pcl.ac.cn:5000/user-images/openi:tensorRT_7_zouap")
diff --git a/modules/timer/timer.go b/modules/timer/timer.go
deleted file mode 100755
index a0220380c..000000000
--- a/modules/timer/timer.go
+++ /dev/null
@@ -1,30 +0,0 @@
-package timer
-
-import (
- "github.com/robfig/cron/v3"
-
- "code.gitea.io/gitea/modules/log"
- "code.gitea.io/gitea/routers/repo"
-)
-
-func LaunchCronJob() {
- log.Trace("Run cron job")
-
- c := cron.New()
-
- spec := "*/10 * * * *"
- c.AddFunc(spec, repo.HandleUnDecompressAttachment)
-
- specCheckBlockChainUserSuccess := "*/10 * * * *"
- c.AddFunc(specCheckBlockChainUserSuccess, repo.HandleBlockChainUnSuccessUsers)
-
- specCheckRepoBlockChainSuccess := "*/1 * * * *"
- c.AddFunc(specCheckRepoBlockChainSuccess, repo.HandleBlockChainUnSuccessRepos)
-
- specCheckUnTransformedPRs := "*/1 * * * *"
- c.AddFunc(specCheckUnTransformedPRs, repo.HandleBlockChainMergedPulls)
-
- specCheckBlockChainCommitSuccess := "*/3 * * * *"
- c.AddFunc(specCheckBlockChainCommitSuccess, repo.HandleBlockChainUnSuccessCommits)
- c.Start()
-}
diff --git a/options/locale/locale_en-US.ini b/options/locale/locale_en-US.ini
index 282ff9418..773a338c1 100755
--- a/options/locale/locale_en-US.ini
+++ b/options/locale/locale_en-US.ini
@@ -3262,7 +3262,19 @@ expected_time = , expected to be available for
points_acquisition_instructions = Points Acquisition Instructions
insufficient_points_balance = Insufficient points balance
+[modelsafety]
+model_security_evaluation = Model Security Evaluation
+base_data_set = Base Data Set
+combat_data_set = Combat Data Set
+evaluation_indicators = Evaluation Indicators
+evaluation_result = Evaluation Result
+no_data = No Data
+untargetted = Untargeted
+targetted = Targeted
+new_model_security_evaluation_tips = Model security evaluation is only applicable to image classification
+
[model_app]
get_file_fail= Can not get the image content, please try again later.
content_type_unsupported=Allowed image type is jpg, jpeg or png.
process_image_fail=Fail to process image, please try again later.
+
diff --git a/options/locale/locale_zh-CN.ini b/options/locale/locale_zh-CN.ini
index 979da91d5..8ba4d252d 100755
--- a/options/locale/locale_zh-CN.ini
+++ b/options/locale/locale_zh-CN.ini
@@ -3281,8 +3281,19 @@ expected_time = ,预计可用
points_acquisition_instructions = 积分获取说明
insufficient_points_balance = 积分余额不足
+[modelsafety]
+model_security_evaluation = 模型安全评测
+base_data_set = 原始数据集
+combat_data_set = 对抗数据集
+evaluation_indicators = 评测指标
+evaluation_result = 评测结果
+no_data = 无数据
+untargetted = 非定向
+targetted = 定向
+new_model_security_evaluation_tips = 模型安全评测只适用于图像分类
[model_app]
get_file_fail= 获取上传文件失败,请稍后再试。
content_type_unsupported=请上传jpg、jpeg或png图片。
process_image_fail=图片处理失败,请稍后再试。
+
diff --git a/public/home/home.js b/public/home/home.js
index c1849b3e3..853c3ef23 100755
--- a/public/home/home.js
+++ b/public/home/home.js
@@ -81,8 +81,11 @@ var isZh = true;
if(lang != null && lang.nodeValue =="en-US" ){
isZh=false;
}
-
document.onreadystatechange = function () {
+ if(document.readyState != "complete"){
+ return;
+ }
+ console.log("Start to open WebSocket." + document.readyState);
queryRecommendData();
var output = document.getElementById("newmessage");
@@ -102,10 +105,9 @@ document.onreadystatechange = function () {
var html = "";
if (data != null){
if(messageQueue.length > maxSize){
- delete messageQueue[0];
- }else{
- messageQueue.push(data);
+ messageQueue.splice(0,1);
}
+ messageQueue.push(data);
var currentTime = new Date().getTime();
for(var i = 0; i < messageQueue.length; i++){
var record = messageQueue[i];
diff --git a/routers/admin/cloudbrains.go b/routers/admin/cloudbrains.go
index 96db935fe..d03c00ae6 100755
--- a/routers/admin/cloudbrains.go
+++ b/routers/admin/cloudbrains.go
@@ -52,7 +52,7 @@ func CloudBrains(ctx *context.Context) {
var jobTypes []string
jobTypeNot := false
if jobType == string(models.JobTypeBenchmark) {
- jobTypes = append(jobTypes, string(models.JobTypeBenchmark), string(models.JobTypeBrainScore), string(models.JobTypeSnn4imagenet))
+ jobTypes = append(jobTypes, string(models.JobTypeBenchmark), string(models.JobTypeModelSafety), string(models.JobTypeBrainScore), string(models.JobTypeSnn4imagenet))
} else if jobType != "all" && jobType != "" {
jobTypes = append(jobTypes, jobType)
}
diff --git a/routers/api/v1/repo/cloudbrain.go b/routers/api/v1/repo/cloudbrain.go
index 7324b6466..ba46ab58c 100755
--- a/routers/api/v1/repo/cloudbrain.go
+++ b/routers/api/v1/repo/cloudbrain.go
@@ -12,6 +12,7 @@ import (
"net/http"
"os"
"sort"
+ "strconv"
"strings"
"time"
@@ -67,49 +68,62 @@ func GetCloudbrainTask(ctx *context.APIContext) {
log.Error("GetCloudbrainByID failed:", err)
return
}
- jobResult, err := cloudbrain.GetJob(job.JobID)
- if err != nil {
- ctx.NotFound(err)
- log.Error("GetJob failed:", err)
- return
- }
- result, _ := models.ConvertToJobResultPayload(jobResult.Payload)
- if err != nil {
- ctx.NotFound(err)
- log.Error("ConvertToJobResultPayload failed:", err)
- return
- }
- oldStatus := job.Status
- job.Status = result.JobStatus.State
- taskRoles := result.TaskRoles
- taskRes, _ := models.ConvertToTaskPod(taskRoles[cloudbrain.SubTaskName].(map[string]interface{}))
- if result.JobStatus.State != string(models.JobWaiting) && result.JobStatus.State != string(models.JobFailed) {
- job.ContainerIp = taskRes.TaskStatuses[0].ContainerIP
- job.ContainerID = taskRes.TaskStatuses[0].ContainerID
- job.Status = taskRes.TaskStatuses[0].State
- }
-
- if result.JobStatus.State != string(models.JobWaiting) {
- models.ParseAndSetDurationFromCloudBrainOne(result, job)
- if oldStatus != job.Status {
- notification.NotifyChangeCloudbrainStatus(job, oldStatus)
+ if job.JobType == string(models.JobTypeModelSafety) {
+ routerRepo.GetAiSafetyTaskByJob(job)
+ job, err = models.GetCloudbrainByID(ID)
+ ctx.JSON(http.StatusOK, map[string]interface{}{
+ "ID": ID,
+ "JobName": job.JobName,
+ "JobStatus": job.Status,
+ "SubState": "",
+ "CreatedTime": job.CreatedUnix.Format("2006-01-02 15:04:05"),
+ "CompletedTime": job.UpdatedUnix.Format("2006-01-02 15:04:05"),
+ "JobDuration": job.TrainJobDuration,
+ })
+ } else {
+ jobResult, err := cloudbrain.GetJob(job.JobID)
+ if err != nil {
+ ctx.NotFound(err)
+ log.Error("GetJob failed:", err)
+ return
}
- err = models.UpdateJob(job)
+ result, _ := models.ConvertToJobResultPayload(jobResult.Payload)
if err != nil {
- log.Error("UpdateJob failed:", err)
+ ctx.NotFound(err)
+ log.Error("ConvertToJobResultPayload failed:", err)
+ return
+ }
+ oldStatus := job.Status
+ job.Status = result.JobStatus.State
+ taskRoles := result.TaskRoles
+ taskRes, _ := models.ConvertToTaskPod(taskRoles[cloudbrain.SubTaskName].(map[string]interface{}))
+ if result.JobStatus.State != string(models.JobWaiting) && result.JobStatus.State != string(models.JobFailed) {
+ job.ContainerIp = taskRes.TaskStatuses[0].ContainerIP
+ job.ContainerID = taskRes.TaskStatuses[0].ContainerID
+ job.Status = taskRes.TaskStatuses[0].State
}
- }
- ctx.JSON(http.StatusOK, map[string]interface{}{
- "ID": ID,
- "JobName": result.Config.JobName,
- "JobStatus": result.JobStatus.State,
- "SubState": result.JobStatus.SubState,
- "CreatedTime": time.Unix(result.JobStatus.CreatedTime/1000, 0).Format("2006-01-02 15:04:05"),
- "CompletedTime": time.Unix(result.JobStatus.CompletedTime/1000, 0).Format("2006-01-02 15:04:05"),
- "JobDuration": job.TrainJobDuration,
- })
+ if result.JobStatus.State != string(models.JobWaiting) {
+ models.ParseAndSetDurationFromCloudBrainOne(result, job)
+ if oldStatus != job.Status {
+ notification.NotifyChangeCloudbrainStatus(job, oldStatus)
+ }
+ err = models.UpdateJob(job)
+ if err != nil {
+ log.Error("UpdateJob failed:", err)
+ }
+ }
+ ctx.JSON(http.StatusOK, map[string]interface{}{
+ "ID": ID,
+ "JobName": result.Config.JobName,
+ "JobStatus": result.JobStatus.State,
+ "SubState": result.JobStatus.SubState,
+ "CreatedTime": time.Unix(result.JobStatus.CreatedTime/1000, 0).Format("2006-01-02 15:04:05"),
+ "CompletedTime": time.Unix(result.JobStatus.CompletedTime/1000, 0).Format("2006-01-02 15:04:05"),
+ "JobDuration": job.TrainJobDuration,
+ })
+ }
}
func GetCloudBrainInferenceJob(ctx *context.APIContext) {
@@ -370,6 +384,96 @@ func CloudbrainForModelConvertGetLog(ctx *context.Context) {
ctx.JSON(http.StatusOK, result)
}
+func ModelSafetyGetLog(ctx *context.APIContext) {
+ ID := ctx.Params(":id")
+ job, err := models.GetCloudbrainByID(ID)
+ if err != nil {
+ log.Error("GetCloudbrainByJobName failed: %v", err, ctx.Data["MsgID"])
+ ctx.ServerError(err.Error(), err)
+ return
+ }
+ if job.JobType == string(models.JobTypeModelSafety) {
+ if job.Type == models.TypeCloudBrainTwo {
+ //TrainJobForModelConvertGetLog(ctx)
+ var baseLine = ctx.Query("base_line")
+ var order = ctx.Query("order")
+ var lines = ctx.Query("lines")
+ lines_int, err := strconv.Atoi(lines)
+ if err != nil {
+ log.Error("change lines(%d) string to int failed", lines_int)
+ }
+
+ if order != modelarts.OrderDesc && order != modelarts.OrderAsc {
+ log.Error("order(%s) check failed", order)
+ ctx.JSON(http.StatusBadRequest, map[string]interface{}{
+ "err_msg": "order check failed",
+ })
+ return
+ }
+ resultLogFile, err := modelarts.GetTrainJobLogFileNames(job.JobID, strconv.FormatInt(job.VersionID, 10))
+ if err != nil {
+ log.Error("GetTrainJobLogFileNames(%s) failed:%v", job.JobID, err.Error())
+ }
+ result, err := modelarts.GetTrainJobLog(job.JobID, strconv.FormatInt(job.VersionID, 10), baseLine, resultLogFile.LogFileList[0], order, lines_int)
+ if err != nil {
+ log.Error("GetTrainJobLog(%s) failed:%v", job.JobID, err.Error())
+ }
+ if err != nil {
+ log.Error("trainJobGetLog(%s) failed:%v", job.JobID, err.Error())
+ // ctx.RenderWithErr(err.Error(), tplModelArtsTrainJobShow, nil)
+ ctx.JSON(http.StatusOK, map[string]interface{}{
+ "JobID": job.JobID,
+ "LogFileName": "",
+ "StartLine": "0",
+ "EndLine": "0",
+ "Content": "",
+ "Lines": 0,
+ "CanLogDownload": false,
+ })
+ return
+ }
+ ctx.Data["log_file_name"] = resultLogFile.LogFileList[0]
+ ctx.JSON(http.StatusOK, map[string]interface{}{
+ "JobID": job.JobID,
+ "LogFileName": resultLogFile.LogFileList[0],
+ "StartLine": result.StartLine,
+ "EndLine": result.EndLine,
+ "Content": result.Content,
+ "Lines": result.Lines,
+ "CanLogDownload": isCanDownloadLog(ctx, job),
+ })
+ }
+ }
+ //result := ""
+ //ctx.JSON(http.StatusOK, result)
+}
+
+func isCanDownloadLog(ctx *context.APIContext, job *models.Cloudbrain) bool {
+ if !ctx.IsSigned {
+ return false
+ }
+ return ctx.IsUserSiteAdmin() || ctx.User.ID == job.UserID
+}
+
+func ModelSafetyDownloadLogFile(ctx *context.Context) {
+ ID := ctx.Params(":id")
+ job, err := models.GetCloudbrainByID(ID)
+ if err != nil {
+ log.Error("GetCloudbrainByJobName failed: %v", err, ctx.Data["MsgID"])
+ ctx.ServerError(err.Error(), err)
+ return
+ }
+ if job.JobType == string(models.JobTypeModelSafety) {
+ if job.Type == models.TypeCloudBrainOne {
+ CloudbrainDownloadLogFile(ctx)
+ } else if job.Type == models.TypeCloudBrainTwo {
+ ctx.SetParams("jobid", job.JobID)
+ ctx.Req.Form.Set("version_name", job.VersionName)
+ routerRepo.TrainJobDownloadLogFile(ctx)
+ }
+ }
+}
+
func CloudbrainDownloadLogFile(ctx *context.Context) {
ID := ctx.Params(":id")
job, err := models.GetCloudbrainByID(ID)
@@ -378,13 +482,18 @@ func CloudbrainDownloadLogFile(ctx *context.Context) {
ctx.ServerError(err.Error(), err)
return
}
+ if job.JobType == string(models.JobTypeModelSafety) {
+ if job.Type == models.TypeCloudBrainTwo {
+ ModelSafetyDownloadLogFile(ctx)
+ return
+ }
+ }
logDir := "/model"
- if job.JobType == string(models.JobTypeInference) {
+ if job.JobType == string(models.JobTypeInference) || job.JobType == string(models.JobTypeModelSafety) {
logDir = cloudbrain.ResultPath
}
- prefix := "/" + setting.CBCodePathPrefix + job.JobName + logDir
- files, err := storage.GetOneLevelAllObjectUnderDirMinio(setting.Attachment.Minio.Bucket, prefix, "")
+ files, err := storage.GetOneLevelAllObjectUnderDirMinio(setting.Attachment.Minio.Bucket, setting.CBCodePathPrefix+job.JobName+logDir, "")
if err != nil {
log.Error("query cloudbrain model failed: %v", err)
return
@@ -397,17 +506,22 @@ func CloudbrainDownloadLogFile(ctx *context.Context) {
}
}
if fileName != "" {
+ prefix := "/" + setting.CBCodePathPrefix + job.JobName + logDir
url, err := storage.Attachments.PresignedGetURL(prefix+"/"+fileName, fileName)
if err != nil {
log.Error("Get minio get SignedUrl failed: %v", err.Error(), ctx.Data["msgID"])
ctx.ServerError("Get minio get SignedUrl failed", err)
return
}
+ log.Info("fileName=" + fileName)
http.Redirect(ctx.Resp, ctx.Req.Request, url, http.StatusTemporaryRedirect)
+ } else {
+ log.Info("fileName is null.")
+
}
}
-func CloudbrainGetLog(ctx *context.Context) {
+func CloudbrainGetLog(ctx *context.APIContext) {
ID := ctx.Params(":id")
job, err := models.GetCloudbrainByID(ID)
if err != nil {
@@ -415,12 +529,31 @@ func CloudbrainGetLog(ctx *context.Context) {
ctx.ServerError(err.Error(), err)
return
}
+ if job.JobType == string(models.JobTypeModelSafety) {
+ if job.Type == models.TypeCloudBrainOne {
+ result, err := cloudbrain.GetJob(job.JobID)
+ existStr := ""
+ if err == nil && result != nil {
+ jobRes, _ := models.ConvertToJobResultPayload(result.Payload)
+ taskRoles := jobRes.TaskRoles
+ taskRes, _ := models.ConvertToTaskPod(taskRoles[cloudbrain.SubTaskName].(map[string]interface{}))
+ existStr = taskRes.TaskStatuses[0].ExitDiagnostics
+ }
+ ctx.Data["existStr"] = existStr
+ log.Info("existStr=" + existStr)
+ } else {
+ ModelSafetyGetLog(ctx)
+ return
+ }
+
+ }
+
lines := ctx.QueryInt("lines")
baseLine := ctx.Query("base_line")
order := ctx.Query("order")
var result map[string]interface{}
resultPath := "/model"
- if job.JobType == string(models.JobTypeInference) {
+ if job.JobType == string(models.JobTypeInference) || job.JobType == string(models.JobTypeModelSafety) {
resultPath = "/result"
}
if baseLine == "" && order == "desc" {
@@ -447,12 +580,19 @@ func CloudbrainGetLog(ctx *context.Context) {
return
}
}
+ content := ""
+ if result["Content"] != nil {
+ content = result["Content"].(string)
+ }
+ if ctx.Data["existStr"] != nil && result["Lines"].(int) < 50 {
+ content = content + ctx.Data["existStr"].(string)
+ }
re := map[string]interface{}{
"JobID": ID,
"LogFileName": result["FileName"],
"StartLine": result["StartLine"],
"EndLine": result["EndLine"],
- "Content": result["Content"],
+ "Content": content,
"Lines": result["Lines"],
"CanLogDownload": result["FileName"] != "",
}
@@ -485,7 +625,7 @@ func getAllLineFromFile(path string) int {
}
func getLastLogFromModelDir(jobName string, lines int, resultPath string) map[string]interface{} {
- prefix := "/" + setting.CBCodePathPrefix + jobName + resultPath
+ prefix := setting.CBCodePathPrefix + jobName + resultPath
files, err := storage.GetOneLevelAllObjectUnderDirMinio(setting.Attachment.Minio.Bucket, prefix, "")
if err != nil {
log.Error("query cloudbrain model failed: %v", err)
@@ -546,7 +686,7 @@ func getLastLogFromModelDir(jobName string, lines int, resultPath string) map[st
}
func getLogFromModelDir(jobName string, startLine int, endLine int, resultPath string) map[string]interface{} {
- prefix := "/" + setting.CBCodePathPrefix + jobName + resultPath
+ prefix := setting.CBCodePathPrefix + jobName + resultPath
files, err := storage.GetOneLevelAllObjectUnderDirMinio(setting.Attachment.Minio.Bucket, prefix, "")
if err != nil {
log.Error("query cloudbrain model failed: %v", err)
@@ -578,6 +718,11 @@ func getLogFromModelDir(jobName string, startLine int, endLine int, resultPath s
for i := 0; i < endLine; i++ {
line, error := r.ReadString('\n')
if error == io.EOF {
+ if i >= startLine {
+ fileEndLine = i
+ re = re + line
+ count++
+ }
log.Info("read file completed.")
break
}
diff --git a/routers/api/v1/repo/cloudbrain_dashboard.go b/routers/api/v1/repo/cloudbrain_dashboard.go
index 54c0ddc20..c665fe256 100755
--- a/routers/api/v1/repo/cloudbrain_dashboard.go
+++ b/routers/api/v1/repo/cloudbrain_dashboard.go
@@ -626,7 +626,7 @@ func GetAllCloudbrainsPeriodDistribution(ctx *context.Context) {
}
jobTypeList := []string{string(models.JobTypeDebug), string(models.JobTypeTrain), string(models.JobTypeInference), string(models.JobTypeBenchmark),
- string(models.JobTypeBrainScore), string(models.JobTypeSnn4imagenet)}
+ string(models.JobTypeModelSafety), string(models.JobTypeBrainScore), string(models.JobTypeSnn4imagenet)}
for _, v := range jobTypeList {
if _, ok := cloudOneJobTypeRes[v]; !ok {
cloudOneJobTypeRes[v] = 0
@@ -709,7 +709,7 @@ func GetCloudbrainsDetailData(ctx *context.Context) {
var jobTypes []string
jobTypeNot := false
if jobType == string(models.JobTypeBenchmark) {
- jobTypes = append(jobTypes, string(models.JobTypeBenchmark), string(models.JobTypeBrainScore), string(models.JobTypeSnn4imagenet))
+ jobTypes = append(jobTypes, string(models.JobTypeBenchmark), string(models.JobTypeModelSafety), string(models.JobTypeBrainScore), string(models.JobTypeSnn4imagenet))
} else if jobType != "all" && jobType != "" {
jobTypes = append(jobTypes, jobType)
}
diff --git a/routers/repo/aisafety.go b/routers/repo/aisafety.go
new file mode 100644
index 000000000..5102a6722
--- /dev/null
+++ b/routers/repo/aisafety.go
@@ -0,0 +1,1049 @@
+package repo
+
+import (
+ "bufio"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "net/http"
+ "os"
+ "strconv"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/modules/aisafety"
+ "code.gitea.io/gitea/modules/cloudbrain"
+ "code.gitea.io/gitea/modules/context"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/modelarts"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/storage"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/services/cloudbrain/resource"
+ "code.gitea.io/gitea/services/reward/point/account"
+ uuid "github.com/satori/go.uuid"
+)
+
const (
	// Template paths for the model safety test pages.
	tplModelSafetyTestCreateGrampusGpu = "repo/modelsafety/newgrampusgpu" // grampus GPU create form
	tplModelSafetyTestCreateGrampusNpu = "repo/modelsafety/newgrampusnpu" // grampus NPU create form
	tplModelSafetyTestCreateGpu        = "repo/modelsafety/newgpu"        // cloudbrain-one GPU create form
	tplModelSafetyTestCreateNpu        = "repo/modelsafety/newnpu"        // cloudbrain-two NPU create form
	tplModelSafetyTestShow             = "repo/modelsafety/show"          // task detail page
)
+
+func CloudBrainAiSafetyCreateTest(ctx *context.Context) {
+ log.Info("start to create CloudBrainAiSafetyCreate")
+ uuid := uuid.NewV4()
+ id := uuid.String()
+ seriaNoParas := ctx.Query("serialNo")
+ fileName := ctx.Query("fileName")
+
+ //if jobType == string(models.JobTypeBenchmark) {
+ req := aisafety.TaskReq{
+ UnionId: id,
+ EvalName: "test1",
+ EvalContent: "test1",
+ TLPath: "test1",
+ Indicators: []string{"ACC", "ASS"},
+ CDName: "CIFAR10_1000_FGSM",
+ BDName: "CIFAR10_1000基础数据集",
+ }
+ aisafety.GetAlgorithmList()
+ if seriaNoParas != "" {
+ aisafety.GetTaskStatus(seriaNoParas)
+ } else {
+ jsonStr, err := getJsonContent("http://192.168.207.34:8065/Test_zap1234/openi_aisafety/raw/branch/master/result/" + fileName)
+ serialNo, err := aisafety.CreateSafetyTask(req, jsonStr)
+ if err == nil {
+ log.Info("serialNo=" + serialNo)
+ time.Sleep(time.Duration(2) * time.Second)
+ aisafety.GetTaskStatus(serialNo)
+ } else {
+ log.Info("CreateSafetyTask error," + err.Error())
+ }
+ }
+}
+
+func GetAiSafetyTaskByJob(job *models.Cloudbrain) {
+ if job == nil {
+ log.Error("GetCloudbrainByJobID failed")
+ return
+ }
+ syncAiSafetyTaskStatus(job)
+}
+
+func GetAiSafetyTaskTmpl(ctx *context.Context) {
+ ctx.Data["id"] = ctx.Params(":id")
+ ctx.Data["PageIsCloudBrain"] = true
+ ctx.HTML(200, tplModelSafetyTestShow)
+}
+
+func GetAiSafetyTask(ctx *context.Context) {
+ var ID = ctx.Params(":id")
+ job, err := models.GetCloudbrainByIDWithDeleted(ID)
+ if err != nil {
+ log.Error("GetCloudbrainByJobID failed:" + err.Error())
+ return
+ }
+ syncAiSafetyTaskStatus(job)
+ job, err = models.GetCloudbrainByIDWithDeleted(ID)
+ job.BenchmarkType = "安全评测"
+ job.BenchmarkTypeName = "Image Classification"
+ job.CanModify = cloudbrain.CanModifyJob(ctx, job)
+ job.CanDel = cloudbrain.CanDeleteJob(ctx, job)
+ if job.Parameters == "{\"parameter\":[]}" {
+ job.Parameters = ""
+ }
+ s, err := resource.GetCloudbrainSpec(job.ID)
+ if err == nil {
+ job.Spec = s
+ }
+ user, err := models.GetUserByID(job.UserID)
+ if err == nil {
+ tmpUser := &models.User{
+ Name: user.Name,
+ }
+ job.User = tmpUser
+ }
+
+ ctx.JSON(200, job)
+}
+
// StopAiSafetyTask stops the model safety test task identified by the ":id"
// URL parameter. It answers with a JSON envelope: result_code "0" plus msg
// "succeed" on success, result_code "-1" plus a message key otherwise.
func StopAiSafetyTask(ctx *context.Context) {
	log.Info("start to stop the task.")
	var ID = ctx.Params(":id")
	task, err := models.GetCloudbrainByIDWithDeleted(ID)
	result := make(map[string]interface{})
	result["result_code"] = "-1"
	if err != nil {
		log.Info("query task error.err=" + err.Error())
		log.Error("GetCloudbrainByJobID failed:" + err.Error())
		result["msg"] = "No such task."
		ctx.JSON(200, result)
		return
	}
	if isTaskNotFinished(task.Status) {
		if task.Type == models.TypeCloudBrainTwo {
			log.Info("start to stop model arts task.")
			// Best-effort: the local record is marked stopped even when the
			// remote ModelArts stop call fails.
			_, err := modelarts.StopTrainJob(task.JobID, strconv.FormatInt(task.VersionID, 10))
			if err != nil {
				log.Info("stop failed.err=" + err.Error())
			}
			task.Status = string(models.JobStopped)
			if task.EndTime == 0 {
				task.EndTime = timeutil.TimeStampNow()
			}
			task.ComputeAndSetDuration()
			err = models.UpdateJob(task)
			if err != nil {
				log.Error("UpdateJob(%s) failed:%v", task.JobName, err, ctx.Data["msgID"])
				result["msg"] = "cloudbrain.Stopped_success_update_status_fail"
				ctx.JSON(200, result)
				return
			}
			//queryTaskStatusFromCloudbrainTwo(job)
		} else if task.Type == models.TypeCloudBrainOne {
			// NOTE(review): isTaskNotFinished only matches waiting/running
			// states, so this already-stopped guard looks unreachable — confirm.
			if task.Status == string(models.JobStopped) || task.Status == string(models.JobFailed) || task.Status == string(models.JobSucceeded) {
				log.Error("the job(%s) has been stopped", task.JobName, ctx.Data["msgID"])
				result["msg"] = "cloudbrain.Already_stopped"
				ctx.JSON(200, result)
				return
			}
			err := cloudbrain.StopJob(task.JobID)
			if err != nil {
				log.Error("StopJob(%s) failed:%v", task.JobName, err, ctx.Data["msgID"])
				result["msg"] = "cloudbrain.Stopped_failed"
				ctx.JSON(200, result)
				return
			}
			task.Status = string(models.JobStopped)
			if task.EndTime == 0 {
				task.EndTime = timeutil.TimeStampNow()
			}
			task.ComputeAndSetDuration()
			err = models.UpdateJob(task)
			if err != nil {
				log.Error("UpdateJob(%s) failed:%v", task.JobName, err, ctx.Data["msgID"])
				result["msg"] = "cloudbrain.Stopped_success_update_status_fail"
				ctx.JSON(200, result)
				return
			}
		}
	} else {
		if task.Status == string(models.ModelSafetyTesting) {
			// Inference is done but the external evaluation is still running:
			// only the local record can be marked stopped.
			// (original comment: 修改为Failed — "change to Failed")
			task.Status = string(models.JobStopped)
			if task.EndTime == 0 {
				task.EndTime = timeutil.TimeStampNow()
			}
			task.ComputeAndSetDuration()
			err = models.UpdateJob(task)
			if err != nil {
				log.Error("UpdateJob(%s) failed:%v", task.JobName, err, ctx.Data["msgID"])
				result["msg"] = "cloudbrain.Stopped_success_update_status_fail"
				ctx.JSON(200, result)
				return
			}
		} else {
			log.Info("The job is finished. status=" + task.Status)
		}
	}
	result["result_code"] = "0"
	result["msg"] = "succeed"
	ctx.JSON(200, result)

}
+
+func DelAiSafetyTask(ctx *context.Context) {
+ var ID = ctx.Params(":id")
+ task, err := models.GetCloudbrainByIDWithDeleted(ID)
+
+ if err != nil {
+ log.Error("GetCloudbrainByJobID failed:" + err.Error())
+ ctx.ServerError("No such task.", err)
+ return
+ }
+ if task.Status != string(models.JobStopped) && task.Status != string(models.JobFailed) && task.Status != string(models.JobSucceeded) {
+ log.Error("the job(%s) has not been stopped", task.JobName, ctx.Data["msgID"])
+ ctx.ServerError("the job("+task.JobName+") has not been stopped", nil)
+ return
+ }
+ if task.Type == models.TypeCloudBrainOne {
+ DeleteCloudbrainJobStorage(task.JobName, models.TypeCloudBrainOne)
+ }
+ err = models.DeleteJob(task)
+ if err != nil {
+ ctx.ServerError(err.Error(), err)
+ return
+ }
+ ctx.Redirect(setting.AppSubURL + ctx.Repo.RepoLink + "/cloudbrain/benchmark")
+}
+
+func syncAiSafetyTaskStatus(job *models.Cloudbrain) {
+ log.Info("start to query safety task status.")
+ if isTaskNotFinished(job.Status) {
+ if job.Type == models.TypeCloudBrainTwo {
+ queryTaskStatusFromCloudbrainTwo(job)
+ } else if job.Type == models.TypeCloudBrainOne {
+ queryTaskStatusFromCloudbrain(job)
+ }
+ } else {
+ if job.Status == string(models.ModelSafetyTesting) {
+ queryTaskStatusFromModelSafetyTestServer(job)
+ } else {
+ log.Info("The job is finished. status=" + job.Status)
+ }
+ }
+}
+
+func TimerHandleModelSafetyTestTask() {
+ log.Info("start to TimerHandleModelSafetyTestTask")
+ tasks, err := models.GetModelSafetyTestTask()
+ if err == nil {
+ if tasks != nil && len(tasks) > 0 {
+ for _, job := range tasks {
+ syncAiSafetyTaskStatus(job)
+ }
+ } else {
+ log.Info("query running model safety test task 0.")
+ }
+ } else {
+ log.Info("query running model safety test task err." + err.Error())
+ }
+}
+
// queryTaskStatusFromCloudbrainTwo refreshes the status of an NPU
// (cloudbrain-two / ModelArts) inference task. While the inference is not yet
// completed it just persists the translated status; on completion it flips the
// task to ModelSafetyTesting and ships the inference result to the external
// evaluation service.
func queryTaskStatusFromCloudbrainTwo(job *models.Cloudbrain) {
	log.Info("The task not finished,name=" + job.DisplayJobName)
	result, err := modelarts.GetTrainJob(job.JobID, strconv.FormatInt(job.VersionID, 10))
	if err != nil {
		log.Info("query train job error." + err.Error())
		return
	}

	job.Status = modelarts.TransTrainJobStatus(result.IntStatus)
	job.Duration = result.Duration / 1000 // presumably ms -> s — TODO confirm
	job.TrainJobDuration = result.TrainJobDuration

	if job.StartTime == 0 && result.StartTime > 0 {
		job.StartTime = timeutil.TimeStamp(result.StartTime / 1000)
	}
	// Overwrites the TrainJobDuration set above with the normalized form.
	job.TrainJobDuration = models.ConvertDurationToStr(job.Duration)
	if job.EndTime == 0 && models.IsTrainJobTerminal(job.Status) && job.StartTime > 0 {
		job.EndTime = job.StartTime.Add(job.Duration)
	}
	job.CorrectCreateUnix()

	if job.Status != string(models.ModelArtsTrainJobCompleted) {
		log.Info("CloudbrainTwo task status=" + job.Status)
		err = models.UpdateJob(job)
		if err != nil {
			log.Error("UpdateJob failed:", err)
		}
	} else {
		log.Info("start to deal ModelSafetyTesting, task status=" + job.Status)
		// Inference finished: hand the result over to the evaluation stage.
		job.Status = string(models.ModelSafetyTesting)
		err = models.UpdateJob(job)
		if err != nil {
			log.Error("UpdateJob failed:", err)
		}
		//send msg to beihang
		sendNPUInferenceResultToTest(job)
	}

}
+
+func queryTaskStatusFromCloudbrain(job *models.Cloudbrain) {
+
+ log.Info("The task not finished,name=" + job.DisplayJobName)
+ jobResult, err := cloudbrain.GetJob(job.JobID)
+
+ result, err := models.ConvertToJobResultPayload(jobResult.Payload)
+ if err != nil {
+ log.Error("ConvertToJobResultPayload failed:", err)
+ return
+ }
+ job.Status = result.JobStatus.State
+ if result.JobStatus.State != string(models.JobWaiting) && result.JobStatus.State != string(models.JobFailed) {
+ taskRoles := result.TaskRoles
+ taskRes, _ := models.ConvertToTaskPod(taskRoles[cloudbrain.SubTaskName].(map[string]interface{}))
+ job.Status = taskRes.TaskStatuses[0].State
+ }
+ models.ParseAndSetDurationFromCloudBrainOne(result, job)
+ //updateCloudBrainOneJobTime(job)
+ log.Info("cloud brain one job status=" + job.Status)
+ if result.JobStatus.State != string(models.JobSucceeded) {
+ err = models.UpdateJob(job)
+ if err != nil {
+ log.Error("UpdateJob failed:", err)
+ }
+ } else {
+ //
+ job.Status = string(models.ModelSafetyTesting)
+ err = models.UpdateJob(job)
+ if err != nil {
+ log.Error("UpdateJob failed:", err)
+ }
+ //send msg to beihang
+ sendGPUInferenceResultToTest(job)
+ }
+}
+
+func queryTaskStatusFromModelSafetyTestServer(job *models.Cloudbrain) {
+ result, err := aisafety.GetTaskStatus(job.PreVersionName)
+ if err == nil {
+ if result.Code == "0" {
+ if result.Data.Status == 1 {
+ log.Info("The task is running....")
+ } else {
+ if result.Data.Code == 0 {
+ job.ResultJson = result.Data.StandardJson
+ job.Status = string(models.JobSucceeded)
+ err = models.UpdateJob(job)
+ if err != nil {
+ log.Error("UpdateJob failed:", err)
+ }
+ } else {
+ job.ResultJson = result.Data.Msg
+ job.Status = string(models.JobFailed)
+ err = models.UpdateJob(job)
+ if err != nil {
+ log.Error("UpdateJob failed:", err)
+ }
+ }
+ }
+ } else {
+ log.Info("The task is failed.")
+ job.Status = string(models.JobFailed)
+ err = models.UpdateJob(job)
+ if err != nil {
+ log.Error("UpdateJob failed:", err)
+ }
+ }
+ } else {
+ log.Info("The task not found.....")
+ }
+}
+
+func getAisafetyTaskReq(job *models.Cloudbrain) aisafety.TaskReq {
+ datasetname := job.DatasetName
+ datasetnames := strings.Split(datasetname, ";")
+ indicator := job.LabelName
+ EvalContent := "test1"
+ if job.Description != "" {
+ EvalContent = job.Description
+ }
+ req := aisafety.TaskReq{
+ UnionId: job.JobID,
+ EvalName: job.DisplayJobName,
+ EvalContent: EvalContent,
+ TLPath: "test1",
+ Indicators: strings.Split(indicator, ";"),
+ CDName: strings.Split(datasetnames[1], ".")[0],
+ BDName: strings.Split(datasetnames[0], ".")[0] + "基础数据集",
+ }
+ log.Info("CDName=" + req.CDName)
+ log.Info("BDName=" + req.BDName)
+ return req
+}
+
+func sendGPUInferenceResultToTest(job *models.Cloudbrain) {
+ log.Info("send sendGPUInferenceResultToTest")
+ req := getAisafetyTaskReq(job)
+ resultDir := "/result"
+ prefix := setting.CBCodePathPrefix + job.JobName + resultDir
+ files, err := storage.GetOneLevelAllObjectUnderDirMinio(setting.Attachment.Minio.Bucket, prefix, "")
+ if err != nil {
+ log.Error("query cloudbrain one model failed: %v", err)
+ return
+ }
+ jsonContent := ""
+ for _, file := range files {
+ if strings.HasSuffix(file.FileName, "result.json") {
+ path := storage.GetMinioPath(job.JobName+resultDir+"/", file.FileName)
+ log.Info("path=" + path)
+ reader, err := os.Open(path)
+ defer reader.Close()
+ if err == nil {
+ r := bufio.NewReader(reader)
+ for {
+ line, error := r.ReadString('\n')
+ jsonContent += line
+ if error == io.EOF {
+ log.Info("read file completed.")
+ break
+ }
+ if error != nil {
+ log.Info("read file error." + error.Error())
+ break
+ }
+ }
+ }
+ break
+ }
+ }
+ if jsonContent != "" {
+ sendHttpReqToBeihang(job, jsonContent, req)
+ } else {
+ updateJobFailed(job, "推理生成的Json数据为空,无法进行评测。")
+ }
+}
+
+func sendNPUInferenceResultToTest(job *models.Cloudbrain) {
+ log.Info("start to sendNPUInferenceResultToTest")
+ req := getAisafetyTaskReq(job)
+ jsonContent := ""
+ VersionOutputPath := modelarts.GetOutputPathByCount(modelarts.TotalVersionCount)
+ resultPath := modelarts.JobPath + job.JobName + modelarts.ResultPath + VersionOutputPath + "/result.json"
+ resultPath = resultPath[1:]
+ log.Info("bucket=" + setting.Bucket + " resultPath=" + resultPath)
+ body, err := storage.ObsDownloadAFile(setting.Bucket, resultPath)
+ if err != nil {
+ log.Info("ObsDownloadAFile error." + err.Error() + " resultPath=" + resultPath)
+ } else {
+ defer body.Close()
+ var data []byte
+ p := make([]byte, 4096)
+ var readErr error
+ var readCount int
+ for {
+ readCount, readErr = body.Read(p)
+ if readCount > 0 {
+ data = append(data, p[:readCount]...)
+ }
+ if readErr != nil || readCount == 0 {
+ break
+ }
+ }
+ jsonContent = string(data)
+ }
+
+ if jsonContent != "" {
+ sendHttpReqToBeihang(job, jsonContent, req)
+ } else {
+ updateJobFailed(job, "推理生成的Json数据为空,无法进行评测。")
+ }
+}
+func updateJobFailed(job *models.Cloudbrain, msg string) {
+ log.Info("The json is null. so set it failed.")
+ //update task failed.
+ job.Status = string(models.ModelArtsTrainJobFailed)
+ job.ResultJson = msg
+ err := models.UpdateJob(job)
+ if err != nil {
+ log.Error("UpdateJob failed:", err)
+ }
+}
+func sendHttpReqToBeihang(job *models.Cloudbrain, jsonContent string, req aisafety.TaskReq) {
+ log.Info("start to send beihang ...")
+ serialNo, err := aisafety.CreateSafetyTask(req, jsonContent)
+ if err == nil {
+ //update serial no to db
+ job.PreVersionName = serialNo
+ err = models.UpdateJob(job)
+ if err != nil {
+ log.Error("UpdateJob failed:", err)
+ }
+ }
+}
+
+func isTaskNotFinished(status string) bool {
+ if status == string(models.ModelArtsTrainJobRunning) || status == string(models.ModelArtsTrainJobWaiting) {
+ return true
+ }
+ if status == string(models.JobWaiting) || status == string(models.JobRunning) {
+ return true
+ }
+
+ if status == string(models.ModelArtsTrainJobUnknown) || status == string(models.ModelArtsTrainJobInit) {
+ return true
+ }
+ if status == string(models.ModelArtsTrainJobImageCreating) || status == string(models.ModelArtsTrainJobSubmitTrying) {
+ return true
+ }
+ return false
+}
+
+func AiSafetyCreateForGetGPU(ctx *context.Context) {
+ t := time.Now()
+ ctx.Data["PageIsCloudBrain"] = true
+ ctx.Data["IsCreate"] = true
+ ctx.Data["type"] = models.TypeCloudBrainOne
+ ctx.Data["compute_resource"] = models.GPUResource
+ ctx.Data["datasetType"] = models.TypeCloudBrainOne
+ ctx.Data["BaseDataSetName"] = setting.ModelSafetyTest.GPUBaseDataSetName
+ ctx.Data["BaseDataSetUUID"] = setting.ModelSafetyTest.GPUBaseDataSetUUID
+ ctx.Data["CombatDataSetName"] = setting.ModelSafetyTest.GPUCombatDataSetName
+ ctx.Data["CombatDataSetUUID"] = setting.ModelSafetyTest.GPUCombatDataSetUUID
+ log.Info("GPUBaseDataSetName=" + setting.ModelSafetyTest.GPUBaseDataSetName)
+ log.Info("GPUBaseDataSetUUID=" + setting.ModelSafetyTest.GPUBaseDataSetUUID)
+ log.Info("GPUCombatDataSetName=" + setting.ModelSafetyTest.GPUCombatDataSetName)
+ log.Info("GPUCombatDataSetUUID=" + setting.ModelSafetyTest.GPUCombatDataSetUUID)
+ var displayJobName = jobNamePrefixValid(cutString(ctx.User.Name, 5)) + t.Format("2006010215") + strconv.Itoa(int(t.Unix()))[5:]
+ ctx.Data["display_job_name"] = displayJobName
+ prepareCloudbrainOneSpecs(ctx)
+ queuesDetail, _ := cloudbrain.GetQueuesDetail()
+ if queuesDetail != nil {
+ ctx.Data["QueuesDetail"] = queuesDetail
+ reqPara, _ := json.Marshal(queuesDetail)
+ log.Warn("The GPU WaitCount json:", string(reqPara))
+ } else {
+ log.Info("The GPU WaitCount not get")
+ }
+ ctx.HTML(200, tplModelSafetyTestCreateGpu)
+}
+
+func AiSafetyCreateForGetNPU(ctx *context.Context) {
+ t := time.Now()
+ ctx.Data["PageIsCloudBrain"] = true
+ ctx.Data["IsCreate"] = true
+ ctx.Data["type"] = models.TypeCloudBrainTwo
+ ctx.Data["compute_resource"] = models.NPUResource
+ var displayJobName = jobNamePrefixValid(cutString(ctx.User.Name, 5)) + t.Format("2006010215") + strconv.Itoa(int(t.Unix()))[5:]
+ ctx.Data["display_job_name"] = displayJobName
+ ctx.Data["datasetType"] = models.TypeCloudBrainTwo
+ ctx.Data["BaseDataSetName"] = setting.ModelSafetyTest.NPUBaseDataSetName
+ ctx.Data["BaseDataSetUUID"] = setting.ModelSafetyTest.NPUBaseDataSetUUID
+ ctx.Data["CombatDataSetName"] = setting.ModelSafetyTest.NPUCombatDataSetName
+ ctx.Data["CombatDataSetUUID"] = setting.ModelSafetyTest.NPUCombatDataSetUUID
+
+ log.Info("NPUBaseDataSetName=" + setting.ModelSafetyTest.NPUBaseDataSetName)
+ log.Info("NPUBaseDataSetUUID=" + setting.ModelSafetyTest.NPUBaseDataSetUUID)
+ log.Info("NPUCombatDataSetName=" + setting.ModelSafetyTest.NPUCombatDataSetName)
+ log.Info("NPUCombatDataSetUUID=" + setting.ModelSafetyTest.NPUCombatDataSetUUID)
+ var resourcePools modelarts.ResourcePool
+ if err := json.Unmarshal([]byte(setting.ResourcePools), &resourcePools); err != nil {
+ ctx.ServerError("json.Unmarshal failed:", err)
+ }
+ ctx.Data["resource_pools"] = resourcePools.Info
+
+ var engines modelarts.Engine
+ if err := json.Unmarshal([]byte(setting.Engines), &engines); err != nil {
+ ctx.ServerError("json.Unmarshal failed:", err)
+ }
+ ctx.Data["engines"] = engines.Info
+
+ var versionInfos modelarts.VersionInfo
+ if err := json.Unmarshal([]byte(setting.EngineVersions), &versionInfos); err != nil {
+ ctx.ServerError("json.Unmarshal failed:", err)
+ }
+ ctx.Data["engine_versions"] = versionInfos.Version
+
+ prepareCloudbrainTwoInferenceSpecs(ctx)
+ waitCount := cloudbrain.GetWaitingCloudbrainCount(models.TypeCloudBrainTwo, "")
+ ctx.Data["WaitCount"] = waitCount
+ log.Info("The NPU WaitCount is " + fmt.Sprint(waitCount))
+ ctx.HTML(200, tplModelSafetyTestCreateNpu)
+}
+
// AiSafetyCreateForPost handles the model safety test creation form. It
// validates the display name (uniqueness and pattern), the one-task-per-user
// limit and the boot file, then dispatches to the NPU or GPU creation routine.
// On validation failure the form is re-rendered with an error message.
func AiSafetyCreateForPost(ctx *context.Context) {
	ctx.Data["PageIsCloudBrain"] = true
	displayJobName := ctx.Query("display_job_name")
	jobName := util.ConvertDisplayJobNameToJobName(displayJobName)

	taskType := ctx.QueryInt("type")
	description := ctx.Query("description")
	ctx.Data["type"] = taskType
	ctx.Data["displayJobName"] = displayJobName
	ctx.Data["description"] = description

	repo := ctx.Repo.Repository

	// Template to re-render on validation failure.
	// NOTE(review): these template constants differ from the
	// tplModelSafetyTest* ones declared in this file — confirm they point to
	// the intended pages.
	tpname := tplCloudBrainModelSafetyNewNpu
	if taskType == models.TypeCloudBrainOne {
		tpname = tplCloudBrainModelSafetyNewGpu
	}

	// Reject duplicate display names within the repository.
	tasks, err := models.GetCloudbrainsByDisplayJobName(repo.ID, string(models.JobTypeModelSafety), displayJobName)
	if err == nil {
		if len(tasks) != 0 {
			log.Error("the job name did already exist", ctx.Data["MsgID"])
			modelSafetyNewDataPrepare(ctx)
			ctx.RenderWithErr("the job name did already exist", tpname, nil)
			return
		}
	} else {
		if !models.IsErrJobNotExist(err) {
			log.Error("system error, %v", err, ctx.Data["MsgID"])
			modelSafetyNewDataPrepare(ctx)
			ctx.RenderWithErr("system error", tpname, nil)
			return
		}
	}

	if !jobNamePattern.MatchString(jobName) {
		modelSafetyNewDataPrepare(ctx)
		ctx.RenderWithErr(ctx.Tr("repo.cloudbrain_jobname_err"), tpname, nil)
		return
	}

	// Each user may have at most one unfinished model safety test task.
	count, err := models.GetModelSafetyCountByUserID(ctx.User.ID)
	if err != nil {
		log.Error("GetCloudbrainCountByUserID failed:%v", err, ctx.Data["MsgID"])
		modelSafetyNewDataPrepare(ctx)
		ctx.RenderWithErr("system error", tpname, nil)
		return
	} else {
		if count >= 1 {
			log.Error("the user already has running or waiting task", ctx.Data["MsgID"])
			modelSafetyNewDataPrepare(ctx)
			ctx.RenderWithErr(ctx.Tr("repo.cloudbrain.morethanonejob"), tpname, nil)
			return
		}
	}
	// The boot file must exist on the default branch.
	BootFile := ctx.Query("boot_file")
	BootFile = strings.TrimSpace(BootFile)
	bootFileExist, err := ctx.Repo.FileExists(BootFile, cloudbrain.DefaultBranchName)
	if err != nil || !bootFileExist {
		log.Error("Get bootfile error:", err)
		modelSafetyNewDataPrepare(ctx)
		ctx.RenderWithErr(ctx.Tr("repo.cloudbrain_bootfile_err"), tpname, nil)
		return
	}
	// Dispatch to the backend-specific creation routine.
	if taskType == models.TypeCloudBrainTwo {
		err = createForNPU(ctx, jobName)
	} else if taskType == models.TypeCloudBrainOne {
		err = createForGPU(ctx, jobName)
	}
	if err != nil {
		modelSafetyNewDataPrepare(ctx)
		ctx.RenderWithErr(err.Error(), tpname, nil)
	} else {
		log.Info("to redirect...")
		ctx.Redirect(setting.AppSubURL + ctx.Repo.RepoLink + "/cloudbrain/benchmark")
	}
}
+
+func createForNPU(ctx *context.Context, jobName string) error {
+ VersionOutputPath := modelarts.GetOutputPathByCount(modelarts.TotalVersionCount)
+ BootFile := ctx.Query("boot_file")
+ BootFile = strings.TrimSpace(BootFile)
+ displayJobName := ctx.Query("display_job_name")
+ description := ctx.Query("description")
+
+ srcDataset := ctx.Query("src_dataset") //uuid
+ combatDataset := ctx.Query("combat_dataset") //uuid
+ evaluationIndex := ctx.Query("evaluation_index")
+ Params := ctx.Query("run_para_list")
+ specId := ctx.QueryInt64("spec_id")
+
+ engineID := ctx.QueryInt("engine_id")
+ log.Info("engine_id=" + fmt.Sprint(engineID))
+ poolID := ctx.Query("pool_id")
+ repo := ctx.Repo.Repository
+
+ trainUrl := ctx.Query("train_url")
+ modelName := ctx.Query("model_name")
+ modelVersion := ctx.Query("model_version")
+ ckptName := ctx.Query("ckpt_name")
+ ckptUrl := "/" + trainUrl + ckptName
+ log.Info("ckpt url:" + ckptUrl)
+
+ FlavorName := ctx.Query("flaver_names")
+ EngineName := ctx.Query("engine_names")
+
+ isLatestVersion := modelarts.IsLatestVersion
+ VersionCount := modelarts.VersionCountOne
+
+ codeLocalPath := setting.JobPath + jobName + modelarts.CodePath
+ codeObsPath := "/" + setting.Bucket + modelarts.JobPath + jobName + modelarts.CodePath
+ resultObsPath := "/" + setting.Bucket + modelarts.JobPath + jobName + modelarts.ResultPath + VersionOutputPath + "/"
+ logObsPath := "/" + setting.Bucket + modelarts.JobPath + jobName + modelarts.LogPath + VersionOutputPath + "/"
+ log.Info("ckpt url:" + ckptUrl)
+ spec, err := resource.GetAndCheckSpec(ctx.User.ID, specId, models.FindSpecsOptions{
+ JobType: models.JobTypeInference,
+ ComputeResource: models.NPU,
+ Cluster: models.OpenICluster,
+ AiCenterCode: models.AICenterOfCloudBrainTwo})
+ if err != nil || spec == nil {
+
+ //ctx.RenderWithErr("Resource specification not available", tplCloudBrainModelSafetyNewNpu, nil)
+ return errors.New("Resource specification not available")
+ }
+ if !account.IsPointBalanceEnough(ctx.User.ID, spec.UnitPrice) {
+ log.Error("point balance is not enough,userId=%d specId=%d ", ctx.User.ID, spec.ID)
+ return errors.New(ctx.Tr("points.insufficient_points_balance"))
+ }
+
+ //todo: del the codeLocalPath
+ _, err = ioutil.ReadDir(codeLocalPath)
+ if err == nil {
+ os.RemoveAll(codeLocalPath)
+ }
+
+ gitRepo, _ := git.OpenRepository(repo.RepoPath())
+ commitID, _ := gitRepo.GetBranchCommitID(cloudbrain.DefaultBranchName)
+
+ if err := downloadCode(repo, codeLocalPath, cloudbrain.DefaultBranchName); err != nil {
+ log.Error("Create task failed, server timed out: %s (%v)", repo.FullName(), err)
+ return errors.New(ctx.Tr("cloudbrain.load_code_failed"))
+ }
+
+ //todo: upload code (send to file_server todo this work?)
+ if err := obsMkdir(setting.CodePathPrefix + jobName + modelarts.ResultPath + VersionOutputPath + "/"); err != nil {
+ log.Error("Failed to obsMkdir_result: %s (%v)", repo.FullName(), err)
+ return errors.New("Failed to obsMkdir_result")
+ }
+
+ if err := obsMkdir(setting.CodePathPrefix + jobName + modelarts.LogPath + VersionOutputPath + "/"); err != nil {
+ log.Error("Failed to obsMkdir_log: %s (%v)", repo.FullName(), err)
+ return errors.New("Failed to obsMkdir_log")
+ }
+
+ if err := uploadCodeToObs(codeLocalPath, jobName, ""); err != nil {
+ log.Error("Failed to uploadCodeToObs: %s (%v)", repo.FullName(), err)
+ return errors.New(ctx.Tr("cloudbrain.load_code_failed"))
+ }
+
+ var parameters models.Parameters
+ param := make([]models.Parameter, 0)
+ param = append(param, models.Parameter{
+ Label: modelarts.ResultUrl,
+ Value: "s3:/" + resultObsPath,
+ }, models.Parameter{
+ Label: modelarts.CkptUrl,
+ Value: "s3:/" + ckptUrl,
+ })
+ uuid := srcDataset + ";" + combatDataset
+ datasUrlList, dataUrl, datasetNames, isMultiDataset, err := getDatasUrlListByUUIDS(uuid)
+ if err != nil {
+ return err
+ }
+ dataPath := dataUrl
+ jsondatas, err := json.Marshal(datasUrlList)
+ if err != nil {
+ log.Error("Failed to Marshal: %v", err)
+ return err
+ }
+ if isMultiDataset {
+ param = append(param, models.Parameter{
+ Label: modelarts.MultiDataUrl,
+ Value: string(jsondatas),
+ })
+ }
+
+ existDeviceTarget := false
+ if len(Params) != 0 {
+ err := json.Unmarshal([]byte(Params), ¶meters)
+ if err != nil {
+ log.Error("Failed to Unmarshal params: %s (%v)", Params, err)
+ return errors.New("运行参数错误")
+ }
+
+ for _, parameter := range parameters.Parameter {
+ if parameter.Label == modelarts.DeviceTarget {
+ existDeviceTarget = true
+ }
+ if parameter.Label != modelarts.TrainUrl && parameter.Label != modelarts.DataUrl {
+ param = append(param, models.Parameter{
+ Label: parameter.Label,
+ Value: parameter.Value,
+ })
+ }
+ }
+ }
+ if !existDeviceTarget {
+ param = append(param, models.Parameter{
+ Label: modelarts.DeviceTarget,
+ Value: modelarts.Ascend,
+ })
+ }
+
+ req := &modelarts.GenerateInferenceJobReq{
+ JobName: jobName,
+ DisplayJobName: displayJobName,
+ DataUrl: dataPath,
+ Description: description,
+ CodeObsPath: codeObsPath,
+ BootFileUrl: codeObsPath + BootFile,
+ BootFile: BootFile,
+ TrainUrl: trainUrl,
+ WorkServerNumber: 1,
+ EngineID: int64(engineID),
+ LogUrl: logObsPath,
+ PoolID: poolID,
+ Uuid: uuid,
+ Parameters: param, //modelarts train parameters
+ CommitID: commitID,
+ BranchName: cloudbrain.DefaultBranchName,
+ Params: Params,
+ FlavorName: FlavorName,
+ EngineName: EngineName,
+ LabelName: evaluationIndex,
+ IsLatestVersion: isLatestVersion,
+ VersionCount: VersionCount,
+ TotalVersionCount: modelarts.TotalVersionCount,
+ ModelName: modelName,
+ ModelVersion: modelVersion,
+ CkptName: ckptName,
+ ResultUrl: resultObsPath,
+ Spec: spec,
+ DatasetName: datasetNames,
+ JobType: string(models.JobTypeModelSafety),
+ }
+
+ err = modelarts.GenerateInferenceJob(ctx, req)
+ if err != nil {
+ log.Error("GenerateTrainJob failed:%v", err.Error())
+ return err
+ }
+ return nil
+}
+
+// createForGPU submits a model-safety evaluation task to CloudBrain One (GPU).
+// It reads the task parameters from the request form, validates the resource
+// specification and the user's point balance, stages the repository code and
+// the selected datasets, builds the evaluation command, and hands the job to
+// cloudbrain.GenerateTask. Returns a user-facing error on failure.
+func createForGPU(ctx *context.Context, jobName string) error {
+	BootFile := ctx.Query("boot_file")
+	BootFile = strings.TrimSpace(BootFile)
+	displayJobName := ctx.Query("display_job_name")
+	description := ctx.Query("description")
+	image := strings.TrimSpace(ctx.Query("image"))
+	srcDataset := ctx.Query("src_dataset")       //uuid of the base dataset
+	combatDataset := ctx.Query("combat_dataset") //uuid of the adversarial ("combat") dataset
+	evaluationIndex := ctx.Query("evaluation_index")
+	Params := ctx.Query("run_para_list")
+	specId := ctx.QueryInt64("spec_id")
+	TrainUrl := ctx.Query("train_url")
+	CkptName := ctx.Query("ckpt_name")
+	modelName := ctx.Query("model_name")
+	modelVersion := ctx.Query("model_version")
+
+	// real path of the model checkpoint on the Minio-backed storage
+	ckptUrl := setting.Attachment.Minio.RealPath + TrainUrl + CkptName
+	log.Info("ckpt url:" + ckptUrl)
+	// the chosen spec must be a CloudBrain One GPU spec valid for benchmark jobs
+	spec, err := resource.GetAndCheckSpec(ctx.User.ID, specId, models.FindSpecsOptions{
+		JobType: models.JobTypeBenchmark,
+		ComputeResource: models.GPU,
+		Cluster: models.OpenICluster,
+		AiCenterCode: models.AICenterOfCloudBrainOne})
+	if err != nil || spec == nil {
+		return errors.New("Resource specification not available")
+	}
+
+	// reject the job up front if the user cannot pay one unit of this spec
+	if !account.IsPointBalanceEnough(ctx.User.ID, spec.UnitPrice) {
+		log.Error("point balance is not enough,userId=%d specId=%d ", ctx.User.ID, spec.ID)
+		return errors.New(ctx.Tr("points.insufficient_points_balance"))
+	}
+
+	repo := ctx.Repo.Repository
+	// stage the repository code locally, then mirror it to Minio so the job
+	// container can mount it under the code path
+	codePath := setting.JobPath + jobName + cloudbrain.CodeMountPath
+	os.RemoveAll(codePath)
+
+	if err := downloadCode(repo, codePath, cloudbrain.DefaultBranchName); err != nil {
+		log.Error("downloadCode failed, %v", err, ctx.Data["MsgID"])
+		return errors.New("system error")
+	}
+
+	err = uploadCodeToMinio(codePath+"/", jobName, cloudbrain.CodeMountPath+"/")
+	if err != nil {
+		log.Error("uploadCodeToMinio failed, %v", err, ctx.Data["MsgID"])
+		return errors.New("system error")
+	}
+
+	// both datasets are attached to the task as a ';'-separated uuid list
+	uuid := srcDataset + ";" + combatDataset
+	datasetInfos, datasetNames, err := models.GetDatasetInfo(uuid)
+	log.Info("uuid=" + uuid)
+	if err != nil {
+		log.Error("GetDatasetInfo failed: %v", err, ctx.Data["MsgID"])
+		return errors.New(ctx.Tr("cloudbrain.error.dataset_select"))
+	}
+	command, err := getGpuModelSafetyCommand(BootFile, Params, CkptName, displayJobName)
+	if err != nil {
+		log.Error("Get Command failed: %v", err, ctx.Data["MsgID"])
+		return errors.New(ctx.Tr("cloudbrain.error.dataset_select"))
+	}
+	log.Info("Command=" + command)
+
+	req := cloudbrain.GenerateCloudBrainTaskReq{
+		Ctx:              ctx,
+		DisplayJobName:   displayJobName,
+		JobName:          jobName,
+		Image:            image,
+		Command:          command,
+		Uuids:            uuid,
+		DatasetNames:     datasetNames,
+		DatasetInfos:     datasetInfos,
+		CodePath:         storage.GetMinioPath(jobName, cloudbrain.CodeMountPath+"/"),
+		ModelPath:        setting.Attachment.Minio.RealPath + TrainUrl,
+		BenchmarkPath:    storage.GetMinioPath(jobName, cloudbrain.BenchMarkMountPath+"/"),
+		Snn4ImageNetPath: storage.GetMinioPath(jobName, cloudbrain.Snn4imagenetMountPath+"/"),
+		BrainScorePath:   storage.GetMinioPath(jobName, cloudbrain.BrainScoreMountPath+"/"),
+		JobType:          string(models.JobTypeModelSafety),
+		Description:      description,
+		BranchName:       cloudbrain.DefaultBranchName,
+		BootFile:         BootFile,
+		Params:           Params,
+		CommitID:         "",
+		ModelName:        modelName,
+		ModelVersion:     modelVersion,
+		CkptName:         CkptName,
+		ResultPath:       storage.GetMinioPath(jobName, cloudbrain.ResultPath+"/"),
+		Spec:             spec,
+		LabelName:        evaluationIndex,
+	}
+
+	err = cloudbrain.GenerateTask(req)
+	if err != nil {
+		return err
+	}
+	return nil
+}
+
+// getGpuModelSafetyCommand builds the shell command that runs a model-safety
+// evaluation boot file inside the GPU job container. The user-supplied run
+// parameters (a JSON models.Parameters document) are appended as
+// "--label=value" flags, followed by the checkpoint path, and stdout is
+// redirected to the task log file under the result path.
+// Returns an error when the boot file is not a .py script or the parameter
+// JSON cannot be parsed.
+func getGpuModelSafetyCommand(BootFile string, params string, CkptName string, DisplayJobName string) (string, error) {
+	var command string
+	bootFile := strings.TrimSpace(BootFile)
+
+	// only python entry points are supported
+	if !strings.HasSuffix(bootFile, ".py") {
+		log.Error("bootFile(%s) format error", bootFile)
+		return command, errors.New("bootFile format error")
+	}
+
+	var parameters models.Parameters
+	var param string
+	if len(params) != 0 {
+		// NOTE(review): fixed mis-encoded "&parameters" (was garbled to "¶meters")
+		err := json.Unmarshal([]byte(params), &parameters)
+		if err != nil {
+			log.Error("Failed to Unmarshal params: %s (%v)", params, err)
+			return command, err
+		}
+
+		for _, parameter := range parameters.Parameter {
+			param += " --" + parameter.Label + "=" + parameter.Value
+		}
+	}
+
+	// the checkpoint is mounted under the model mount path inside the container
+	param += " --ckpt_url=" + cloudbrain.ModelMountPath + "/" + CkptName
+
+	command += "python /code/" + bootFile + param + " > " + cloudbrain.ResultPath + "/" + DisplayJobName + "-" + cloudbrain.LogFile
+
+	return command, nil
+}
+
+// modelSafetyNewDataPrepare fills ctx.Data with the submitted form values of
+// a model-safety task (so the creation page can be re-rendered with the
+// user's input, e.g. after a validation failure) and with the
+// cluster-specific options: resource specs, the preset base/combat
+// evaluation datasets and — for the NPU cluster — the configured engines and
+// engine versions.
+func modelSafetyNewDataPrepare(ctx *context.Context) error {
+	ctx.Data["PageIsCloudBrain"] = true
+	ctx.Data["type"] = ctx.QueryInt("type")
+	ctx.Data["boot_file"] = ctx.Query("boot_file")
+	ctx.Data["display_job_name"] = ctx.Query("display_job_name")
+	ctx.Data["description"] = ctx.Query("description")
+	ctx.Data["image"] = strings.TrimSpace(ctx.Query("image"))
+	ctx.Data["src_dataset"] = ctx.Query("src_dataset") //uuid
+	ctx.Data["combat_dataset"] = ctx.Query("combat_dataset") //uuid
+	ctx.Data["evaluationIndex"] = ctx.Query("evaluation_index")
+	ctx.Data["run_para_list"] = ctx.Query("run_para_list")
+	ctx.Data["spec_id"] = ctx.QueryInt64("spec_id")
+	ctx.Data["train_url"] = ctx.Query("train_url")
+	ctx.Data["ckpt_name"] = ctx.Query("ckpt_name")
+	ctx.Data["model_name"] = ctx.Query("model_name")
+	ctx.Data["model_version"] = ctx.Query("model_version")
+
+	if ctx.QueryInt("type") == models.TypeCloudBrainOne {
+		ctx.Data["type"] = models.TypeCloudBrainOne
+		ctx.Data["compute_resource"] = models.GPUResource
+		ctx.Data["datasetType"] = models.TypeCloudBrainOne
+
+		// preset evaluation datasets configured for the GPU cluster
+		ctx.Data["BaseDataSetName"] = setting.ModelSafetyTest.GPUBaseDataSetName
+		ctx.Data["BaseDataSetUUID"] = setting.ModelSafetyTest.GPUBaseDataSetUUID
+		ctx.Data["CombatDataSetName"] = setting.ModelSafetyTest.GPUCombatDataSetName
+		ctx.Data["CombatDataSetUUID"] = setting.ModelSafetyTest.GPUCombatDataSetUUID
+		prepareCloudbrainOneSpecs(ctx)
+
+		queuesDetail, _ := cloudbrain.GetQueuesDetail()
+		if queuesDetail != nil {
+			ctx.Data["QueuesDetail"] = queuesDetail
+		}
+
+	} else {
+		ctx.Data["engine_id"] = ctx.QueryInt("engine_id")
+		ctx.Data["pool_id"] = ctx.Query("pool_id")
+
+		ctx.Data["type"] = models.TypeCloudBrainTwo
+		ctx.Data["compute_resource"] = models.NPUResource
+		ctx.Data["datasetType"] = models.TypeCloudBrainTwo
+		// preset evaluation datasets configured for the NPU cluster
+		ctx.Data["BaseDataSetName"] = setting.ModelSafetyTest.NPUBaseDataSetName
+		ctx.Data["BaseDataSetUUID"] = setting.ModelSafetyTest.NPUBaseDataSetUUID
+		ctx.Data["CombatDataSetName"] = setting.ModelSafetyTest.NPUCombatDataSetName
+		ctx.Data["CombatDataSetUUID"] = setting.ModelSafetyTest.NPUCombatDataSetUUID
+
+		var engines modelarts.Engine
+		if err := json.Unmarshal([]byte(setting.Engines), &engines); err != nil {
+			// the page cannot be rendered without the engine list; report the
+			// error instead of continuing with zero-value data
+			ctx.ServerError("json.Unmarshal failed:", err)
+			return err
+		}
+		ctx.Data["engines"] = engines.Info
+
+		var versionInfos modelarts.VersionInfo
+		if err := json.Unmarshal([]byte(setting.EngineVersions), &versionInfos); err != nil {
+			ctx.ServerError("json.Unmarshal failed:", err)
+			return err
+		}
+		ctx.Data["engine_versions"] = versionInfos.Version
+		prepareCloudbrainTwoInferenceSpecs(ctx)
+
+		waitCount := cloudbrain.GetWaitingCloudbrainCount(models.TypeCloudBrainTwo, "")
+		ctx.Data["WaitCount"] = waitCount
+	}
+
+	return nil
+}
+
+// getJsonContent performs an HTTP GET on url and returns the response body
+// as a string. A non-200 status is reported as an explicit error rather
+// than being accepted silently, and the response body is always closed.
+func getJsonContent(url string) (string, error) {
+	resp, err := http.Get(url)
+	if err != nil {
+		// the original combined this check with the status-code check and
+		// called err.Error() on a nil error (nil-pointer panic on non-200
+		// responses); the two cases are handled separately now
+		log.Info("Get organizations url error=" + err.Error())
+		return "", err
+	}
+	defer resp.Body.Close()
+
+	if resp.StatusCode != http.StatusOK {
+		log.Info("Get organizations url status=" + resp.Status)
+		return "", errors.New("get " + url + ": unexpected status " + resp.Status)
+	}
+
+	body, err := ioutil.ReadAll(resp.Body)
+	if err != nil {
+		log.Info("Get organizations url error=" + err.Error())
+		return "", err
+	}
+	//log.Info("json str =" + string(body))
+
+	return string(body), nil
+}
diff --git a/routers/repo/cloudbrain.go b/routers/repo/cloudbrain.go
index dd44be6b2..a2ea7d51b 100755
--- a/routers/repo/cloudbrain.go
+++ b/routers/repo/cloudbrain.go
@@ -51,6 +51,9 @@ const (
tplCloudBrainBenchmarkNew base.TplName = "repo/cloudbrain/benchmark/new"
tplCloudBrainBenchmarkShow base.TplName = "repo/cloudbrain/benchmark/show"
+ tplCloudBrainModelSafetyNewGpu base.TplName = "repo/modelsafety/newgpu"
+ tplCloudBrainModelSafetyNewNpu base.TplName = "repo/modelsafety/newnpu"
+
tplCloudBrainImageSubmit base.TplName = "repo/cloudbrain/image/submit"
tplCloudBrainImageEdit base.TplName = "repo/cloudbrain/image/edit"
@@ -140,8 +143,11 @@ func cloudBrainNewDataPrepare(ctx *context.Context) error {
ctx.Data["is_brainscore_enabled"] = setting.IsBrainScoreEnabled
ctx.Data["datasetType"] = models.TypeCloudBrainOne
-
- ctx.Data["benchmarkMode"] = ctx.Query("benchmarkMode")
+ defaultMode := ctx.Query("benchmarkMode")
+ if defaultMode == "" {
+ defaultMode = "alogrithm"
+ }
+ ctx.Data["benchmarkMode"] = defaultMode
if ctx.Cloudbrain != nil {
ctx.Data["branch_name"] = ctx.Cloudbrain.BranchName
@@ -734,7 +740,10 @@ func cloudBrainShow(ctx *context.Context, tpName base.TplName, jobType models.Jo
} else {
task, err = models.GetCloudbrainByIDWithDeleted(ctx.Params(":id"))
}
-
+ if task.JobType == string(models.JobTypeModelSafety) {
+ GetAiSafetyTaskTmpl(ctx)
+ return
+ }
if err != nil {
log.Info("error:" + err.Error())
ctx.NotFound(ctx.Req.URL.RequestURI(), nil)
@@ -1827,6 +1836,9 @@ func SyncCloudbrainStatus() {
}
for _, task := range cloudBrains {
+ if task.JobType == string(models.JobTypeModelSafety) {
+ continue
+ }
if task.Type == models.TypeCloudBrainOne {
result, err := cloudbrain.GetJob(task.JobID)
if err != nil {
@@ -2112,14 +2124,14 @@ func CloudBrainBenchmarkIndex(ctx *context.Context) {
}
var jobTypes []string
- jobTypes = append(jobTypes, string(models.JobTypeBenchmark), string(models.JobTypeBrainScore), string(models.JobTypeSnn4imagenet))
+ jobTypes = append(jobTypes, string(models.JobTypeBenchmark), string(models.JobTypeBrainScore), string(models.JobTypeSnn4imagenet), string(models.JobTypeModelSafety))
ciTasks, count, err := models.Cloudbrains(&models.CloudbrainsOptions{
ListOptions: models.ListOptions{
Page: page,
PageSize: setting.UI.IssuePagingNum,
},
RepoID: repo.ID,
- Type: models.TypeCloudBrainOne,
+ Type: -1,
JobTypes: jobTypes,
})
if err != nil {
@@ -2166,6 +2178,10 @@ func CloudBrainBenchmarkIndex(ctx *context.Context) {
}
}
}
+ if task.JobType == string(models.JobTypeModelSafety) {
+ ciTasks[i].BenchmarkType = "安全评测"
+ ciTasks[i].BenchmarkTypeName = "Image Classification"
+ }
}
pager := context.NewPagination(int(count), setting.UI.IssuePagingNum, page, 5)
diff --git a/routers/repo/modelarts.go b/routers/repo/modelarts.go
index 591f0dd7c..6e44b3cd2 100755
--- a/routers/repo/modelarts.go
+++ b/routers/repo/modelarts.go
@@ -2200,6 +2200,7 @@ func InferenceJobCreate(ctx *context.Context, form auth.CreateModelArtsInference
ResultUrl: resultObsPath,
Spec: spec,
DatasetName: datasetNames,
+ JobType: string(models.JobTypeInference),
}
err = modelarts.GenerateInferenceJob(ctx, req)
@@ -2737,7 +2738,7 @@ func TrainJobDownloadLogFile(ctx *context.Context) {
return
}
ctx.Resp.Header().Set("Cache-Control", "max-age=0")
- http.Redirect(ctx.Resp, ctx.Req.Request, url, http.StatusMovedPermanently)
+ http.Redirect(ctx.Resp, ctx.Req.Request, url, http.StatusTemporaryRedirect)
}
func getDatasUrlListByUUIDS(uuidStr string) ([]models.Datasurl, string, string, bool, error) {
var isMultiDataset bool
diff --git a/routers/routes/routes.go b/routers/routes/routes.go
index 4d4dbbec1..9a523ea48 100755
--- a/routers/routes/routes.go
+++ b/routers/routes/routes.go
@@ -1202,7 +1202,7 @@ func RegisterRoutes(m *macaron.Macaron) {
}, context.RepoRef())
m.Group("/modelmanage", func() {
m.Post("/create_model", repo.SaveModel)
- m.Post("/create_model_convert", reqRepoModelManageWriter, repo.SaveModelConvert)
+ m.Post("/create_model_convert", reqWechatBind, reqRepoModelManageWriter, repo.SaveModelConvert)
m.Post("/create_new_model", repo.SaveNewNameModel)
m.Delete("/delete_model", repo.DeleteModel)
m.Post("/delete_model_convert/:id", repo.DeleteModelConvert)
@@ -1229,6 +1229,18 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Get("/downloadall", repo.DownloadMultiModelFile)
}, context.RepoRef())
+ m.Group("/modelsafety", func() {
+ m.Group("/:id", func() {
+ m.Get("/show", reqRepoCloudBrainWriter, repo.GetAiSafetyTaskTmpl)
+ m.Get("", reqRepoCloudBrainWriter, repo.GetAiSafetyTask)
+ m.Post("/stop", cloudbrain.AdminOrOwnerOrJobCreaterRight, repo.StopAiSafetyTask)
+ m.Post("/del", cloudbrain.AdminOrOwnerOrJobCreaterRight, repo.DelAiSafetyTask)
+ })
+ m.Get("/create_gpu", reqWechatBind, reqRepoCloudBrainWriter, context.PointAccount(), repo.AiSafetyCreateForGetGPU)
+ m.Get("/create_npu", reqWechatBind, reqRepoCloudBrainWriter, context.PointAccount(), repo.AiSafetyCreateForGetNPU)
+ m.Post("/create", reqWechatBind, reqRepoCloudBrainWriter, repo.AiSafetyCreateForPost)
+ }, context.RepoRef())
+
m.Group("/debugjob", func() {
m.Get("", reqRepoCloudBrainReader, repo.DebugJobIndex)
}, context.RepoRef())
diff --git a/routers/user/home.go b/routers/user/home.go
index 991c80328..1a20c26e2 100755
--- a/routers/user/home.go
+++ b/routers/user/home.go
@@ -777,7 +777,7 @@ func Cloudbrains(ctx *context.Context) {
var jobTypes []string
jobTypeNot := false
if jobType == string(models.JobTypeBenchmark) {
- jobTypes = append(jobTypes, string(models.JobTypeBenchmark), string(models.JobTypeBrainScore), string(models.JobTypeSnn4imagenet))
+ jobTypes = append(jobTypes, string(models.JobTypeBenchmark), string(models.JobTypeModelSafety), string(models.JobTypeBrainScore), string(models.JobTypeSnn4imagenet))
} else if jobType != "all" && jobType != "" {
jobTypes = append(jobTypes, jobType)
}
diff --git a/templates/admin/cloudbrain/list.tmpl b/templates/admin/cloudbrain/list.tmpl
index 4c500b5e6..97d968954 100755
--- a/templates/admin/cloudbrain/list.tmpl
+++ b/templates/admin/cloudbrain/list.tmpl
@@ -110,13 +110,6 @@
{{.DisplayJobName}}
- {{else if eq .JobType "MODELSAFETY"}}
-
- {{.DisplayJobName}}
-
{{else if eq .JobType "INFERENCE"}}
{{.DisplayJobName}}
- {{else if eq .JobType "BENCHMARK"}}
+ {{else if eq .JobType "BENCHMARK" "MODELSAFETY"}}
@@ -148,7 +141,7 @@
+ {{$.i18n.Tr "repo.cloudbrain_task"}} + | ++ + | +
+ {{$.i18n.Tr "repo.modelarts.status"}} + | ++ + | +
+ {{$.i18n.Tr "repo.modelarts.createtime"}} + | +
+
+
+
+ |
+
+ {{$.i18n.Tr "repo.cloudbrain.time.starttime"}} + | ++ + | +
+ {{$.i18n.Tr "repo.cloudbrain.time.endtime"}} + | ++ + | +
+ {{$.i18n.Tr "repo.modelarts.train_job.dura_time"}} + | ++ + | +
+ | + + + +
+
+
+
+
+ |
+
+ {{$.i18n.Tr "repo.cloudbrain.benchmark.evaluate_scenes"}} + | ++ + | +
+ {{$.i18n.Tr "repo.cloudbrain.benchmark.evaluate_type"}} + | ++ + | +
+ {{.i18n.Tr "modelsafety.base_data_set"}} + | ++ + | +
+ {{.i18n.Tr "modelsafety.combat_data_set"}} + | ++ + | +
+ {{$.i18n.Tr "repo.modelarts.train_job.run_parameter"}} + | ++ + | +
+ {{$.i18n.Tr "repo.modelarts.code_version"}} + | ++ + | +
+ {{$.i18n.Tr "repo.modelarts.train_job.description"}} + | ++ + | +
+ {{$.i18n.Tr "repo.modelarts.train_job.resource_type"}} + | ++ + | +
+ {{$.i18n.Tr "repo.modelarts.train_job.standard"}} + | ++ + | +
+ {{$.i18n.Tr "repo.modelarts.model_name"}} + | ++ + | +
+ {{$.i18n.Tr "repo.modelconvert.modelversion"}} + | + ++ + | +
+ {{$.i18n.Tr "repo.modelarts.infer_job_model_file"}} + | + ++ + | +
+ {{$.i18n.Tr "repo.cloudbrain_creator"}} + | ++ + | +
+ {{$.i18n.Tr "repo.modelarts.train_job.start_file"}} + | ++ + | +
+ {{.i18n.Tr "modelsafety.evaluation_indicators"}} + | ++ + | +
{{$.i18n.Tr "cloudbrain.task_delete_confirm"}}
+