diff --git a/custom/public/css/git.openi.css b/custom/public/css/git.openi.css index 3d7e15ada..3a4eff8e4 100755 --- a/custom/public/css/git.openi.css +++ b/custom/public/css/git.openi.css @@ -72,9 +72,7 @@ z-index: 10; } .ui.secondary.c2net.segment{ - /* background: #f8faff; - border: none;*/ - margin-bottom: 5em; + padding-bottom: 3em; padding-top: 2em; color: rgba(0,0,0,.87); background-image: linear-gradient(to bottom left,var(--tw-gradient-stops)); @@ -176,7 +174,7 @@ } .homeorg, .homepro, .homemodel, .i-env{ position: relative; - padding-bottom: 5em; + padding-bottom: 3em; } .homenews::before{ content: ''; @@ -203,7 +201,7 @@ padding: 0; } .newslist{ - height: 300px; + height: 260px; overflow: hidden; } @@ -278,7 +276,7 @@ z-index: 9; } .homeorg-list .card{ - background-image: linear-gradient(#FFF, #FFF 60%, #DFF0EF) !important; + /* background-image: linear-gradient(#FFF, #FFF 60%, #DFF0EF) !important; */ box-shadow: none !important; } .homeorg-list .card .ui.small.header .content{ @@ -299,7 +297,7 @@ background-color: #FFF; box-shadow: 0px 5px 10px 0px rgba(105, 192, 255, .3); border: 1px solid rgba(105, 192, 255, .4); - min-height: 10.8em; + /* min-height: 10.8em; */ } .homepro-list .ui.card>.content>.header{ line-height: 40px !important; @@ -307,7 +305,7 @@ .homepro-list .swiper-pagination-bullet-active, .homeorg-list .swiper-pagination-bullet-active{ width: 40px; - border-radius: 4px; + border-radius: 4px; } .i-env > div{ position: relative; @@ -317,6 +315,15 @@ } @media only screen and (max-width: 767px) { + .mobile-margin-left-20 { + margin-left: 20px !important; + } + .mobile-text-align-center { + text-align: center !important; + } + .mobile-justify-content-center { + justify-content: center !important; + } .am-mt-30{ margin-top: 1.5rem !important;} .ui.secondary.hometop.segment{ margin-bottom: 5.0rem; @@ -341,7 +348,7 @@ background: #FFF; } .homeorg{ - padding-left: 3.5em; + /* padding-left: 3.5em; */ } .homeorg-tit::after { left: -2.3em; diff --git a/custom/public/img/home-banner-01-en.jpg b/custom/public/img/home-banner-01-en.jpg new file mode 100644 index 000000000..59001d4f0 Binary files /dev/null and b/custom/public/img/home-banner-01-en.jpg differ diff --git a/custom/public/img/home-banner-01.jpg b/custom/public/img/home-banner-01.jpg new file mode 100644 index 000000000..1aa0563c4 Binary files /dev/null and b/custom/public/img/home-banner-01.jpg differ diff --git a/custom/public/img/home-bg-ps.png b/custom/public/img/home-bg-ps.png new file mode 100644 index 000000000..27e6ae1a1 Binary files /dev/null and b/custom/public/img/home-bg-ps.png differ diff --git a/custom/public/img/logo-footer.svg b/custom/public/img/logo-footer.svg new file mode 100644 index 000000000..3be0e5b18 --- /dev/null +++ b/custom/public/img/logo-footer.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/custom/public/img/logo-w.svg b/custom/public/img/logo-w.svg index 867acc1e2..133f63d23 100644 --- a/custom/public/img/logo-w.svg +++ b/custom/public/img/logo-w.svg @@ -1 +1,45 @@ -logo-w \ No newline at end of file + + + + + + + + + + + + + + + diff --git a/custom/public/rotation3D/img/baseimg.png b/custom/public/rotation3D/img/baseimg.png new file mode 100644 index 000000000..960f1c772 Binary files /dev/null and b/custom/public/rotation3D/img/baseimg.png differ diff --git a/index.html b/index.html index 643c31b06..0b804a669 100644 --- a/index.html +++ b/index.html @@ -107,7 +107,7 @@ - + @@ -250,7 +250,7 @@ var _hmt = _hmt || []; - + 7月中下旬登录启智AI协作平台,需登记手机号码啦>>> @@ -308,7 +308,7 @@ var 
_hmt = _hmt || []; isNewNotice=false; } let isShowNoticeTag = false; - let notices= [{"Title":"“我为开源打榜狂”上榜领奖者名单公示1周,10万奖金被瓜分,请大家自行确认\u003e\u003e\u003e","Link":"https://openi.org.cn/html/2022/notices_0701/636.html","Visible":1},{"Title":"7月中下旬登录启智AI协作平台,需登记手机号码啦\u003e\u003e\u003e","Link":"https://git.openi.org.cn/OpenIOSSG/promote/src/branch/master/notice/Other_notes/RegisterMobileNumber.md","Visible":1},{"Title":"智算网络Beta版本上线,大大缩短算力排队时间,速来体验吧~\u003e\u003e\u003e","Link":"https://openi.org.cn/html/2022/dongtai_0628/634.html","Visible":1},{"Title":"启智AI协作平台问卷调查,邀请您参加\u003e\u003e\u003e","Link":"https://wj.qq.com/s2/10362208/5c0c","Visible":1}] + let notices= [{"Title":"“我为开源打榜狂”上榜领奖者名单公示1周,10万奖金被瓜分,请大家自行确认\u003e\u003e\u003e","Link":"https://openi.org.cn/html/2022/notices_0701/636.html","Visible":1},{"Title":"7月中下旬登录启智AI协作平台,需登记手机号码啦\u003e\u003e\u003e","Link":"https://openi.pcl.ac.cn/OpenIOSSG/promote/src/branch/master/notice/Other_notes/RegisterMobileNumber.md","Visible":1},{"Title":"智算网络Beta版本上线,大大缩短算力排队时间,速来体验吧~\u003e\u003e\u003e","Link":"https://openi.org.cn/html/2022/dongtai_0628/634.html","Visible":1},{"Title":"启智AI协作平台问卷调查,邀请您参加\u003e\u003e\u003e","Link":"https://wj.qq.com/s2/10362208/5c0c","Visible":1}] if(notices != null && notices!=''){ for (i =0;i - + diff --git a/models/ai_model_manage.go b/models/ai_model_manage.go index 5b14b9ba2..702cf0937 100644 --- a/models/ai_model_manage.go +++ b/models/ai_model_manage.go @@ -33,6 +33,7 @@ type AiModelManage struct { CodeBranch string `xorm:"varchar(400) NULL" json:"codeBranch"` CodeCommitID string `xorm:"NULL" json:"codeCommitID"` UserId int64 `xorm:"NOT NULL" json:"userId"` + IsPrivate bool `xorm:"DEFAULT true" json:"isPrivate"` UserName string `json:"userName"` UserRelAvatarLink string `json:"userRelAvatarLink"` TrainTaskInfo string `xorm:"text NULL" json:"trainTaskInfo"` @@ -40,6 +41,7 @@ type AiModelManage struct { UpdatedUnix timeutil.TimeStamp `xorm:"updated" json:"updatedUnix"` IsCanOper bool `json:"isCanOper"` IsCanDelete bool `json:"isCanDelete"` + IsCanDownload bool `json:"isCanDownload"` } type AiModelConvert struct { @@ -84,8 +86,10 @@ type AiModelQueryOptions struct { SortType string New int // JobStatus CloudbrainStatus - Type int - Status int + Type int + Status int + IsOnlyThisRepo bool + IsQueryPrivate bool } func (a *AiModelConvert) IsGpuTrainTask() bool { @@ -217,6 +221,19 @@ func SaveModelToDb(model *AiModelManage) error { return nil } +func QueryModelConvertByName(name string, repoId int64) ([]*AiModelConvert, error) { + sess := x.NewSession() + defer sess.Close() + sess.Select("*").Table(new(AiModelConvert)). 
+ Where("name='" + name + "' and repo_id=" + fmt.Sprint(repoId)).OrderBy("created_unix desc") + aiModelManageConvertList := make([]*AiModelConvert, 0) + err := sess.Find(&aiModelManageConvertList) + if err == nil { + return aiModelManageConvertList, nil + } + return nil, err +} + func QueryModelConvertById(id string) (*AiModelConvert, error) { sess := x.NewSession() defer sess.Close() @@ -288,15 +305,30 @@ func ModifyModelDescription(id string, description string) error { return nil } -func ModifyLocalModel(id string, name, label, description string, engine int) error { +func ModifyModelPrivate(id string, isPrivate bool) error { + var sess *xorm.Session + sess = x.ID(id) + defer sess.Close() + re, err := sess.Cols("is_private").Update(&AiModelManage{ + IsPrivate: isPrivate, + }) + if err != nil { + return err + } + log.Info("success to update isPrivate from db.re=" + fmt.Sprint((re))) + return nil +} + +func ModifyLocalModel(id string, name, label, description string, engine int, isPrivate bool) error { var sess *xorm.Session sess = x.ID(id) defer sess.Close() - re, err := sess.Cols("name", "label", "description", "engine").Update(&AiModelManage{ + re, err := sess.Cols("name", "label", "description", "engine", "is_private").Update(&AiModelManage{ Description: description, Name: name, Label: label, Engine: int64(engine), + IsPrivate: isPrivate, }) if err != nil { return err @@ -411,7 +443,11 @@ func QueryModel(opts *AiModelQueryOptions) ([]*AiModelManage, int64, error) { builder.Eq{"ai_model_manage.status": opts.Status}, ) } - + if !opts.IsQueryPrivate { + cond = cond.And( + builder.Eq{"ai_model_manage.is_private": false}, + ) + } count, err := sess.Where(cond).Count(new(AiModelManage)) if err != nil { return nil, 0, fmt.Errorf("Count: %v", err) diff --git a/models/cloudbrain.go b/models/cloudbrain.go index 21eafa62d..aeed8629c 100755 --- a/models/cloudbrain.go +++ b/models/cloudbrain.go @@ -204,6 +204,7 @@ type Cloudbrain struct { BenchmarkTypeRankLink string `xorm:"-"` StartTime timeutil.TimeStamp EndTime timeutil.TimeStamp + Cleared bool `xorm:"DEFAULT false"` Spec *Specification `xorm:"-"` } @@ -304,6 +305,17 @@ func ConvertDurationToStr(duration int64) string { } return util.AddZero(duration/3600) + ":" + util.AddZero(duration%3600/60) + ":" + util.AddZero(duration%60) } +func ConvertStrToDuration(trainJobDuration string) int64 { + trainJobDurationList := strings.Split(trainJobDuration, ":") + if len(trainJobDurationList) == 3 { + i, _ := strconv.ParseInt(trainJobDurationList[0], 10, 64) + j, _ := strconv.ParseInt(trainJobDurationList[1], 10, 64) + k, _ := strconv.ParseInt(trainJobDurationList[2], 10, 64) + return i*3600 + j*60 + k + } else { + return 0 + } +} func IsTrainJobTerminal(status string) bool { return status == string(ModelArtsTrainJobCompleted) || status == string(ModelArtsTrainJobFailed) || status == string(ModelArtsTrainJobKilled) || status == GrampusStatusFailed || status == GrampusStatusStopped || status == GrampusStatusSucceeded @@ -1596,9 +1608,23 @@ func Cloudbrains(opts *CloudbrainsOptions) ([]*CloudbrainInfo, int64, error) { } } if (opts.AiCenter) != "" { - cond = cond.And( - builder.Like{"cloudbrain.ai_center", opts.AiCenter}, - ) + if opts.AiCenter == AICenterOfCloudBrainOne { + cond = cond.And( + builder.Eq{"cloudbrain.type": TypeCloudBrainOne}, + ) + } else if opts.AiCenter == AICenterOfCloudBrainTwo { + cond = cond.And( + builder.Eq{"cloudbrain.type": TypeCloudBrainTwo}, + ) + } else if opts.AiCenter == AICenterOfChengdu { + cond = cond.And( + 
builder.Eq{"cloudbrain.type": TypeCDCenter}, + ) + } else { + cond = cond.And( + builder.Like{"cloudbrain.ai_center", opts.AiCenter}, + ) + } } if (opts.Cluster) != "" { if opts.Cluster == "resource_cluster_openi" { @@ -1880,6 +1906,12 @@ func GetCloudbrainByID(id string) (*Cloudbrain, error) { return getRepoCloudBrain(cb) } +func IsCloudbrainExistByJobName(jobName string)(bool,error){ + return x.Unscoped().Exist(&Cloudbrain{ + JobName: jobName, + }) +} + func GetCloudbrainByIDWithDeleted(id string) (*Cloudbrain, error) { idInt64, _ := strconv.ParseInt(id, 10, 64) cb := &Cloudbrain{ID: idInt64} @@ -1975,7 +2007,7 @@ func UpdateTrainJobVersion(job *Cloudbrain) error { func updateJobTrainVersion(e Engine, job *Cloudbrain) error { var sess *xorm.Session sess = e.Where("job_id = ? AND version_name=?", job.JobID, job.VersionName) - _, err := sess.Cols("status", "train_job_duration", "duration", "start_time", "end_time", "created_unix").Update(job) + _, err := sess.Cols("status", "train_job_duration", "duration", "start_time", "end_time", "created_unix", "ai_center").Update(job) return err } @@ -2025,6 +2057,83 @@ func GetCloudBrainUnStoppedJob() ([]*Cloudbrain, error) { Find(&cloudbrains) } +func GetCloudBrainOneStoppedNotDebugJobDaysAgo(days int, limit int) ([]*Cloudbrain, error) { + cloudbrains := make([]*Cloudbrain, 0, 10) + endTimeBefore := time.Now().Unix() - int64(days)*24*3600 + missEndTimeBefore := endTimeBefore - 24*3600 + return cloudbrains, x.Unscoped().Cols("id,job_name,job_id"). + In("status", + JobStopped, JobSucceeded, JobFailed, ModelArtsCreateFailed, ModelArtsStartFailed, ModelArtsUnavailable, ModelArtsResizFailed, ModelArtsDeleted, + ModelArtsStopped, ModelArtsTrainJobCanceled, ModelArtsTrainJobCheckFailed, ModelArtsTrainJobCompleted, ModelArtsTrainJobDeleteFailed, ModelArtsTrainJobDeployServiceFailed, + ModelArtsTrainJobFailed, ModelArtsTrainJobImageFailed, ModelArtsTrainJobKilled, ModelArtsTrainJobLost, ModelArtsTrainJobSubmitFailed, ModelArtsTrainJobSubmitModelFailed). + Where("(((end_time is null or end_time=0) and updated_unix 0 { + idsIn := "" + for i, id := range tempIds { + if i == 0 { + idsIn += strconv.FormatInt(id, 10) + } else { + idsIn += "," + strconv.FormatInt(id, 10) + } + } + + _, errTemp := x.Unscoped().Exec("update cloudbrain set cleared=true where id in (" + idsIn + ")") + if errTemp != nil { + err = errTemp + } + + } + + } + return err + +} + +func getPageIds(ids []int64, page int, pagesize int) []int64 { + begin := (page - 1) * pagesize + end := (page) * pagesize + + if begin > len(ids)-1 { + return []int64{} + } + if end > len(ids)-1 { + return ids[begin:] + } else { + return ids[begin:end] + } + +} + func GetStoppedJobWithNoDurationJob() ([]*Cloudbrain, error) { cloudbrains := make([]*Cloudbrain, 0) return cloudbrains, x. @@ -2040,7 +2149,7 @@ func GetStoppedJobWithNoStartTimeEndTime() ([]*Cloudbrain, error) { func GetC2NetWithAiCenterWrongJob() ([]*Cloudbrain, error) { cloudbrains := make([]*Cloudbrain, 0) return cloudbrains, x. - In("status", ModelArtsTrainJobFailed, ModelArtsTrainJobKilled, ModelArtsStopped, JobStopped, JobFailed). + In("status", ModelArtsTrainJobCompleted, ModelArtsTrainJobFailed, ModelArtsTrainJobKilled, ModelArtsStopped, JobStopped, JobFailed, JobSucceeded). Where("type = ?", TypeC2Net). 
Find(&cloudbrains) } @@ -2299,10 +2408,78 @@ func CloudbrainAllStatic(opts *CloudbrainsOptions) ([]*CloudbrainInfo, int64, er } // sess.OrderBy("cloudbrain.created_unix DESC") cloudbrains := make([]*CloudbrainInfo, 0, setting.UI.IssuePagingNum) - if err := sess.Cols("status", "type", "job_type", "train_job_duration", "duration", "compute_resource", "created_unix", "start_time", "end_time", "work_server_number").Table(&Cloudbrain{}).Unscoped().Where(cond). + if err := sess.Table(&Cloudbrain{}).Unscoped().Where(cond). Find(&cloudbrains); err != nil { return nil, 0, fmt.Errorf("Find: %v", err) } + if opts.NeedRepoInfo { + var ids []int64 + for _, task := range cloudbrains { + ids = append(ids, task.RepoID) + } + repositoryMap, err := GetRepositoriesMapByIDs(ids) + if err == nil { + for _, task := range cloudbrains { + task.Repo = repositoryMap[task.RepoID] + } + } + + } + return cloudbrains, count, nil +} + +func CloudbrainAllKanBan(opts *CloudbrainsOptions) ([]*CloudbrainInfo, int64, error) { + sess := x.NewSession() + defer sess.Close() + + var cond = builder.NewCond() + + if (opts.Type) >= 0 { + cond = cond.And( + builder.Eq{"cloudbrain.type": opts.Type}, + ) + } + if opts.BeginTimeUnix > 0 && opts.EndTimeUnix > 0 { + cond = cond.And( + builder.And(builder.Gte{"cloudbrain.created_unix": opts.BeginTimeUnix}, builder.Lte{"cloudbrain.created_unix": opts.EndTimeUnix}), + ) + } + var count int64 + var err error + count, err = sess.Unscoped().Where(cond).Count(new(Cloudbrain)) + + if err != nil { + return nil, 0, fmt.Errorf("Count: %v", err) + } + + if opts.Page >= 0 && opts.PageSize > 0 { + var start int + if opts.Page == 0 { + start = 0 + } else { + start = (opts.Page - 1) * opts.PageSize + } + sess.Limit(opts.PageSize, start) + } + // sess.OrderBy("cloudbrain.created_unix DESC") + cloudbrains := make([]*CloudbrainInfo, 0, setting.UI.IssuePagingNum) + if err := sess.Cols("id", "type", "work_server_number", "duration", "train_job_duration", "ai_center", "cluster").Table(&Cloudbrain{}).Unscoped().Where(cond). 
+ Find(&cloudbrains); err != nil { + return nil, 0, fmt.Errorf("Find: %v", err) + } + if opts.NeedRepoInfo { + var ids []int64 + for _, task := range cloudbrains { + ids = append(ids, task.RepoID) + } + repositoryMap, err := GetRepositoriesMapByIDs(ids) + if err == nil { + for _, task := range cloudbrains { + task.Repo = repositoryMap[task.RepoID] + } + } + + } return cloudbrains, count, nil } diff --git a/models/cloudbrain_static.go b/models/cloudbrain_static.go index 19e55fb6d..40d7a2a2e 100644 --- a/models/cloudbrain_static.go +++ b/models/cloudbrain_static.go @@ -1,7 +1,6 @@ package models import ( - "fmt" "strconv" "time" @@ -42,14 +41,15 @@ type TaskDetail struct { type CloudbrainDurationStatistic struct { ID int64 `xorm:"pk autoincr"` Cluster string - AiCenterCode string + AiCenterCode string `xorm:"INDEX"` AiCenterName string ComputeResource string - AccCardType string + AccCardType string `xorm:"INDEX"` - DateTime string - DayTime string - HourTime int + DateTime timeutil.TimeStamp `xorm:"INDEX DEFAULT 0"` + DateTimeUnix timeutil.TimeStamp `xorm:"INDEX DEFAULT 0"` + DayTime string `xorm:"INDEX"` + HourTime int `xorm:"INDEX"` CardsUseDuration int CardsTotalDuration int CardsTotalNum int @@ -183,6 +183,17 @@ func GetWaittingTop() ([]*CloudbrainInfo, error) { Find(&cloudbrains); err != nil { log.Info("find error.") } + + var ids []int64 + for _, task := range cloudbrains { + ids = append(ids, task.RepoID) + } + repositoryMap, err := GetRepositoriesMapByIDs(ids) + if err == nil { + for _, task := range cloudbrains { + task.Repo = repositoryMap[task.RepoID] + } + } return cloudbrains, nil } @@ -199,6 +210,16 @@ func GetRunningTop() ([]*CloudbrainInfo, error) { Find(&cloudbrains); err != nil { log.Info("find error.") } + var ids []int64 + for _, task := range cloudbrains { + ids = append(ids, task.RepoID) + } + repositoryMap, err := GetRepositoriesMapByIDs(ids) + if err == nil { + for _, task := range cloudbrains { + task.Repo = repositoryMap[task.RepoID] + } + } return cloudbrains, nil } @@ -275,17 +296,21 @@ func GetCloudbrainByTime(beginTime int64, endTime int64) ([]*CloudbrainInfo, err sess := x.NewSession() defer sess.Close() var cond = builder.NewCond() - cond = cond.And( - builder.And(builder.Gte{"cloudbrain.end_time": beginTime}, builder.Lte{"cloudbrain.end_time": endTime}), + cond = cond.Or( + builder.And(builder.Gte{"cloudbrain.end_time": beginTime}, builder.Lte{"cloudbrain.start_time": beginTime}, builder.Gt{"cloudbrain.start_time": 0}), ) cond = cond.Or( - builder.Eq{"cloudbrain.status": string(JobRunning)}, + builder.And(builder.Gte{"cloudbrain.start_time": beginTime}, builder.Lte{"cloudbrain.start_time": endTime}, builder.Gt{"cloudbrain.start_time": 0}), ) - sess.OrderBy("cloudbrain.created_unix ASC") + cond = cond.Or( + builder.And(builder.Eq{"cloudbrain.status": string(JobRunning)}), + ) + sess.OrderBy("cloudbrain.id ASC") cloudbrains := make([]*CloudbrainInfo, 0, 10) if err := sess.Table(&Cloudbrain{}).Unscoped().Where(cond). Find(&cloudbrains); err != nil { - log.Info("find error.") + log.Error("find error.") + return nil, err } return cloudbrains, nil } @@ -300,7 +325,8 @@ func GetSpecByAiCenterCodeAndType(aiCenterCode string, accCardType string) ([]*C cloudbrainSpecs := make([]*CloudbrainSpec, 0, 10) if err := sess.Table(&CloudbrainSpec{}).Where(cond). 
Find(&cloudbrainSpecs); err != nil { - log.Info("find error.") + log.Error("find error.") + return nil, err } return cloudbrainSpecs, nil } @@ -309,33 +335,24 @@ func InsertCloudbrainDurationStatistic(cloudbrainDurationStatistic *CloudbrainDu return xStatistic.Insert(cloudbrainDurationStatistic) } -func DeleteCloudbrainDurationStatisticHour(date string, hour int, aiCenterCode string, accCardType string) error { - sess := xStatistic.NewSession() - defer sess.Close() - if err := sess.Begin(); err != nil { - return fmt.Errorf("Begin: %v", err) +func getDurationStatistic(cb *CloudbrainDurationStatistic) (*CloudbrainDurationStatistic, error) { + has, err := x.Get(cb) + if err != nil { + return nil, err + } else if !has { + return nil, ErrJobNotExist{} } - - if _, err := sess.Where("day_time = ? AND hour_time = ? AND ai_center_code = ? AND acc_card_type = ?", date, hour, aiCenterCode, accCardType).Delete(&CloudbrainDurationStatistic{}); err != nil { - return fmt.Errorf("Delete: %v", err) - } - - if err := sess.Commit(); err != nil { - sess.Close() - return fmt.Errorf("Commit: %v", err) - } - - sess.Close() - return nil + return cb, nil } func GetCanUseCardInfo() ([]*ResourceQueue, error) { sess := x.NewSession() defer sess.Close() - sess.OrderBy("resource_queue.id ASC") + sess.OrderBy("resource_queue.cluster DESC, resource_queue.ai_center_code ASC") ResourceQueues := make([]*ResourceQueue, 0, 10) if err := sess.Table(&ResourceQueue{}).Find(&ResourceQueues); err != nil { - log.Info("find error.") + log.Error("find error.") + return nil, err } return ResourceQueues, nil } @@ -346,7 +363,7 @@ func GetCardDurationStatistics(opts *DurationStatisticOptions) ([]*CloudbrainDur var cond = builder.NewCond() if opts.BeginTime.Unix() > 0 && opts.EndTime.Unix() > 0 { cond = cond.And( - builder.And(builder.Gte{"cloudbrain_duration_statistic.created_unix": opts.BeginTime.Unix()}, builder.Lte{"cloudbrain_duration_statistic.created_unix": opts.EndTime.Unix()}), + builder.And(builder.Gte{"cloudbrain_duration_statistic.date_time_unix": opts.BeginTime.Unix()}, builder.Lt{"cloudbrain_duration_statistic.date_time_unix": opts.EndTime.Unix()}), ) } if opts.AiCenterCode != "" { @@ -357,7 +374,8 @@ func GetCardDurationStatistics(opts *DurationStatisticOptions) ([]*CloudbrainDur CloudbrainDurationStatistics := make([]*CloudbrainDurationStatistic, 0, 10) if err := sess.Table(&CloudbrainDurationStatistic{}).Where(cond). 
Find(&CloudbrainDurationStatistics); err != nil { - log.Info("find error.") + log.Error("find error.") + return nil, err } return CloudbrainDurationStatistics, nil } @@ -365,10 +383,45 @@ func GetCardDurationStatistics(opts *DurationStatisticOptions) ([]*CloudbrainDur func GetDurationRecordBeginTime() ([]*CloudbrainDurationStatistic, error) { sess := xStatistic.NewSession() defer sess.Close() - sess.OrderBy("cloudbrain_duration_statistic.id ASC limit 1") + + var cond = builder.NewCond() + + cond = cond.And( + builder.Gt{"cloudbrain_duration_statistic.date_time_unix": 0}, + ) + + sess.OrderBy("cloudbrain_duration_statistic.date_time_unix ASC limit 1") CloudbrainDurationStatistics := make([]*CloudbrainDurationStatistic, 0) - if err := sess.Table(&CloudbrainDurationStatistic{}).Find(&CloudbrainDurationStatistics); err != nil { - log.Info("find error.") + if err := sess.Table(&CloudbrainDurationStatistic{}).Where(cond).Find(&CloudbrainDurationStatistics); err != nil { + log.Error("find error.") + return nil, err } return CloudbrainDurationStatistics, nil } + +func GetDurationRecordUpdateTime() ([]*CloudbrainDurationStatistic, error) { + sess := xStatistic.NewSession() + defer sess.Close() + var cond = builder.NewCond() + + cond = cond.And( + builder.Gt{"cloudbrain_duration_statistic.date_time_unix": 1577808000}, + ) + sess.OrderBy("cloudbrain_duration_statistic.date_time_unix DESC limit 1") + CloudbrainDurationStatistics := make([]*CloudbrainDurationStatistic, 0) + if err := sess.Table(&CloudbrainDurationStatistic{}).Where(cond).Find(&CloudbrainDurationStatistics); err != nil { + log.Error("find error.") + return nil, err + } + return CloudbrainDurationStatistics, nil +} + +func DeleteCloudbrainDurationStatistic(beginTime timeutil.TimeStamp, endTime timeutil.TimeStamp) error { + sess := xStatistic.NewSession() + defer sess.Close() + if _, err := sess.Exec("DELETE FROM cloudbrain_duration_statistic WHERE cloudbrain_duration_statistic.date_time_unix BETWEEN ? 
AND ?", beginTime, endTime); err != nil { + log.Error("DELETE cloudbrain_duration_statistic data error.") + return err + } + return nil +} diff --git a/models/dataset.go b/models/dataset.go index 972503641..4c1dc24db 100755 --- a/models/dataset.go +++ b/models/dataset.go @@ -122,22 +122,22 @@ func (datasets DatasetList) loadAttachmentAttributes(opts *SearchDatasetOptions) for i := range datasets { if attachment.DatasetID == datasets[i].ID { - if !attachment.IsPrivate{ + if !attachment.IsPrivate { datasets[i].Attachments = append(datasets[i].Attachments, attachment) - }else{ + } else { permission, ok := permissionMap[datasets[i].ID] if !ok { permission = false datasets[i].Repo.GetOwner() if !permission { - if datasets[i].Repo.OwnerID==opts.User.ID{ + if datasets[i].Repo.OwnerID == opts.User.ID { permission = true - }else{ + } else { isCollaborator, _ := datasets[i].Repo.IsCollaborator(opts.User.ID) - isInRepoTeam,_:=datasets[i].Repo.IsInRepoTeam(opts.User.ID) + isInRepoTeam, _ := datasets[i].Repo.IsInRepoTeam(opts.User.ID) - if isCollaborator ||isInRepoTeam { + if isCollaborator || isInRepoTeam { permission = true } } @@ -603,3 +603,11 @@ func UpdateDatasetCreateUser(ID int64, user *User) error { } return nil } + +func QueryDatasetGroupByTask() ([]map[string]interface{}, error) { + rows, err := x.QueryInterface("SELECT count(*) as total,task FROM public.dataset where task <>'' group by task order by total desc limit 7") + if err != nil { + return nil, err + } + return rows, nil +} diff --git a/models/repo.go b/models/repo.go index f0760108b..832e3fc37 100755 --- a/models/repo.go +++ b/models/repo.go @@ -1279,7 +1279,7 @@ func CreateRepository(ctx DBContext, doer, u *User, repo *Repository, opts ...Cr } if setting.Service.AutoWatchNewRepos { - if err = watchRepo(ctx.e, doer.ID, repo.ID, true); err != nil { + if err = watchRepo(ctx.e, doer.ID, repo.ID, true, ReceiveAllNotification); err != nil { return fmt.Errorf("watchRepo: %v", err) } } diff --git a/models/repo_activity_custom.go b/models/repo_activity_custom.go index b6fffca0e..26b2ea14f 100644 --- a/models/repo_activity_custom.go +++ b/models/repo_activity_custom.go @@ -263,7 +263,11 @@ func GetAllUserKPIStats(startTime time.Time, endTime time.Time) (map[string]*git log.Warn("get user kpi status err:"+repository.RepoPath(), err1.Error()) continue } - + // if repository.Name == "yolov5" { + // log.Info("repoName=" + repository.Name + " owner=" + repository.RepoPath()) + // authorsOneRepoJson, _ := json.Marshal(authorsOneRepo) + // log.Info("authorsOneRepoJson=" + string(authorsOneRepoJson)) + // } for key, value := range authorsOneRepo { if _, ok := authors[key]; !ok { authors[key] = &git.UserKPIStats{ diff --git a/models/repo_watch.go b/models/repo_watch.go index 573a2d78a..7c43ee352 100644 --- a/models/repo_watch.go +++ b/models/repo_watch.go @@ -24,6 +24,14 @@ const ( RepoWatchModeAuto // 3 ) +// NotifyType specifies what kind of watch the user has on a repository +type NotifyType int8 + +const ( + RejectAllNotification NotifyType = 0 + ReceiveAllNotification NotifyType = 9 +) + var ActionChan = make(chan *Action, 200) var ActionChan4Task = make(chan Action, 200) @@ -34,6 +42,7 @@ type Watch struct { RepoID int64 `xorm:"UNIQUE(watch)"` Mode RepoWatchMode `xorm:"SMALLINT NOT NULL DEFAULT 1"` CreatedUnix int64 `xorm:"created"` + NotifyType NotifyType `xorm:"SMALLINT NOT NULL DEFAULT 0"` } // getWatch gets what kind of subscription a user has on a given repository; returns dummy record if none found @@ -60,8 +69,20 @@ func 
IsWatching(userID, repoID int64) bool { return err == nil && isWatchMode(watch.Mode) } +// GetWatchNotifyType +func GetWatchNotifyType(userID, repoID int64) NotifyType { + watch, err := getWatch(x, userID, repoID) + if err != nil { + return RejectAllNotification + } + return watch.NotifyType +} + func watchRepoMode(e Engine, watch Watch, mode RepoWatchMode) (err error) { if watch.Mode == mode { + if _, err := e.ID(watch.ID).Cols("notify_type").Update(watch); err != nil { + return err + } return nil } if mode == RepoWatchModeAuto && (watch.Mode == RepoWatchModeDont || isWatchMode(watch.Mode)) { @@ -109,7 +130,7 @@ func WatchRepoMode(userID, repoID int64, mode RepoWatchMode) (err error) { return watchRepoMode(x, watch, mode) } -func watchRepo(e Engine, userID, repoID int64, doWatch bool) (err error) { +func watchRepo(e Engine, userID, repoID int64, doWatch bool, notifyTypes ...NotifyType) (err error) { var watch Watch if watch, err = getWatch(e, userID, repoID); err != nil { return err @@ -119,14 +140,19 @@ func watchRepo(e Engine, userID, repoID int64, doWatch bool) (err error) { } else if !doWatch { err = watchRepoMode(e, watch, RepoWatchModeNone) } else { + notifyType := RejectAllNotification + if len(notifyTypes) > 0 { + notifyType = notifyTypes[0] + } + watch.NotifyType = notifyType err = watchRepoMode(e, watch, RepoWatchModeNormal) } return err } // WatchRepo watch or unwatch repository. -func WatchRepo(userID, repoID int64, watch bool) (err error) { - return watchRepo(x, userID, repoID, watch) +func WatchRepo(userID, repoID int64, watch bool, notifyType ...NotifyType) (err error) { + return watchRepo(x, userID, repoID, watch, notifyType...) } func getWatchers(e Engine, repoID int64) ([]*Watch, error) { @@ -156,6 +182,7 @@ func getRepoWatchersIDs(e Engine, repoID int64) ([]int64, error) { return ids, e.Table("watch"). Where("watch.repo_id=?", repoID). And("watch.mode<>?", RepoWatchModeDont). + And("watch.notify_type > ?", RejectAllNotification). Select("user_id"). 
Find(&ids) } diff --git a/models/resource_specification.go b/models/resource_specification.go index 7a11edd05..809a3496a 100644 --- a/models/resource_specification.go +++ b/models/resource_specification.go @@ -3,6 +3,7 @@ package models import ( "code.gitea.io/gitea/modules/timeutil" "fmt" + "strings" "xorm.io/builder" ) @@ -197,12 +198,104 @@ type Specification struct { AiCenterName string IsExclusive bool ExclusiveOrg string + //specs that have the same sourceSpecId, computeResource and cluster as current spec + RelatedSpecs []*Specification } func (Specification) TableName() string { return "resource_specification" } +func (s *Specification) loadRelatedSpecs() { + if s.RelatedSpecs != nil { + return + } + defaultSpecs := make([]*Specification, 0) + if s.SourceSpecId == "" { + s.RelatedSpecs = defaultSpecs + return + } + r, err := FindSpecs(FindSpecsOptions{ + ComputeResource: s.ComputeResource, + Cluster: s.Cluster, + SourceSpecId: s.SourceSpecId, + RequestAll: true, + SpecStatus: SpecOnShelf, + }) + if err != nil { + s.RelatedSpecs = defaultSpecs + return + } + s.RelatedSpecs = r +} +func (s *Specification) GetAvailableCenterIds(userIds ...int64) []string { + s.loadRelatedSpecs() + + if len(s.RelatedSpecs) == 0 { + return make([]string, 0) + } + + var uId int64 + if len(userIds) > 0 { + uId = userIds[0] + } + //filter exclusive specs + specs := FilterExclusiveSpecs(s.RelatedSpecs, uId) + + centerIds := make([]string, len(specs)) + for i, v := range specs { + centerIds[i] = v.AiCenterCode + } + return centerIds +} + +func FilterExclusiveSpecs(r []*Specification, userId int64) []*Specification { + if userId == 0 { + return r + } + specs := make([]*Specification, 0, len(r)) + specMap := make(map[int64]string, 0) + for i := 0; i < len(r); i++ { + spec := r[i] + if _, has := specMap[spec.ID]; has { + continue + } + if !spec.IsExclusive { + specs = append(specs, spec) + specMap[spec.ID] = "" + continue + } + orgs := strings.Split(spec.ExclusiveOrg, ";") + for _, org := range orgs { + isMember, _ := IsOrganizationMemberByOrgName(org, userId) + if isMember { + specs = append(specs, spec) + specMap[spec.ID] = "" + break + } + } + } + return specs +} + +func DistinctSpecs(r []*Specification) []*Specification { + specs := make([]*Specification, 0, len(r)) + sourceSpecIdMap := make(map[string]string, 0) + for i := 0; i < len(r); i++ { + spec := r[i] + if spec.SourceSpecId == "" { + specs = append(specs, spec) + continue + } + if _, has := sourceSpecIdMap[spec.SourceSpecId]; has { + continue + } + specs = append(specs, spec) + sourceSpecIdMap[spec.SourceSpecId] = "" + } + return specs +} + func InsertResourceSpecification(r ResourceSpecification) (int64, error) { return x.Insert(&r) } @@ -298,6 +391,15 @@ func ResourceSpecOffShelf(id int64) (int64, error) { return n, err } +func GetResourceSpecificationByIds(ids []int64) ([]*Specification, error) { + r := make([]*Specification, 0) + err := x.In("resource_specification.id", ids). + Join("INNER", "resource_queue", "resource_queue.id = resource_specification.queue_id"). 
+ Find(&r) + return r, err + +} + func GetResourceSpecification(r *ResourceSpecification) (*ResourceSpecification, error) { has, err := x.Get(r) if err != nil { diff --git a/models/user.go b/models/user.go index b21858e37..c421455bc 100755 --- a/models/user.go +++ b/models/user.go @@ -346,7 +346,7 @@ func (u *User) DashboardLink() string { if u.IsOrganization() { return setting.AppSubURL + "/org/" + u.Name + "/dashboard/" } - return setting.AppSubURL + "/" + return setting.AppSubURL + "/dashboard" } // HomeLink returns the user or organization home page link. diff --git a/models/user_analysis_for_activity.go b/models/user_analysis_for_activity.go index 2066697d2..99ff990ce 100644 --- a/models/user_analysis_for_activity.go +++ b/models/user_analysis_for_activity.go @@ -449,3 +449,20 @@ func QueryUserLoginInfo(userIds []int64) []*UserLoginLog { return loginList } + +func QueryUserAnnualReport(userId int64) *UserSummaryCurrentYear { + statictisSess := xStatistic.NewSession() + defer statictisSess.Close() + log.Info("userId=" + fmt.Sprint(userId)) + + reList := make([]*UserSummaryCurrentYear, 0) + err := statictisSess.Select("*").Table(new(UserSummaryCurrentYear)).Where("id=" + fmt.Sprint(userId)).Find(&reList) + if err == nil { + if len(reList) > 0 { + return reList[0] + } + } else { + log.Info("error:=" + err.Error()) + } + return nil +} diff --git a/models/user_business_analysis.go b/models/user_business_analysis.go index 05a4e6c82..4ae9f6ced 100644 --- a/models/user_business_analysis.go +++ b/models/user_business_analysis.go @@ -132,11 +132,17 @@ func makeResultForMonth(allUserInfo []*UserMetrics, count int) []*UserMetrics { if count > 0 { for _, userMetrics := range allUserInfo { dateTime := time.Unix(userMetrics.CountDate, 0) - month := fmt.Sprint(dateTime.Year()) + "-" + fmt.Sprint(int(dateTime.Month())) + mInt := int(dateTime.Month()) + mString := fmt.Sprint(mInt) + if mInt < 10 { + mString = "0" + mString + } + month := fmt.Sprint(dateTime.Year()) + "-" + mString if _, ok := monthMap[month]; !ok { monthUserMetrics := &UserMetrics{ DisplayDate: month, ActivateRegistUser: userMetrics.ActivateRegistUser, + RegistActivityUser: userMetrics.RegistActivityUser, NotActivateRegistUser: userMetrics.NotActivateRegistUser, TotalUser: userMetrics.TotalUser, TotalNotActivateRegistUser: userMetrics.TotalUser - userMetrics.TotalActivateRegistUser, @@ -152,6 +158,7 @@ func makeResultForMonth(allUserInfo []*UserMetrics, count int) []*UserMetrics { value.ActivateRegistUser += userMetrics.ActivateRegistUser value.NotActivateRegistUser += userMetrics.NotActivateRegistUser value.HasActivityUser += userMetrics.HasActivityUser + value.RegistActivityUser += userMetrics.RegistActivityUser value.TotalRegistUser += userMetrics.ActivateRegistUser + userMetrics.NotActivateRegistUser value.ActivateIndex = float64(value.ActivateRegistUser) / float64(value.TotalRegistUser) value.DaysForMonth += 1 @@ -610,7 +617,7 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS DataDate := currentTimeNow.Format("2006-01-02") + " 00:01" bonusMap := make(map[string]map[string]int) - if tableName == "user_business_analysis_current_year" { + if isUserYearData(tableName) { bonusMap = getBonusMap() log.Info("truncate all data from table:user_summary_current_year ") statictisSess.Exec("TRUNCATE TABLE user_summary_current_year") @@ -712,7 +719,7 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS userMetrics["TotalHasActivityUser"] = 
getMapKeyStringValue("TotalHasActivityUser", userMetrics) + 1 } } - if tableName == "user_business_analysis_current_year" { + if isUserYearData(tableName) { //年度数据 subTime := time.Now().UTC().Sub(dateRecordAll.RegistDate.AsTime().UTC()) mostActiveDay := "" @@ -772,6 +779,16 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS log.Info("refresh data finished.tableName=" + tableName + " total record:" + fmt.Sprint(insertCount)) } +func isUserYearData(tableName string) bool { + if tableName == "user_business_analysis_current_year" { + currentTimeNow := time.Now() + if currentTimeNow.Year() >= 2023 { + return false + } + } + return true +} + func getBonusMap() map[string]map[string]int { bonusMap := make(map[string]map[string]int) url := setting.RecommentRepoAddr + "bonus/record.txt" diff --git a/models/user_invitation.go b/models/user_invitation.go index 2d37bcb23..8a10e71a0 100644 --- a/models/user_invitation.go +++ b/models/user_invitation.go @@ -13,6 +13,7 @@ type Invitation struct { SrcUserID int64 `xorm:"NOT NULL DEFAULT 0"` UserID int64 `xorm:"NOT NULL DEFAULT 0"` Phone string `xorm:"INDEX"` + Email string `xorm:"-"` Avatar string `xorm:"-"` Name string `xorm:"-"` InvitationUserNum int `xorm:"-"` diff --git a/models/user_mail.go b/models/user_mail.go index 8bf74b81b..8388da068 100755 --- a/models/user_mail.go +++ b/models/user_mail.go @@ -216,6 +216,27 @@ func (email *EmailAddress) updateActivation(e Engine, activate bool) error { return updateUserCols(e, user, "rands") } +// UpdateEmailAddress update an email address of given user. +func (email *EmailAddress) UpdateEmailAddress(newEmailAddress string) error { + return email.updateEmailAddress(x, newEmailAddress) +} +func (email *EmailAddress) updateEmailAddress(e Engine, newEmailAddress string) error { + user, err := getUserByID(e, email.UID) + if err != nil { + return err + } + if user.Rands, err = GetUserSalt(); err != nil { + return err + } + user.Email = newEmailAddress + user.AvatarEmail = newEmailAddress + email.Email = newEmailAddress + if _, err := e.ID(email.ID).Cols("email").Update(email); err != nil { + return err + } + return updateUserCols(e, user, "email", "avatar_email") +} + // DeleteEmailAddress deletes an email address of given user. 
func DeleteEmailAddress(email *EmailAddress) (err error) { var deleted int64 diff --git a/modules/auth/user_form.go b/modules/auth/user_form.go index ad78607ab..521585264 100755 --- a/modules/auth/user_form.go +++ b/modules/auth/user_form.go @@ -88,6 +88,10 @@ type RegisterForm struct { Agree bool } +type UpdateEmailForm struct { + NewEmail string `binding:"Required;MaxSize(254)"` +} + // Validate valideates the fields func (f *RegisterForm) Validate(ctx *macaron.Context, errs binding.Errors) binding.Errors { return validate(errs, ctx.Data, f, ctx.Locale) diff --git a/modules/cloudbrain/cloudbrain.go b/modules/cloudbrain/cloudbrain.go index 8d4e57670..6111cf460 100755 --- a/modules/cloudbrain/cloudbrain.go +++ b/modules/cloudbrain/cloudbrain.go @@ -145,7 +145,7 @@ func isAdminOrImageCreater(ctx *context.Context, image *models.Image, err error) func AdminOrOwnerOrJobCreaterRight(ctx *context.Context) { var id = ctx.Params(":id") - job, err := models.GetCloudbrainByID(id) + job, err := GetCloudBrainByIdOrJobId(id) if err != nil { log.Error("GetCloudbrainByID failed:%v", err.Error()) ctx.NotFound(ctx.Req.URL.RequestURI(), nil) @@ -161,7 +161,7 @@ func AdminOrOwnerOrJobCreaterRight(ctx *context.Context) { func AdminOrJobCreaterRight(ctx *context.Context) { var id = ctx.Params(":id") - job, err := models.GetCloudbrainByID(id) + job, err := GetCloudBrainByIdOrJobId(id) if err != nil { log.Error("GetCloudbrainByID failed:%v", err.Error()) ctx.NotFound(ctx.Req.URL.RequestURI(), nil) @@ -177,7 +177,7 @@ func AdminOrJobCreaterRight(ctx *context.Context) { func AdminOrOwnerOrJobCreaterRightForTrain(ctx *context.Context) { var jobID = ctx.Params(":jobid") - job, err := models.GetCloudbrainByJobID(jobID) + job, err := GetCloudBrainByIdOrJobId(jobID) if err != nil { log.Error("GetCloudbrainByJobID failed:%v", err.Error()) ctx.NotFound(ctx.Req.URL.RequestURI(), nil) @@ -193,7 +193,7 @@ func AdminOrOwnerOrJobCreaterRightForTrain(ctx *context.Context) { func AdminOrJobCreaterRightForTrain(ctx *context.Context) { var jobID = ctx.Params(":jobid") - job, err := models.GetCloudbrainByJobID(jobID) + job, err := GetCloudBrainByIdOrJobId(jobID) if err != nil { log.Error("GetCloudbrainByJobID failed:%v", err.Error()) ctx.NotFound(ctx.Req.URL.RequestURI(), nil) @@ -652,3 +652,19 @@ func IsElementExist(s []string, str string) bool { } return false } + +func GetCloudBrainByIdOrJobId(id string) (*models.Cloudbrain,error) { + _, err := strconv.ParseInt(id, 10, 64) + var job *models.Cloudbrain + if err != nil { + + job, err = models.GetCloudbrainByJobID(id) + } else { + job, err = models.GetCloudbrainByID(id) + if err!=nil{ + job, err = models.GetCloudbrainByJobID(id) + } + + } + return job,err +} diff --git a/modules/context/repo.go b/modules/context/repo.go index 7c425c8c0..3bdc34f0d 100755 --- a/modules/context/repo.go +++ b/modules/context/repo.go @@ -474,6 +474,7 @@ func RepoAssignment() macaron.Handler { if ctx.IsSigned { ctx.Data["IsWatchingRepo"] = models.IsWatching(ctx.User.ID, repo.ID) + ctx.Data["WatchNotifyType"] = models.GetWatchNotifyType(ctx.User.ID, repo.ID) ctx.Data["IsStaringRepo"] = models.IsStaring(ctx.User.ID, repo.ID) ctx.Data["IsStaringDataset"] = models.IsDatasetStaringByRepoId(ctx.User.ID, repo.ID) diff --git a/modules/convert/cloudbrain.go b/modules/convert/cloudbrain.go index 1487f468e..599da4800 100644 --- a/modules/convert/cloudbrain.go +++ b/modules/convert/cloudbrain.go @@ -104,6 +104,7 @@ func ToSpecification(s *models.Specification) *api.SpecificationShow { func ToTagger(user 
*models.User) *api.Tagger { return &api.Tagger{ + ID: user.ID, Name: user.Name, RelAvatarURL: user.RelAvatarLink(), Email: user.Email, diff --git a/modules/cron/tasks_basic.go b/modules/cron/tasks_basic.go index 985a82cdb..6a1fc6e39 100755 --- a/modules/cron/tasks_basic.go +++ b/modules/cron/tasks_basic.go @@ -5,10 +5,13 @@ package cron import ( - "code.gitea.io/gitea/modules/urfs_client/urchin" + "code.gitea.io/gitea/modules/setting" "context" "time" + "code.gitea.io/gitea/modules/urfs_client/urchin" + cloudbrainService "code.gitea.io/gitea/services/cloudbrain" + "code.gitea.io/gitea/modules/modelarts" "code.gitea.io/gitea/services/cloudbrain/resource" "code.gitea.io/gitea/services/reward" @@ -190,6 +193,17 @@ func registerHandleRepoAndUserStatistic() { }) } +func registerHandleClearCloudbrainResult() { + RegisterTaskFatal("handle_cloudbrain_one_result_clear", &BaseConfig{ + Enabled: true, + RunAtStart: setting.ClearStrategy.RunAtStart, + Schedule: setting.ClearStrategy.Cron, + }, func(ctx context.Context, _ *models.User, _ Config) error { + cloudbrainService.ClearCloudbrainResultSpace() + return nil + }) +} + func registerHandleSummaryStatistic() { RegisterTaskFatal("handle_summary_statistic", &BaseConfig{ Enabled: true, @@ -306,6 +320,7 @@ func initBasicTasks() { registerHandleRepoAndUserStatistic() registerHandleSummaryStatistic() + registerHandleClearCloudbrainResult() registerSyncCloudbrainStatus() registerHandleOrgStatistic() @@ -317,6 +332,6 @@ func initBasicTasks() { registerHandleModelSafetyTask() - registerHandleScheduleRecord() + registerHandleScheduleRecord() registerHandleCloudbrainDurationStatistic() } diff --git a/modules/grampus/grampus.go b/modules/grampus/grampus.go index b6f62560a..34d7d3fe0 100755 --- a/modules/grampus/grampus.go +++ b/modules/grampus/grampus.go @@ -37,7 +37,7 @@ var ( SpecialPools *models.SpecialPools - CommandPrepareScriptGpu = ";mkdir -p output;mkdir -p code;mkdir -p dataset;mkdir -p pretrainmodel;echo \"start loading script\";wget -q https://git.openi.org.cn/OpenIOSSG/%s/archive/master.zip;" + + CommandPrepareScriptGpu = ";mkdir -p output;mkdir -p code;mkdir -p dataset;mkdir -p pretrainmodel;echo \"start loading script\";wget -q https://openi.pcl.ac.cn/OpenIOSSG/%s/archive/master.zip;" + "echo \"finish loading script\";unzip -q master.zip;cd %s;chmod 777 downloader_for_obs uploader_for_npu downloader_for_minio uploader_for_gpu;" ) @@ -105,8 +105,6 @@ func getDatasetGrampus(datasetInfos map[string]models.DatasetInfo) []models.Gram func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (jobId string, err error) { createTime := timeutil.TimeStampNow() - centerID, centerName := getCentersParamter(ctx, req) - var datasetGrampus, modelGrampus []models.GrampusDataset var codeGrampus models.GrampusDataset if ProcessorTypeNPU == req.ProcessType { @@ -138,8 +136,7 @@ func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (jobId str ResourceSpecId: req.Spec.SourceSpecId, ImageId: req.ImageId, ImageUrl: req.ImageUrl, - CenterID: centerID, - CenterName: centerName, + CenterID: req.Spec.GetAvailableCenterIds(ctx.User.ID), ReplicaNum: 1, Datasets: datasetGrampus, Models: modelGrampus, diff --git a/modules/modelarts/modelarts.go b/modules/modelarts/modelarts.go index dd502dfd0..b59be307b 100755 --- a/modules/modelarts/modelarts.go +++ b/modules/modelarts/modelarts.go @@ -1,13 +1,18 @@ package modelarts import ( + "encoding/base64" "encoding/json" "errors" "fmt" + "io/ioutil" + "net/http" "path" "strconv" "strings" + 
"code.gitea.io/gitea/modules/cloudbrain" + "code.gitea.io/gitea/modules/modelarts_cd" "code.gitea.io/gitea/models" @@ -23,7 +28,7 @@ const ( //notebook storageTypeOBS = "obs" autoStopDuration = 4 * 60 * 60 - autoStopDurationMs = 4 * 60 * 60 * 1000 + AutoStopDurationMs = 4 * 60 * 60 * 1000 MORDELART_USER_IMAGE_ENGINE_ID = -1 DataSetMountPath = "/home/ma-user/work" NotebookEnv = "Python3" @@ -276,7 +281,7 @@ func GenerateTask(ctx *context.Context, jobName, uuid, description, flavor strin return nil } -func GenerateNotebook2(ctx *context.Context, displayJobName, jobName, uuid, description, imageId string, spec *models.Specification) error { +func GenerateNotebook2(ctx *context.Context, displayJobName, jobName, uuid, description, imageId string, spec *models.Specification, bootFile string,autoStopDurationInMs int64) (string, error) { if poolInfos == nil { json.Unmarshal([]byte(setting.PoolInfos), &poolInfos) } @@ -284,14 +289,14 @@ func GenerateNotebook2(ctx *context.Context, displayJobName, jobName, uuid, desc imageName, err := GetNotebookImageName(imageId) if err != nil { log.Error("GetNotebookImageName failed: %v", err.Error()) - return err + return "", err } createTime := timeutil.TimeStampNow() jobResult, err := createNotebook2(models.CreateNotebook2Params{ JobName: jobName, Description: description, Flavor: spec.SourceSpecId, - Duration: autoStopDurationMs, + Duration: autoStopDurationInMs, ImageID: imageId, PoolID: poolInfos.PoolInfo[0].PoolId, Feature: models.NotebookFeature, @@ -316,10 +321,10 @@ func GenerateNotebook2(ctx *context.Context, displayJobName, jobName, uuid, desc }) if errTemp != nil { log.Error("InsertCloudbrainTemp failed: %v", errTemp.Error()) - return errTemp + return "", errTemp } } - return err + return "", err } task := &models.Cloudbrain{ Status: jobResult.Status, @@ -334,6 +339,7 @@ func GenerateNotebook2(ctx *context.Context, displayJobName, jobName, uuid, desc Uuid: uuid, ComputeResource: models.NPUResource, Image: imageName, + BootFile: bootFile, Description: description, CreatedUnix: createTime, UpdatedUnix: createTime, @@ -342,12 +348,12 @@ func GenerateNotebook2(ctx *context.Context, displayJobName, jobName, uuid, desc err = models.CreateCloudbrain(task) if err != nil { - return err + return "", err } stringId := strconv.FormatInt(task.ID, 10) notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, stringId, displayJobName, models.ActionCreateDebugNPUTask) - return nil + return jobResult.ID, nil } func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (jobId string, err error) { @@ -907,6 +913,11 @@ func HandleNotebookInfo(task *models.Cloudbrain) error { if task.FlavorCode == "" { task.FlavorCode = result.Flavor } + + if oldStatus != task.Status && task.Status == string(models.ModelArtsRunning) && task.BootFile != "" { + uploadNoteBookFile(task, result) + + } err = models.UpdateJob(task) if err != nil { log.Error("UpdateJob(%s) failed:%v", task.DisplayJobName, err) @@ -917,6 +928,81 @@ func HandleNotebookInfo(task *models.Cloudbrain) error { return nil } +func uploadNoteBookFile(task *models.Cloudbrain, result *models.GetNotebook2Result) { + jupyterUrl := result.Url + "?token=" + result.Token + + cookies, xsrf := getCookiesAndCsrf(jupyterUrl) + if xsrf == "" { + log.Error("browser jupyterUrl failed:%v", task.DisplayJobName) + } else { + + codePath := setting.JobPath + task.JobName + cloudbrain.CodeMountPath + fileContents, err := ioutil.ReadFile(codePath + "/" + task.BootFile) + if err != nil { + log.Error("read jupyter file failed:%v", 
task.DisplayJobName, err) + } + + base64Content := base64.StdEncoding.EncodeToString(fileContents) + client := getRestyClient() + uploadUrl := getJupyterBaseUrl(result.Url) + "api/contents/" + path.Base(task.BootFile) + res, err := client.R(). + SetCookies(cookies). + SetHeader("X-XSRFToken", xsrf). + SetBody(map[string]interface{}{ + "type": "file", + "format": "base64", + "name": path.Base(task.BootFile), + "path": path.Base(task.BootFile), + "content": base64Content}). + Put(uploadUrl) + if err != nil { + log.Error("upload jupyter file failed:%v", task.DisplayJobName, err) + } else if res.StatusCode() != http.StatusCreated { + log.Error("upload jupyter file failed:%v", task.DisplayJobName, err) + } + + } + +} + +func getJupyterBaseUrl(url string) string { + jupyterUrlLength := len(url) + baseUrl := url[0 : jupyterUrlLength-len(path.Base(url))] + return baseUrl +} + +func getCookiesAndCsrf(jupyterUrl string) ([]*http.Cookie, string) { + log.Info("jupyter url:"+jupyterUrl) + var cookies []*http.Cookie + const retryTimes = 10 + for i := 0; i < retryTimes; i++ { + res, err := http.Get(jupyterUrl) + if err != nil { + log.Error("browser jupyterUrl failed.",err) + if i==retryTimes-1{ + return cookies, "" + } + + } else { + cookies = res.Cookies() + xsrf := "" + for _, cookie := range cookies { + if cookie.Name == "_xsrf" { + xsrf = cookie.Value + break + } + + } + if xsrf != "" { + return cookies, xsrf + } + + } + } + return cookies, "" + +} + func SyncTempStatusJob() { jobs, err := models.GetCloudBrainTempJobs() if err != nil { diff --git a/modules/modelarts/resty.go b/modules/modelarts/resty.go index c38300606..3ccba9011 100755 --- a/modules/modelarts/resty.go +++ b/modules/modelarts/resty.go @@ -280,7 +280,7 @@ sendjob: SetHeader("Content-Type", "application/json"). SetAuthToken(TOKEN). SetResult(&result). 
- Post(HOST + "/v1/" + setting.ProjectID + urlNotebook2 + "/" + jobID + "/" + param.Action + "?duration=" + strconv.Itoa(autoStopDurationMs)) + Post(HOST + "/v1/" + setting.ProjectID + urlNotebook2 + "/" + jobID + "/" + param.Action + "?duration=" + strconv.Itoa(AutoStopDurationMs)) if err != nil { return &result, fmt.Errorf("resty ManageNotebook2: %v", err) diff --git a/modules/modelarts_cd/modelarts.go b/modules/modelarts_cd/modelarts.go index 330b048ca..93032fa89 100755 --- a/modules/modelarts_cd/modelarts.go +++ b/modules/modelarts_cd/modelarts.go @@ -88,18 +88,18 @@ type Parameters struct { } `json:"parameter"` } -func GenerateNotebook(ctx *context.Context, displayJobName, jobName, uuid, description, imageId string, spec *models.Specification) error { +func GenerateNotebook(ctx *context.Context, displayJobName, jobName, uuid, description, imageId string, spec *models.Specification, bootFile string,autoStopDurationInMs int64) (string, error) { imageName, err := GetNotebookImageName(imageId) if err != nil { log.Error("GetNotebookImageName failed: %v", err.Error()) - return err + return "", err } createTime := timeutil.TimeStampNow() jobResult, err := createNotebook(models.CreateNotebookWithoutPoolParams{ JobName: jobName, Description: description, Flavor: spec.SourceSpecId, - Duration: autoStopDurationMs, + Duration: autoStopDurationInMs, ImageID: imageId, Feature: models.NotebookFeature, Volume: models.VolumeReq{ @@ -123,10 +123,10 @@ func GenerateNotebook(ctx *context.Context, displayJobName, jobName, uuid, descr }) if errTemp != nil { log.Error("InsertCloudbrainTemp failed: %v", errTemp.Error()) - return errTemp + return "", errTemp } } - return err + return "", err } task := &models.Cloudbrain{ Status: jobResult.Status, @@ -145,16 +145,17 @@ func GenerateNotebook(ctx *context.Context, displayJobName, jobName, uuid, descr CreatedUnix: createTime, UpdatedUnix: createTime, Spec: spec, + BootFile: bootFile, } err = models.CreateCloudbrain(task) if err != nil { - return err + return "", err } stringId := strconv.FormatInt(task.ID, 10) notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, stringId, displayJobName, models.ActionCreateDebugNPUTask) - return nil + return jobResult.ID, nil } func GetNotebookImageName(imageId string) (string, error) { @@ -175,41 +176,3 @@ func GetNotebookImageName(imageId string) (string, error) { return imageName, nil } - -/* -func HandleNotebookInfo(task *models.Cloudbrain) error { - - result, err := GetNotebook(task.JobID) - if err != nil { - log.Error("GetNotebook2(%s) failed:%v", task.DisplayJobName, err) - return err - } - - if result != nil { - oldStatus := task.Status - task.Status = result.Status - if task.StartTime == 0 && result.Lease.UpdateTime > 0 { - task.StartTime = timeutil.TimeStamp(result.Lease.UpdateTime / 1000) - } - if task.EndTime == 0 && models.IsModelArtsDebugJobTerminal(task.Status) { - task.EndTime = timeutil.TimeStampNow() - } - task.CorrectCreateUnix() - task.ComputeAndSetDuration() - if oldStatus != task.Status { - notification.NotifyChangeCloudbrainStatus(task, oldStatus) - } - if task.FlavorCode == "" { - task.FlavorCode = result.Flavor - } - err = models.UpdateJob(task) - if err != nil { - log.Error("UpdateJob(%s) failed:%v", task.DisplayJobName, err) - return err - } - } - - return nil -} - -*/ diff --git a/modules/repository/hooks.go b/modules/repository/hooks.go index 82d02b3f1..91fb418ad 100644 --- a/modules/repository/hooks.go +++ b/modules/repository/hooks.go @@ -36,7 +36,7 @@ func getHookTemplates() (hookNames, 
hookTpls, giteaHookTpls, sizeLimitTpls []str fmt.Sprintf("#!/usr/bin/env %s\n\"%s\" hook --config='%s' post-receive\n", setting.ScriptType, setting.AppPath, setting.CustomConf), } sizeLimitTpls = []string{ - fmt.Sprintf("#!/usr/bin/env %s\n\n\nset -o pipefail\n\nreadonly DEFAULT_FILE_MAXSIZE_MB=\"30\" \nreadonly CONFIG_NAME=\"hooks.maxfilesize\"\nreadonly NULLSHA=\"0000000000000000000000000000000000000000\"\nreadonly EXIT_SUCCESS=0\nreadonly EXIT_FAILURE=1\nreadonly DEFAULT_REPO_MAXSIZE_MB=\"1024\" \nreadonly CHECK_FLAG_ON=1\n\n\nstatus=\"$EXIT_SUCCESS\"\n\n# skip this hook entirely if shell check is not open\ncheck_flag=${PUSH_SIZE_CHECK_FLAG}\nif [[ $check_flag != $CHECK_FLAG_ON ]]; then\nexit $EXIT_SUCCESS\nfi\n\n\n#######################################\n# check the file max size limit\n#######################################\n\n# get maximum filesize (from repository-specific config)\nmaxsize_mb=\"${REPO_MAX_FILE_SIZE}\"\n\nif [[ \"$?\" != $EXIT_SUCCESS ]]; then\necho \"failed to get ${CONFIG_NAME} from config\"\nexit \"$EXIT_FAILURE\"\nfi\n\npush_size=\"0\"\n# read lines from stdin (format: \" \\n\")\nwhile read oldref newref refname; do\n# skip branch deletions\nif [[ \"$newref\" == \"$NULLSHA\" ]]; then\n continue\nfi\n\n# find large objects\n# check all objects from $oldref (possible $NULLSHA) to $newref, but\n# skip all objects that have already been accepted (i.e. are referenced by\n# another branch or tag).\n\nnew_branch_flag=0\nif [[ \"$oldref\" == \"$NULLSHA\" ]]; then\n target=\"$newref\"\n new_branch_flag=1\n echo \"You are creating a new remote branch,openI will check all files in commit history to find oversize files\"\nelse\n target=\"${oldref}..${newref}\"\nfi\nmaxsize=`expr $maxsize_mb \\* 1048576` \n\n# find objects in this push_size\n# print like:\n# 08da8e2ab9ae4095bf94dd71ac913132b880b463 commit 214\n# 43e993b768ede5740e8c65de2ed6edec25053ea1 tree 185\n# 4476971d76569039df7569af1b8d03c288f6b193 blob 20167318 b0417e6593a1.zip\nfiles=\"$(git rev-list --objects \"$target\" | \\\n git cat-file $'--batch-check=%%(objectname) %%(objecttype) %%(objectsize) %%(rest)' | \\\n awk -F ' ' -v maxbytes=\"$maxsize\" 'BEGIN {totalIn=0} {if( $3 > maxbytes && $2 == \"blob\") { totalIn+=$3; print $4} else { totalIn+=$3}} END { printf (\"totalIn=\\t%%s\",totalIn)}' )\"\n \nif [[ \"$?\" != $EXIT_SUCCESS ]]; then\n echo \"failed to check for large files in ref ${refname}\"\n continue\nfi\n\nIFS=$'\\n'\n# rewrite IFS to seperate line in $files\nfor file in $files; do\n # if don't unset IFS,temp_array=(${file}) will get error answer\n \n if [[ ${file} == totalIn=* ]]; then\n\tIFS=$'\\t'\n\ttemp_array=(${file})\n\tpush_size=${temp_array[1]}\n\tcontinue\n fi\n\tunset IFS\n if [[ \"$status\" == $EXIT_SUCCESS ]]; then\n\t\techo -e \"Error: Your push was rejected because it contains files larger than $(numfmt --to=iec \"$maxsize_mb\") Mb\"\n\t\techo \"help document -- https://git.openi.org.cn/zeizei/OpenI_Learning/src/branch/master/docs/git/repository_capacity_help.md\"\n\t\techo \"oversize files:\"\n\t\tstatus=\"$EXIT_FAILURE\"\t\n fi\n echo -e \"\\033[31m- ${file}\\033[0m \"\ndone\n\nif [[ \"$status\" != $EXIT_SUCCESS ]]; then\n\texit \"$status\"\nfi\n\ndone\n\n#######################################\n# check the repo max size limit\n#######################################\nif [[ $push_size -eq \"0\" ]]; then\n\texit $EXIT_SUCCESS\nfi\n\n# if create new branch or tag,use count-objects -v to get pack size\nif [[ $new_branch_flag -eq 1 ]]; then\n size_kb=`git count-objects -v | grep 
'size-pack' | sed 's/.*\\(size-pack:\\).//'`\n size_pack_kb=`git count-objects -v | grep 'size:' | sed 's/.*\\(size:\\).//'`\n\ttotal_kb=`expr $size_kb + $size_pack_kb`\n\tlet push_size=$total_kb*1024\nfi\n\nsizelimit_mb=\"${REPO_MAX_SIZE}\"\nlet sizelimit_b=$sizelimit_mb*1024*1024\n\n# repo size at here means the size of repo directory in server \nreposize_b=${REPO_CURRENT_SIZE}\n\ntotal=`expr $push_size + $reposize_b`\n\nif [ $total -gt $sizelimit_b ]; then\n echo \"Error: Your push was rejected because the repository size is large than $sizelimit_mb Mb\"\n echo \"see the help document--https://git.openi.org.cn/zeizei/OpenI_Learning/src/branch/master/docs/git/repository_capacity_help.md\"\n exit $EXIT_FAILURE\nfi\n\n\nexit $EXIT_SUCCESS", setting.ScriptType), + fmt.Sprintf("#!/usr/bin/env %s\n\n\nset -o pipefail\n\nreadonly DEFAULT_FILE_MAXSIZE_MB=\"30\" \nreadonly CONFIG_NAME=\"hooks.maxfilesize\"\nreadonly NULLSHA=\"0000000000000000000000000000000000000000\"\nreadonly EXIT_SUCCESS=0\nreadonly EXIT_FAILURE=1\nreadonly DEFAULT_REPO_MAXSIZE_MB=\"1024\" \nreadonly CHECK_FLAG_ON=1\n\n\nstatus=\"$EXIT_SUCCESS\"\n\n# skip this hook entirely if shell check is not open\ncheck_flag=${PUSH_SIZE_CHECK_FLAG}\nif [[ $check_flag != $CHECK_FLAG_ON ]]; then\nexit $EXIT_SUCCESS\nfi\n\n\n#######################################\n# check the file max size limit\n#######################################\n\n# get maximum filesize (from repository-specific config)\nmaxsize_mb=\"${REPO_MAX_FILE_SIZE}\"\n\nif [[ \"$?\" != $EXIT_SUCCESS ]]; then\necho \"failed to get ${CONFIG_NAME} from config\"\nexit \"$EXIT_FAILURE\"\nfi\n\npush_size=\"0\"\n# read lines from stdin (format: \" \\n\")\nwhile read oldref newref refname; do\n# skip branch deletions\nif [[ \"$newref\" == \"$NULLSHA\" ]]; then\n continue\nfi\n\n# find large objects\n# check all objects from $oldref (possible $NULLSHA) to $newref, but\n# skip all objects that have already been accepted (i.e. 
are referenced by\n# another branch or tag).\n\nnew_branch_flag=0\nif [[ \"$oldref\" == \"$NULLSHA\" ]]; then\n target=\"$newref\"\n new_branch_flag=1\n echo \"You are creating a new remote branch,openI will check all files in commit history to find oversize files\"\nelse\n target=\"${oldref}..${newref}\"\nfi\nmaxsize=`expr $maxsize_mb \\* 1048576` \n\n# find objects in this push_size\n# print like:\n# 08da8e2ab9ae4095bf94dd71ac913132b880b463 commit 214\n# 43e993b768ede5740e8c65de2ed6edec25053ea1 tree 185\n# 4476971d76569039df7569af1b8d03c288f6b193 blob 20167318 b0417e6593a1.zip\nfiles=\"$(git rev-list --objects \"$target\" | \\\n git cat-file $'--batch-check=%%(objectname) %%(objecttype) %%(objectsize) %%(rest)' | \\\n awk -F ' ' -v maxbytes=\"$maxsize\" 'BEGIN {totalIn=0} {if( $3 > maxbytes && $2 == \"blob\") { totalIn+=$3; print $4} else { totalIn+=$3}} END { printf (\"totalIn=\\t%%s\",totalIn)}' )\"\n \nif [[ \"$?\" != $EXIT_SUCCESS ]]; then\n echo \"failed to check for large files in ref ${refname}\"\n continue\nfi\n\nIFS=$'\\n'\n# rewrite IFS to seperate line in $files\nfor file in $files; do\n # if don't unset IFS,temp_array=(${file}) will get error answer\n \n if [[ ${file} == totalIn=* ]]; then\n\tIFS=$'\\t'\n\ttemp_array=(${file})\n\tpush_size=${temp_array[1]}\n\tcontinue\n fi\n\tunset IFS\n if [[ \"$status\" == $EXIT_SUCCESS ]]; then\n\t\techo -e \"Error: Your push was rejected because it contains files larger than $(numfmt --to=iec \"$maxsize_mb\") Mb\"\n\t\techo \"help document -- https://openi.pcl.ac.cn/zeizei/OpenI_Learning/src/branch/master/docs/git/repository_capacity_help.md\"\n\t\techo \"oversize files:\"\n\t\tstatus=\"$EXIT_FAILURE\"\t\n fi\n echo -e \"\\033[31m- ${file}\\033[0m \"\ndone\n\nif [[ \"$status\" != $EXIT_SUCCESS ]]; then\n\texit \"$status\"\nfi\n\ndone\n\n#######################################\n# check the repo max size limit\n#######################################\nif [[ $push_size -eq \"0\" ]]; then\n\texit $EXIT_SUCCESS\nfi\n\n# if create new branch or tag,use count-objects -v to get pack size\nif [[ $new_branch_flag -eq 1 ]]; then\n size_kb=`git count-objects -v | grep 'size-pack' | sed 's/.*\\(size-pack:\\).//'`\n size_pack_kb=`git count-objects -v | grep 'size:' | sed 's/.*\\(size:\\).//'`\n\ttotal_kb=`expr $size_kb + $size_pack_kb`\n\tlet push_size=$total_kb*1024\nfi\n\nsizelimit_mb=\"${REPO_MAX_SIZE}\"\nlet sizelimit_b=$sizelimit_mb*1024*1024\n\n# repo size at here means the size of repo directory in server \nreposize_b=${REPO_CURRENT_SIZE}\n\ntotal=`expr $push_size + $reposize_b`\n\nif [ $total -gt $sizelimit_b ]; then\n echo \"Error: Your push was rejected because the repository size is large than $sizelimit_mb Mb\"\n echo \"see the help document--https://openi.pcl.ac.cn/zeizei/OpenI_Learning/src/branch/master/docs/git/repository_capacity_help.md\"\n exit $EXIT_FAILURE\nfi\n\n\nexit $EXIT_SUCCESS", setting.ScriptType), fmt.Sprintf(""), fmt.Sprintf(""), } diff --git a/modules/setting/setting.go b/modules/setting/setting.go index ef72debe5..bf7eb2352 100755 --- a/modules/setting/setting.go +++ b/modules/setting/setting.go @@ -519,6 +519,7 @@ var ( CullIdleTimeout string CullInterval string + //benchmark config IsBenchmarkEnabled bool BenchmarkOwner string @@ -584,6 +585,8 @@ var ( TrainJobFLAVORINFOS string ModelArtsSpecialPools string ModelArtsMultiNode string + //kanban + IsCloudbrainTimingEnabled bool // modelarts-cd config ModelartsCD = struct { @@ -598,20 +601,33 @@ var ( //grampus config Grampus = struct { - Env string - Host string - 
UserName string - Password string - SpecialPools string - C2NetSequence string - SyncScriptProject string - LocalCenterID string - AiCenterInfo string + Env string + Host string + UserName string + Password string + SpecialPools string + C2NetSequence string + SyncScriptProject string + LocalCenterID string + AiCenterInfo string + AiCenterCodeAndNameInfo string + UsageRateBeginTime string + }{} + + ClearStrategy= struct { + Enabled bool + ResultSaveDays int + BatchSize int + DebugJobSize int + TrashSaveDays int + Cron string + RunAtStart bool }{} - C2NetInfos *C2NetSqInfos - CenterInfos *AiCenterInfos - C2NetMapInfo map[string]*C2NetSequenceInfo + C2NetInfos *C2NetSqInfos + CenterInfos *AiCenterInfos + C2NetMapInfo map[string]*C2NetSequenceInfo + AiCenterCodeAndNameMapInfo map[string]*C2NetSequenceInfo //elk config ElkUrl string @@ -712,6 +728,21 @@ var ( TeamName string }{} + FileNoteBook = struct { + ProjectName string + ImageGPU string + SpecIdGPU int64 + SpecIdCPU int64 + ImageIdNPU string + SpecIdNPU int64 + ImageIdNPUCD string + SpecIdNPUCD int64 + ImageCPUDescription string + ImageGPUDescription string + ImageNPUDescription string + ImageNPUCDDescription string + }{} + ModelConvert = struct { GPU_PYTORCH_IMAGE string GpuQueue string @@ -1426,7 +1457,7 @@ func NewContext() { DecompressOBSTaskName = sec.Key("DecompressOBSTaskName").MustString("LabelDecompressOBSQueue") sec = Cfg.Section("homepage") - RecommentRepoAddr = sec.Key("Address").MustString("https://git.openi.org.cn/OpenIOSSG/promote/raw/branch/master/") + RecommentRepoAddr = sec.Key("Address").MustString("https://openi.pcl.ac.cn/OpenIOSSG/promote/raw/branch/master/") ESSearchURL = sec.Key("ESSearchURL").MustString("http://192.168.207.94:9200") INDEXPOSTFIX = sec.Key("INDEXPOSTFIX").MustString("") @@ -1577,11 +1608,29 @@ func NewContext() { Course.OrgName = sec.Key("org_name").MustString("") Course.TeamName = sec.Key("team_name").MustString("") + sec = Cfg.Section("file_notebook") + FileNoteBook.ProjectName = sec.Key("project_name").MustString("openi-notebook") + FileNoteBook.ImageIdNPU = sec.Key("imageid_npu").MustString("") + FileNoteBook.ImageGPU = sec.Key("image_gpu").MustString("") + FileNoteBook.SpecIdCPU = sec.Key("specid_cpu").MustInt64(-1) + FileNoteBook.SpecIdGPU = sec.Key("specid_gpu").MustInt64(-1) + FileNoteBook.SpecIdNPU = sec.Key("specid_npu").MustInt64(-1) + FileNoteBook.ImageIdNPUCD = sec.Key("imageid_npu_cd").MustString("") + FileNoteBook.SpecIdNPUCD = sec.Key("specid_npu_cd").MustInt64(-1) + FileNoteBook.ImageCPUDescription = sec.Key("image_cpu_desc").MustString("") + FileNoteBook.ImageGPUDescription = sec.Key("image_gpu_desc").MustString("") + FileNoteBook.ImageNPUDescription = sec.Key("image_npu_desc").MustString("") + FileNoteBook.ImageNPUCDDescription = sec.Key("image_npu_cd_desc").MustString("") + + sec = Cfg.Section("kanban") + IsCloudbrainTimingEnabled = sec.Key("ENABLED").MustBool(false) + getGrampusConfig() getModelartsCDConfig() getModelConvertConfig() getModelSafetyConfig() getModelAppConfig() + getClearStrategy() } func getModelSafetyConfig() { @@ -1609,7 +1658,7 @@ func getModelConvertConfig() { ModelConvert.MindsporeBootFile = sec.Key("MindsporeBootFile").MustString("convert_mindspore.py") ModelConvert.TensorFlowNpuBootFile = sec.Key("TensorFlowNpuBootFile").MustString("convert_tensorflow.py") ModelConvert.TensorFlowGpuBootFile = sec.Key("TensorFlowGpuBootFile").MustString("convert_tensorflow_gpu.py") - ModelConvert.ConvertRepoPath = 
sec.Key("ConvertRepoPath").MustString("https://git.openi.org.cn/zouap/npu_test") + ModelConvert.ConvertRepoPath = sec.Key("ConvertRepoPath").MustString("https://openi.pcl.ac.cn/zouap/npu_test") ModelConvert.GPU_Resource_Specs_ID = sec.Key("GPU_Resource_Specs_ID").MustInt(1) ModelConvert.NPU_FlavorCode = sec.Key("NPU_FlavorCode").MustString("modelarts.bm.910.arm.public.1") ModelConvert.NPU_PoolID = sec.Key("NPU_PoolID").MustString("pool7908321a") @@ -1642,6 +1691,18 @@ func getModelartsCDConfig() { getNotebookFlavorInfos() } +func getClearStrategy(){ + + sec := Cfg.Section("clear_strategy") + ClearStrategy.Enabled=sec.Key("ENABLED").MustBool(false) + ClearStrategy.ResultSaveDays=sec.Key("RESULT_SAVE_DAYS").MustInt(30) + ClearStrategy.BatchSize=sec.Key("BATCH_SIZE").MustInt(500) + ClearStrategy.DebugJobSize=sec.Key("DEBUG_BATCH_SIZE").MustInt(100) + ClearStrategy.TrashSaveDays=sec.Key("TRASH_SAVE_DAYS").MustInt(90) + ClearStrategy.Cron=sec.Key("CRON").MustString("* 0,30 2-8 * * ?") + ClearStrategy.RunAtStart=sec.Key("RUN_AT_START").MustBool(false) +} + func getGrampusConfig() { sec := Cfg.Section("grampus") @@ -1651,6 +1712,8 @@ func getGrampusConfig() { Grampus.Password = sec.Key("PASSWORD").MustString("") Grampus.SpecialPools = sec.Key("SPECIAL_POOL").MustString("") Grampus.C2NetSequence = sec.Key("C2NET_SEQUENCE").MustString("{\"sequence\":[{\"id\":1,\"name\":\"cloudbrain_one\",\"content\":\"鹏城云脑一号\",\"content_en\":\"Pencheng Cloudbrain Ⅰ\"},{\"id\":2,\"name\":\"cloudbrain_two\",\"content\":\"鹏城云脑二号\",\"content_en\":\"Pencheng Cloudbrain Ⅱ\"},{\"id\":3,\"name\":\"beida\",\"content\":\"北大人工智能集群系统\",\"content_en\":\"Peking University AI Center\"},{\"id\":4,\"name\":\"hefei\",\"content\":\"合肥类脑智能开放平台\",\"content_en\":\"Hefei AI Center\"},{\"id\":5,\"name\":\"wuhan\",\"content\":\"武汉人工智能计算中心\",\"content_en\":\"Wuhan AI Center\"},{\"id\":6,\"name\":\"xian\",\"content\":\"西安未来人工智能计算中心\",\"content_en\":\"Xi'an AI Center\"},{\"id\":7,\"pclcci\":\"more\",\"content\":\"鹏城云计算所\",\"content_en\":\"Pengcheng Cloud Computing Institute\"},{\"id\":8,\"name\":\"xuchang\",\"content\":\"中原人工智能计算中心\",\"content_en\":\"Zhongyuan AI Center\"},{\"id\":9,\"name\":\"chengdu\",\"content\":\"成都人工智能计算中心\",\"content_en\":\"Chengdu AI Center\"},{\"id\":10,\"name\":\"more\",\"content\":\"横琴先进智能计算中心\",\"content_en\":\"Hengqin AI Center\"},{\"id\":11,\"name\":\"more\",\"content\":\"国家超级计算济南中心\",\"content_en\":\"HPC & AI Center\"}]}") + Grampus.AiCenterCodeAndNameInfo = sec.Key("AI_CENTER_CODE_AND_NAME").MustString("{\"sequence\":[{\"id\":1,\"name\":\"cloudbrain_one\",\"content\":\"鹏城云脑一号\",\"content_en\":\"Pencheng Cloudbrain Ⅰ\"},{\"id\":2,\"name\":\"cloudbrain_two\",\"content\":\"鹏城云脑二号\",\"content_en\":\"Pencheng Cloudbrain Ⅱ\"},{\"id\":3,\"name\":\"beida\",\"content\":\"北大人工智能集群系统\",\"content_en\":\"Peking University AI Center\"},{\"id\":4,\"name\":\"hefei\",\"content\":\"合肥类脑智能开放平台\",\"content_en\":\"Hefei AI Center\"},{\"id\":5,\"name\":\"wuhan\",\"content\":\"武汉人工智能计算中心\",\"content_en\":\"Wuhan AI Center\"},{\"id\":6,\"name\":\"xian\",\"content\":\"西安未来人工智能计算中心\",\"content_en\":\"Xi'an AI Center\"},{\"id\":7,\"pclcci\":\"more\",\"content\":\"鹏城云计算所\",\"content_en\":\"Pengcheng Cloud Computing Institute\"},{\"id\":8,\"name\":\"xuchang\",\"content\":\"中原人工智能计算中心\",\"content_en\":\"Zhongyuan AI Center\"},{\"id\":9,\"name\":\"chengdu\",\"content\":\"成都人工智能计算中心\",\"content_en\":\"Chengdu AI Center\"},{\"id\":10,\"name\":\"more\",\"content\":\"横琴先进智能计算中心\",\"content_en\":\"Hengqin AI 
Center\"},{\"id\":11,\"name\":\"more\",\"content\":\"国家超级计算济南中心\",\"content_en\":\"HPC & AI Center\"}]}") + Grampus.UsageRateBeginTime = sec.Key("USAGE_RATE_BEGIN_TIME").MustString("2021-01-01 00:00:00") if Grampus.C2NetSequence != "" { if err := json.Unmarshal([]byte(Grampus.C2NetSequence), &C2NetInfos); err != nil { log.Error("Unmarshal(C2NetSequence) failed:%v", err) @@ -1660,6 +1723,15 @@ func getGrampusConfig() { C2NetMapInfo[value.Name] = value } } + if Grampus.AiCenterCodeAndNameInfo != "" { + if err := json.Unmarshal([]byte(Grampus.AiCenterCodeAndNameInfo), &C2NetInfos); err != nil { + log.Error("Unmarshal(AiCenterCodeAndNameInfo) failed:%v", err) + } + AiCenterCodeAndNameMapInfo = make(map[string]*C2NetSequenceInfo) + for _, value := range C2NetInfos.C2NetSqInfo { + AiCenterCodeAndNameMapInfo[value.Name] = value + } + } Grampus.SyncScriptProject = sec.Key("SYNC_SCRIPT_PROJECT").MustString("script_for_grampus") Grampus.LocalCenterID = sec.Key("LOCAL_CENTER_ID").MustString("cloudbrain2") Grampus.AiCenterInfo = sec.Key("AI_CENTER_INFO").MustString("") diff --git a/modules/structs/cloudbrain.go b/modules/structs/cloudbrain.go index 866c85dad..9ea5601c9 100644 --- a/modules/structs/cloudbrain.go +++ b/modules/structs/cloudbrain.go @@ -41,6 +41,14 @@ type CreateTrainJobOption struct { SpecId int64 `json:"spec_id" binding:"Required"` } +type CreateFileNotebookJobOption struct { + Type int `json:"type"` //0 CPU 1 GPU 2 NPU + File string `json:"file" binding:"Required"` + BranchName string `json:"branch_name" binding:"Required"` + OwnerName string `json:"owner_name" binding:"Required"` + ProjectName string `json:"project_name" binding:"Required"` +} + type Cloudbrain struct { ID int64 `json:"id"` JobID string `json:"job_id"` diff --git a/modules/structs/pipeline.go b/modules/structs/pipeline.go new file mode 100644 index 000000000..fd26d1b51 --- /dev/null +++ b/modules/structs/pipeline.go @@ -0,0 +1,23 @@ +package structs + +type Pipeline struct { + ID int64 `json:"id"` + Name string `json:"name"` + Status string `json:"status"` +} +type NodeInfo struct { + Name string `json:"name"` + Status string `json:"status"` + Code string `json:"code"` + Message string `json:"message"` +} + +type PipelineNotification struct { + Type int `json:"type"` + Username string `json:"username"` + Reponame string `json:"reponame"` + Pipeline Pipeline `json:"pipeline"` + PipelineRunId string `json:"pipeline_run_id"` + Node NodeInfo `json:"node"` + OccurTime int64 `json:"occur_time"` +} diff --git a/modules/structs/tagger.go b/modules/structs/tagger.go index 8933c8c5c..c32ad8040 100644 --- a/modules/structs/tagger.go +++ b/modules/structs/tagger.go @@ -1,6 +1,7 @@ package structs type Tagger struct { + ID int64 `json:"id"` Name string `json:"name"` Email string `json:"email"` RelAvatarURL string `json:"relAvatarURL"` diff --git a/modules/templates/helper.go b/modules/templates/helper.go index 3e424454b..c314127f1 100755 --- a/modules/templates/helper.go +++ b/modules/templates/helper.go @@ -151,6 +151,9 @@ func NewFuncMap() []template.FuncMap { "EscapePound": func(str string) string { return strings.NewReplacer("%", "%25", "#", "%23", " ", "%20", "?", "%3F").Replace(str) }, + "IpynbBool":func(str string) bool{ + return strings.Contains(str, ".ipynb") + }, "nl2br": func(text string) template.HTML { return template.HTML(strings.Replace(template.HTMLEscapeString(text), "\n", "
", -1)) }, diff --git a/options/locale/locale_en-US.ini b/options/locale/locale_en-US.ini index 9a16ae0ff..082b35ca8 100755 --- a/options/locale/locale_en-US.ini +++ b/options/locale/locale_en-US.ini @@ -24,6 +24,7 @@ enable_javascript = This website works better with JavaScript. toc = Table of Contents return=Back OpenI calculation_points = Calculation Points +notice_announcement = Notice Announcement username = Username email = Email Address @@ -236,7 +237,7 @@ page_title=Explore Better AI page_small_title=OpenI AI Development Cooperation Platform page_description=The one-stop collaborative development environment for AI field provides AI development pipeline integrating code development, data management, model debugging, reasoning and evaluation page_use=Use Now -page_only_dynamic=Only show the dynamics of open source projects +page_only_dynamic=The dynamics of open source projects page_recommend_org=Recommended Organizations page_recommend_org_desc=These excellent organizations are using the OpenI AI Collaboration Platform for collaborative development of projects. To show your organization here, page_recommend_org_commit=Click here to submit. @@ -260,7 +261,7 @@ page_dev_env_desc3_title=Once Configuration, Multiple Reuse page_dev_env_desc3_desc=Provide execution environment sharing, Once Configuration, Multiple Reuse. Lower the threshold of model development, and avoid spending repetitive time configuring complex environments. page_dev_yunlao=OpenI AI Collaboration Platform page_dev_yunlao_desc1=OpenI AI collaboration platform has cooperated with Pengcheng cloud brain and China computing power network (C²NET) can be used to complete AI development tasks by using the rich computing resources of Pengcheng cloud brain and China computing network. -page_dev_yunlao_desc2=Pengcheng CloudBrain's existing AI computing power is 100p FLOPS@FP16 (billions of half precision floating-point calculations per second), the main hardware infrastructure consists of GPU servers equipped with NVIDIA Tesla V100 and A100, and Atlas 900 AI clusters equipped with Kunpeng and shengteng processors. +page_dev_yunlao_desc2=Pengcheng CloudBrain is existing AI computing power is 100p FLOPS@FP16 (billions of half precision floating-point calculations per second), the main hardware infrastructure consists of GPU servers equipped with NVIDIA Tesla V100 and A100, and Atlas 900 AI clusters equipped with Kunpeng and shengteng processors. page_dev_yunlao_desc3=China computing power network (C²NET) phase I can realize high-speed network interconnection between different artificial intelligence computing centers, and realize reasonable scheduling of computing power and flexible allocation of resources. At present, 11 intelligent computing centers have been connected, and the total scale of computing power is 1924p OPS@FP16. OpenI AI collaboration platform has been connected to Pengcheng Cloud Computing Institute, Chengdu Intelligent Computing Center, Zhongyuan Intelligent Computing Center, Hefei brain and other nodes. page_dev_yunlao_desc4=Developers can freely select the corresponding computing resources according to the use needs, and can test the adaptability, performance, stability, etc. of the model in different hardware environments. page_dev_yunlao_desc5=If your model requires more computing resources, you can also apply for it separately. @@ -283,6 +284,7 @@ search_ge= wecome_AI_plt = Welcome to OpenI AI Collaboration Platform! 
explore_AI = Explore better AI, come here to find more interesting datasets = Datasets +datasets_descr = Open source dataset base, seamlessly integrated with your project. View all repositories = Repositories use_plt__fuction = To use the AI collaboration functions provided by this platform, such as: hosting code, sharing data, debugging algorithms or training models, start with provide_resoure = Computing resources of CPU/GPU/NPU are provided freely for various types of AI tasks. @@ -290,6 +292,12 @@ activity = Activity no_events = There are no events related or_t = or powerdby=Powered_by Pengcheng CloudBrain、China Computing NET(C²NET)、 +experience_officer=Experience Officer +openi_experience_officer_plan=OpenI AI experience officer growth plan +more_benefits=, More benefits +org_see=See +more_notice=More notices +vedio_detail=Video details [explore] repos = Repositories @@ -345,8 +353,10 @@ account_activated = Account has been activated prohibit_login = Sign In Prohibited prohibit_login_desc = Your account is prohibited to sign in, please contact your site administrator. resent_limit_prompt = You have already requested an activation email recently. Please wait 3 minutes and try again. -has_unconfirmed_mail = Hi %s, you have an unconfirmed email address (%s). If you haven't received a confirmation email or need to resend a new one, please click on the button below. -resend_mail = Click here to resend your activation email +has_unconfirmed_mail = Hi %s, you have an unconfirmed email address (%s). +has_unconfirmed_mail_resend = If you did not receive the activation email, or need to resend it, please click the "Resend your activation email" button below. +has_unconfirmed_mail_change =If you need to change your email address before sending an activation email, please click the "Change email" button below. +resend_mail = Resend your activation email email_not_associate = The email address is not associated with any account. email_not_main=The email address is wrong, please input your primary email address. email_not_right=The email address is not associated with any account, please input the right email address. @@ -383,15 +393,19 @@ openid_register_desc = The chosen OpenID URI is unknown. Associate it with a new openid_signin_desc = Enter your OpenID URI. For example: https://anne.me, bob.openid.org.cn or gnusocial.net/carry. disable_forgot_password_mail = Account recovery is disabled. Please contact your site administrator. email_domain_blacklisted = You cannot register with this kind of email address. +email_domain_blacklisted_change = This type of email address is not currently supported. authorize_application = Authorize Application authorize_redirect_notice = You will be redirected to %s if you authorize this application. authorize_application_created_by = This application was created by %s. authorize_application_description = If you grant the access, it will be able to access and write to all your account information, including private repos and organisations. authorize_title = Authorize "%s" to access your account? authorization_failed = Authorization failed -authorization_failed_desc = The authorization failed because we detected an invalid request. Please contact the maintainer of the app you've tried to authorize. +authorization_failed_desc = The authorization failed because we detected an invalid request. Please contact the maintainer of the app you have tried to authorize. disable_forgot_password_mail = Account recovery is disabled. Please contact your site administrator. 
sspi_auth_failed = SSPI authentication failed +change_email = Change email +change_email_address = Change email address +new_email_address = New email address [phone] format_err=The format of phone number is wrong. query_err=Fail to query phone number, please try again later. @@ -1007,6 +1021,8 @@ readme = README readme_helper = Select a README file template. auto_init = Initialize Repository (Adds .gitignore, License and README) create_repo = Create Repository +failed_to_create_repo=Failed to create repository, please try again later. +failed_to_create_notebook_repo=Failed to create %s repository, please check whether you have the same name project, if yes please update or delete it first. create_course = Publish Course failed_to_create_course=Failed to publish course, please try again later. default_branch = Default Branch @@ -1041,6 +1057,10 @@ model_experience = Model Experience model_noright=You have no right to do the operation. model_rename=Duplicate model name, please modify model name. +notebook_file_not_exist=Notebook file does not exist. +notebook_select_wrong=Please select a Notebook(.ipynb) file first. +notebook_file_no_right=You have no right to access the Notebook(.ipynb) file. + date=Date repo_add=Project Increment repo_total=Project Total @@ -1217,10 +1237,10 @@ cloudbrain.benchmark.evaluate_child_type=Child Type cloudbrain.benchmark.evaluate_mirror=Mirror cloudbrain.benchmark.evaluate_train=Train Script cloudbrain.benchmark.evaluate_test=Test Script -cloudbrain.benchmark.types={"type":[{"id":1,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=detection","first":"Target detection","second":[{"id":1,"value":"None","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"yangzhx","repo_name":"detection_benchmark_script"}]},{"id":2,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=reid","first":"Target re-identification","second":[{"id":1,"value":"Vehicle re-identification","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"},{"id":2,"value":"Image-based person re-identification","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"}]},{"id":3,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=tracking","first":"Multi-target tracking","second":[{"id":1,"value":"None","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"lix07","repo_name":"MOT_benchmark_script"}]}]} +cloudbrain.benchmark.types={"type":[{"id":1,"rank_link":"https://openi.pcl.ac.cn/benchmark/?username=admin&algType=detection","first":"Target detection","second":[{"id":1,"value":"None","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"yangzhx","repo_name":"detection_benchmark_script"}]},{"id":2,"rank_link":"https://openi.pcl.ac.cn/benchmark/?username=admin&algType=reid","first":"Target re-identification","second":[{"id":1,"value":"Vehicle re-identification","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"},{"id":2,"value":"Image-based person re-identification","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"}]},{"id":3,"rank_link":"https://openi.pcl.ac.cn/benchmark/?username=admin&algType=tracking","first":"Multi-target tracking","second":[{"id":1,"value":"None","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"lix07","repo_name":"MOT_benchmark_script"}]}]} cloudbrain.morethanonejob=You 
already have a running or waiting task, create it after that task is over. cloudbrain.morethanonejob1=You have created an equivalent task that is waiting or running, please wait for the task to finish before creating it. -cloudbrain.morethanonejob2=You can view all your Cloud Brain tasks in Home > Cloudbrain Task . +cloudbrain.morethanonejob2=You can view all your Cloud Brain tasks in Home > Cloudbrain Task . modelarts.infer_job_model = Model modelarts.infer_job_model_file = Model File @@ -1243,7 +1263,7 @@ model_Evaluation_not_created = Model evaluation has not been created repo_not_initialized = Code version: You have not initialized the code repository, please initialized first ; debug_task_running_limit =Running time: no more than 4 hours, it will automatically stop if it exceeds 4 hours; dataset_desc = Dataset: Cloud Brain 1 provides CPU/GPU,Cloud Brain 2 provides Ascend NPU.And dataset also needs to be uploaded to the corresponding environment; -platform_instructions = Instructions for use: You can refer to the OpenI_Learning course of Qizhi AI collaboration platform. +platform_instructions = Instructions for use: You can refer to the OpenI_Learning course of Qizhi AI collaboration platform. platform_instructions1 = Instructions for use: You can refer to the platform_instructions2 = OpenI_Learning platform_instructions3 = course of Openi AI collaboration platform. @@ -1285,6 +1305,11 @@ model.manage.select.engine=Select model engine model.manage.modelfile=Model file model.manage.modellabel=Model label model.manage.modeldesc=Model description +model.manage.modelaccess=Model Access +model.manage.modelaccess.public=Public +model.manage.modelaccess.private=Private +model.manage.modelaccess.setpublic=Set Public +model.manage.modelaccess.setprivate=Set Private model.manage.baseinfo=Base Information modelconvert.notcreate=No model conversion task has been created. modelconvert.importfirst1=Please import the @@ -1394,6 +1419,11 @@ star = Star fork = Fork download_archive = Download Repository star_fail=Failed to %s the dataset. +watched=Watched +notWatched=Not watched +un_watch=Unwatch +watch_all=Watch all +watch_no_notify=Watch but do not notify no_desc = No Description no_label = No labels @@ -1435,6 +1465,7 @@ blame = Blame normal_view = Normal View line = line lines = lines +notebook_open = Open in Notebook editor.new_file = New File editor.upload_file = Upload File @@ -3188,6 +3219,9 @@ foot.copyright= Copyright: New Generation Artificial Intelligence Open Source Op Platform_Tutorial = Tutorial foot.advice_feedback = Feedback resource_description = Resource Note +foot.openi_subscription_number = OpenI subscription number +foot.user_communication_group = User communication group + [cloudbrain] all_resource_cluster=All Cluster all_ai_center=All Computing NET @@ -3217,6 +3251,7 @@ specification = specification select_specification = select specification description = description wrong_specification=You cannot use this specification, please choose another item. +result_cleared=The files of this task have been cleared and it cannot be restarted; please create a new debug task instead. resource_use=Resource Occupancy job_name_rule = Please enter letters, numbers, _ and - up to 64 characters and cannot end with a dash (-). 
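For context on the new repo.notebook_* keys above (notebook_file_not_exist, notebook_select_wrong, notebook_file_no_right): they are resolved through the translation layer at request time. The following is a minimal, hypothetical Go sketch of how a handler could surface one of them; it is not part of this patch, but it mirrors the models.BaseErrorMessageApi / ctx.Tr pattern used elsewhere in the change, and the helper name and the .ipynb suffix check are illustrative assumptions.

package repo

import (
	"net/http"
	"strings"

	"code.gitea.io/gitea/models"
	"code.gitea.io/gitea/modules/context"
)

// rejectNonNotebook is a hypothetical helper: when the requested file is not a
// Jupyter notebook, it answers with the localized "repo.notebook_select_wrong"
// message added in the locale files above and reports that the request was rejected.
func rejectNonNotebook(ctx *context.APIContext, filePath string) bool {
	if !strings.HasSuffix(filePath, ".ipynb") {
		ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.notebook_select_wrong")))
		return true
	}
	return false
}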
diff --git a/options/locale/locale_zh-CN.ini b/options/locale/locale_zh-CN.ini index ce179949a..f4e8f1aea 100755 --- a/options/locale/locale_zh-CN.ini +++ b/options/locale/locale_zh-CN.ini @@ -24,6 +24,7 @@ enable_javascript=使用 JavaScript能使本网站更好的工作。 toc=目录 return=返回OpenI calculation_points=算力积分 +notice_announcement=通知公告 username=用户名 email=电子邮件地址 @@ -238,7 +239,7 @@ page_title=探索更好的AI page_small_title=启智AI协作平台 page_description=面向AI领域的一站式协同开发环境,提供集代码开发、数据管理、模型调试、推理和评测为一体的AI开发流水线 page_use=立即使用 -page_only_dynamic=仅展示开源项目动态 +page_only_dynamic=社区开源项目动态 page_recommend_org=推荐组织 page_recommend_org_desc=这些优秀的组织正在使用启智AI开发协作平台;你的组织也想展示到这里, page_recommend_org_commit=点此提交 @@ -285,6 +286,7 @@ search_ge=个 wecome_AI_plt=欢迎来到启智AI协作平台! explore_AI = 探索更好的AI,来这里发现更有意思的 datasets = 数据集 +datasets_descr=开源数据集大本营,同你的项目无缝集成。查看所有 repositories = 项目 use_plt__fuction = 使用本平台提供的AI协作功能,如:托管代码、共享数据、调试算法或训练模型,请先 provide_resoure = 平台目前提供CPU、GPU、NPU的普惠算力资源,可进行多种类型的AI任务。 @@ -293,7 +295,12 @@ activity = 活动 no_events = 还没有与您相关的活动 or_t = 或 powerdby=Powered_by 鹏城实验室云脑、中国算力网(C²NET)、 - +experience_officer=体验官 +openi_experience_officer_plan=启智社区体验官成长计划 +more_benefits=,超多福利大放送 +org_see=。查看 +more_notice=更多通知 +vedio_detail=详细介绍视频 [explore] repos=项目 @@ -349,8 +356,10 @@ account_activated=帐户已激活 prohibit_login=禁止登录 prohibit_login_desc=您的帐户被禁止登录,请与网站管理员联系。 resent_limit_prompt=您请求发送激活邮件过于频繁,请等待 3 分钟后再试! -has_unconfirmed_mail=%s 您好,系统检测到您有一封发送至 %s 但未被确认的邮件。如果您未收到激活邮件,或需要重新发送,请单击下方的按钮。 -resend_mail=单击此处重新发送确认邮件 +has_unconfirmed_mail=%s 您好,系统检测到您有一封发送至 %s 但未被确认的邮件。 +has_unconfirmed_mail_resend=如果您未收到激活邮件,或需要重新发送,请单击下方的 "重新发送确认邮件 " 按钮。 +has_unconfirmed_mail_change=如果您需要更改邮箱后再发送激活邮件,请单击下方的 "修改邮箱" 按钮。 +resend_mail=重新发送确认邮件 email_not_associate=您输入的邮箱地址未被关联到任何帐号! email_not_main=电子邮箱地址不正确,请输入您设置的主要邮箱地址。 email_not_right=您输入了不存在的邮箱地址,请输入正确的邮箱地址。 @@ -387,6 +396,7 @@ openid_register_desc=所选的 OpenID URI 未知。在这里关联一个新帐 openid_signin_desc=输入您的 OpenID URI。例如: https://anne.me、bob.openid.org.cn 或 gnusocial.net/carry。 disable_forgot_password_mail=帐户恢复功能已被禁用。请与网站管理员联系。 email_domain_blacklisted=暂不支持此类电子邮件地址注册。 +email_domain_blacklisted_change=暂不支持此类电子邮件地址。 authorize_application=应用授权 authorize_redirect_notice=如果您授权此应用,您将会被重定向到 %s。 authorize_application_created_by=此应用由%s创建。 @@ -396,6 +406,9 @@ authorization_failed=授权失败 authorization_failed_desc=授权失败,这是一个无效的请求。请联系尝试授权应用的管理员。 disable_forgot_password_mail = Account recovery is disabled. Please contact your site administrator. sspi_auth_failed=SSPI 认证失败 +change_email=修改邮箱 +change_email_address=修改邮箱地址 +new_email_address=新邮箱地址 [phone] format_err=手机号格式错误。 query_err=查询手机号失败,请稍后再试。 @@ -1013,6 +1026,8 @@ readme=自述 readme_helper=选择自述文件模板。 auto_init=初始化存储库 (添加. 
gitignore、许可证和自述文件) create_repo=创建项目 +failed_to_create_repo=创建项目失败,请稍后再试。 +failed_to_create_notebook_repo=创建项目%s失败,请检查您是否有同名的项目,如果有请先手工修改或删除后重试。 create_course=发布课程 failed_to_create_course=发布课程失败,请稍后再试。 default_branch=默认分支 @@ -1041,6 +1056,9 @@ model_experience = 模型体验 model_noright=您没有操作权限。 model_rename=模型名称重复,请修改模型名称 +notebook_file_not_exist=Notebook文件不存在。 +notebook_select_wrong=请先选择Notebook(.ipynb)文件。 +notebook_file_no_right=您没有这个Notebook文件的读权限。 date=日期 repo_add=新增项目 @@ -1230,8 +1248,8 @@ cloudbrain.benchmark.evaluate_child_type=子类型 cloudbrain.benchmark.evaluate_mirror=镜像 cloudbrain.benchmark.evaluate_train=训练程序 cloudbrain.benchmark.evaluate_test=测试程序 -cloudbrain.benchmark.types={"type":[{"id":1,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=detection","first":"目标检测","second":[{"id":1,"value":"无","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"yangzhx","repo_name":"detection_benchmark_script"}]},{"id":2,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=reid","first":"目标重识别","second":[{"id":1,"value":"车辆重识别","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"},{"id":2,"value":"基于图像的行人重识别","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"}]},{"id":3,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=tracking","first":"多目标跟踪","second":[{"id":1,"value":"无","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"lix07","repo_name":"MOT_benchmark_script"}]}]} -cloudbrain.benchmark.model.types={"type":[{"id":1,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=detection","first":"目标检测","second":[{"id":1,"value":"无","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"yangzhx","repo_name":"detection_benchmark_script"}]},{"id":2,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=reid","first":"目标重识别","second":[{"id":1,"value":"车辆重识别","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"},{"id":2,"value":"基于图像的行人重识别","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"}]},{"id":3,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=tracking","first":"多目标跟踪","second":[{"id":1,"value":"无","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"lix07","repo_name":"MOT_benchmark_script"}]}]} +cloudbrain.benchmark.types={"type":[{"id":1,"rank_link":"https://openi.pcl.ac.cn/benchmark/?username=admin&algType=detection","first":"目标检测","second":[{"id":1,"value":"无","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"yangzhx","repo_name":"detection_benchmark_script"}]},{"id":2,"rank_link":"https://openi.pcl.ac.cn/benchmark/?username=admin&algType=reid","first":"目标重识别","second":[{"id":1,"value":"车辆重识别","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"},{"id":2,"value":"基于图像的行人重识别","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"}]},{"id":3,"rank_link":"https://openi.pcl.ac.cn/benchmark/?username=admin&algType=tracking","first":"多目标跟踪","second":[{"id":1,"value":"无","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"lix07","repo_name":"MOT_benchmark_script"}]}]} 
+cloudbrain.benchmark.model.types={"type":[{"id":1,"rank_link":"https://openi.pcl.ac.cn/benchmark/?username=admin&algType=detection","first":"目标检测","second":[{"id":1,"value":"无","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"yangzhx","repo_name":"detection_benchmark_script"}]},{"id":2,"rank_link":"https://openi.pcl.ac.cn/benchmark/?username=admin&algType=reid","first":"目标重识别","second":[{"id":1,"value":"车辆重识别","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"},{"id":2,"value":"基于图像的行人重识别","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"}]},{"id":3,"rank_link":"https://openi.pcl.ac.cn/benchmark/?username=admin&algType=tracking","first":"多目标跟踪","second":[{"id":1,"value":"无","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"lix07","repo_name":"MOT_benchmark_script"}]}]} cloudbrain.morethanonejob=您已经创建了一个正在等待或运行中的同类任务,请等待任务结束再创建。 cloudbrain.morethanonejob1=您已经有 同类任务 正在等待或运行中,请等待任务结束再创建; cloudbrain.morethanonejob2=可以在 “个人中心 > 云脑任务” 查看您所有的云脑任务。 @@ -1257,7 +1275,7 @@ model_Evaluation_not_created = 未创建过评测任务 repo_not_initialized = 代码版本:您还没有初始化代码仓库,请先创建代码版本; debug_task_running_limit = 运行时长:最长不超过4个小时,超过4个小时将自动停止; dataset_desc = 数据集:云脑1提供 CPU / GPU 资源,云脑2提供 Ascend NPU 资源,调试使用的数据集也需要上传到对应的环境; -platform_instructions = 使用说明:可以参考启智AI协作平台小白训练营课程。 +platform_instructions = 使用说明:可以参考启智AI协作平台小白训练营课程。 platform_instructions1 = 使用说明:可以参考启智AI协作平台 platform_instructions2 = 小白训练营课程 platform_instructions3 = 。 @@ -1300,6 +1318,11 @@ model.manage.select.engine=选择模型框架 model.manage.modelfile=模型文件 model.manage.modellabel=模型标签 model.manage.modeldesc=模型描述 +model.manage.modelaccess=模型权限 +model.manage.modelaccess.public=公开 +model.manage.modelaccess.private=私有 +model.manage.modelaccess.setpublic=设为公开 +model.manage.modelaccess.setprivate=设为私有 model.manage.baseinfo=基本信息 modelconvert.notcreate=未创建过模型转换任务 modelconvert.importfirst1=请您先导入 @@ -1411,6 +1434,11 @@ star=点赞 fork=派生 download_archive=下载此项目 star_fail=%s失败。 +watched=已关注 +notWatched=未关注 +un_watch=不关注 +watch_all=关注所有动态 +watch_no_notify=关注但不提醒动态 no_desc=暂无描述 @@ -1454,6 +1482,8 @@ normal_view=普通视图 line=行 lines=行 +notebook_open = 在Notebook中打开 + editor.new_file=新建文件 editor.upload_file=上传文件 editor.edit_file=编辑文件 @@ -3206,6 +3236,8 @@ foot.copyright= 版权所有:新一代人工智能开源开放平台(OpenI Platform_Tutorial=新手指引 foot.advice_feedback = 意见反馈 resource_description = 资源说明 +foot.openi_subscription_number = 启智社区订阅号 +foot.user_communication_group = 用户交流群 [cloudbrain] all_resource_cluster=全部集群 @@ -3239,6 +3271,8 @@ card_duration = 运行卡时 card_type = 卡类型 wrong_specification=您目前不能使用这个资源规格,请选择其他资源规格。 +result_cleared=本任务的文件已被清理,无法再次调试,请新建调试任务。 + job_name_rule = 请输入字母、数字、_和-,最长64个字符,且不能以中划线(-)结尾。 train_dataset_path_rule = 数据集位置存储在运行参数 data_url 中,预训练模型存放在运行参数 ckpt_url 中,训练输出路径存储在运行参数 train_url 中。 infer_dataset_path_rule = 数据集位置存储在运行参数 data_url 中,推理输出路径存储在运行参数 result_url 中。 diff --git a/public/home/home.js b/public/home/home.js index 853c3ef23..aeb51b184 100755 --- a/public/home/home.js +++ b/public/home/home.js @@ -9,16 +9,20 @@ if(isEmpty(token)){ var swiperNewMessage = new Swiper(".newslist", { direction: "vertical", - slidesPerView: 9, + slidesPerView: 6, loop: true, + spaceBetween: 8, autoplay: { delay: 2500, disableOnInteraction: false, }, }); -var swiperEvent = new Swiper(".event-list", { - slidesPerView: 3, - spaceBetween: 30, + +var swiperRepo = new Swiper(".homepro-list", { + slidesPerView: 1, + // slidesPerColumn: 2, + // slidesPerColumnFill:'row', + spaceBetween: 
20, pagination: { el: ".swiper-pagination", clickable: true, @@ -27,49 +31,130 @@ var swiperEvent = new Swiper(".event-list", { delay: 2500, disableOnInteraction: false, }, + breakpoints: { + 768: { + slidesPerView: 2, + }, + 1024: { + slidesPerView: 2, + }, + 1200: { + slidesPerView: 3, + }, + 1440: { + slidesPerView: 3, + }, + 1840: { + slidesPerView: 3, + }, + 1920: { + slidesPerView: 3, + }, + }, }); -var swiperRepo = new Swiper(".homepro-list", { + +var swiperOrg = new Swiper(".homeorg-list", { slidesPerView: 1, slidesPerColumn: 2, slidesPerColumnFill:'row', - spaceBetween: 30, + spaceBetween: 25, pagination: { el: ".swiper-pagination", clickable: true, }, autoplay: { - delay: 2500, + delay: 4500, disableOnInteraction: false, }, breakpoints: { 768: { - slidesPerView: 2, + slidesPerView: 3, + slidesPerColumn: 2, }, 1024: { slidesPerView: 3, + slidesPerColumn: 2, + }, + 1200: { + slidesPerView: 4, + slidesPerColumn: 2, + }, + 1440: { + slidesPerView: 4, + slidesPerColumn: 2, + }, + 1840: { + slidesPerView: 4, + slidesPerColumn: 2, + }, + 1920: { + slidesPerView: 4, + slidesPerColumn: 2, }, }, }); -var swiperOrg = new Swiper(".homeorg-list", { +var swiperUserExp = new Swiper(".home-user-exp-list", { slidesPerView: 1, - slidesPerColumn: 4, + spaceBetween: 0, + navigation: { + nextEl: '.homeuserexp .swiper-prev', + prevEl: '.homeuserexp .swiper-next', + }, + autoplay: { + delay: 2500, + disableOnInteraction: false, + }, + breakpoints: { + 768: { + slidesPerView: 2, + }, + 1200: { + slidesPerView: 3, + }, + 1440: { + slidesPerView: 4, + }, + 1840: { + slidesPerView: 4, + }, + 1920: { + slidesPerView: 5, + }, + }, +}); + +var swiperDataset = new Swiper(".home-dataset-list", { + slidesPerView: 2, + slidesPerColumn: 1, slidesPerColumnFill:'row', - spaceBetween: 15, + spaceBetween: 30, pagination: { el: ".swiper-pagination", clickable: true, }, autoplay: { - delay: 4500, + delay: 2500, disableOnInteraction: false, }, breakpoints: { + 676: { + slidesPerView: 3, + }, 768: { - slidesPerView: 2, + slidesPerView: 4, }, - 1024: { - slidesPerView: 3, + 1320: { + slidesPerView: 5, + }, + 1520: { + slidesPerView: 6, + }, + 1720: { + slidesPerView: 7, + }, + 1920: { + slidesPerView: 7, }, }, }); @@ -85,7 +170,7 @@ document.onreadystatechange = function () { if(document.readyState != "complete"){ return; } - console.log("Start to open WebSocket." + document.readyState); + console.log("Start to open WebSocket." + document.readyState); queryRecommendData(); var output = document.getElementById("newmessage"); @@ -101,6 +186,7 @@ document.onreadystatechange = function () { }; socket.onmessage = function (e) { + if (!output) return; var data =JSON.parse(e.data) var html = ""; if (data != null){ @@ -177,18 +263,17 @@ document.onreadystatechange = function () { var time = getTime(record.CreatedUnix,currentTime); html += " " + time; } - html += ""; + html += ""; html += ""; } } output.innerHTML = html; + $('#homenews p').show(); swiperNewMessage.updateSlides(); swiperNewMessage.updateProgress(); }; } - - function getTaskLink(record){ var re = getRepoLink(record); if(record.OpType == 24){ @@ -223,7 +308,7 @@ function getMsg(record){ }else{ console.log("act user is null."); } - html += " \"\"" + html += "
\"\"" html += "
" html += " " + name + "" return html; @@ -236,6 +321,7 @@ function getRepotext(record){ return record.Repo.OwnerName + "/" + record.Repo.Name; } } + function getRepoLink(record){ return encodeURI(record.Repo.OwnerName + "/" + record.Repo.Name); @@ -437,10 +523,6 @@ function getAction(opType,isZh){ } } - - - - function queryRecommendData(){ $.ajax({ type:"GET", @@ -453,7 +535,10 @@ function queryRecommendData(){ success:function(json){ displayOrg(json.org); displayRepo(json.repo); - displayActivity(json.image); + displayActivity(json.activity); + displayDataset(json.dataset); + displayUserExp(json.user_experience); + LetterAvatar && LetterAvatar.transform(); }, error:function(response) { } @@ -463,49 +548,99 @@ function queryRecommendData(){ function displayActivity(json){ var activityDiv = document.getElementById("recommendactivity"); + if (!activityDiv) return; var html = ""; if (json != null && json.length > 0){ for(var i = 0; i < json.length;i++){ - var record = json[i] - html += "
"; - html += ""; - html += "
" + var record = json[i]; + var name = isZh ? (record["name"] || '') : (record["name_en"] || record["name"]); + html += "
"; } + var swiperEvent = new Swiper(".event-list", { + slidesPerView: 1, + spaceBetween: 30, + // pagination: { + // el: ".swiper-pagination", + // clickable: true, + // }, + autoplay: { + delay: 2500, + disableOnInteraction: false, + }, + breakpoints: { + 768: { + slidesPerView: Math.min(2, json.length), + }, + 1024: { + slidesPerView: Math.min(3, json.length), + }, + 1200: { + slidesPerView: Math.min(3, json.length), + }, + 1440: { + slidesPerView: Math.min(4, json.length), + }, + 1840: { + slidesPerView: Math.min(4, json.length), + }, + 1920: { + slidesPerView: Math.min(4, json.length), + }, + }, + }); + activityDiv.innerHTML = html; + swiperEvent.updateSlides(); + swiperEvent.updateProgress(); } - activityDiv.innerHTML = html; - swiperEvent.updateSlides(); - swiperEvent.updateProgress(); } function displayRepo(json){ var orgRepo = document.getElementById("recommendrepo"); var html = ""; if (json != null && json.length > 0){ - for(var i = 0; i < json.length;i++){ - var record = json[i] - html += "
"; - html += "
"; - html += "
"; - html += " "; - html += " " + record["NumStars"] + "" + record["NumForks"]; - html += " "; - html += " "; - html += " " + record["Alias"] +""; - html += "
" + record["Description"] + "
"; - html += "
" - if(record["Topics"] != null){ - for(var j = 0; j < record["Topics"].length; j++){ - topic = record["Topics"][j]; - url = "/explore/repos?q=" + (topic) + "&topic=" - html += "" + topic + ""; - } + var repoMap = {}; + for (var i = 0, iLen = json.length; i < iLen; i++) { + var repo = json[i]; + var label = isZh ? repo.Label : repo.Label_en; + if (repoMap[label]) { + repoMap[label].push(repo); + } else { + repoMap[label] = [repo]; } - html += "
"; - html += "
"; - html += "
"; - html += "
"; + } + + for (var label in repoMap) { + var repos = repoMap[label]; + var labelSearch = repos[0].Label; + html += `
`; + for (var i = 0, iLen = repos.length; i < iLen; i++) { + if (i >= 4) break; + var repo = repos[i]; + // ${repo["NumStars"]}${repo["NumForks"]}
+ html += `
+
+ ${repo["Avatar"] ? `` : ``} + ${repo["Alias"]} +
${repo["Description"]}
+ `; + // if (repo["Topics"] != null) { + // for(var j = 0; j < repo["Topics"].length; j++){ + // var topic = repo["Topics"][j]; + // var url = "/explore/repos?q=" + (topic) + "&topic=" + // html += `${topic}`; + // } + // } + html += ` +
+
`; + } + html += '
' } } orgRepo.innerHTML = html; @@ -513,7 +648,6 @@ function displayRepo(json){ swiperRepo.updateProgress(); } - function getRepoOrOrg(key,isZhLang,numbers=1){ if(numbers > 1){ key+="1"; @@ -537,7 +671,7 @@ function displayOrg(json){ html += " "; html += "
"; html += " " + record["Name"] + " " + record["FullName"]; - html += "
" + record["NumRepos"] +" " + getRepoOrOrg(1,isZh,record["NumRepos"]) + " ・ " + record["NumMembers"] +" " + getRepoOrOrg(2,isZh,record["NumMembers"]) + " ・ " + record["NumTeams"] + " " + getRepoOrOrg(3,isZh,record["NumTeams"]) + "
"; + html += "
" + record["NumRepos"] +" " + getRepoOrOrg(1,isZh,record["NumRepos"]) + " ・ " + record["NumMembers"] +" " + getRepoOrOrg(2,isZh,record["NumMembers"]) + " ・ " + record["NumTeams"] + " " + getRepoOrOrg(3,isZh,record["NumTeams"]) + "
"; html += "
"; html += "
"; html += "
"; @@ -548,3 +682,187 @@ function displayOrg(json){ orgDiv.innerHTML = html; swiperOrg.updateSlides(); } + +function displayDataset(data) { + var homeDatasetEl = document.getElementById("home_dataset"); + if (!homeDatasetEl) return; + var html = ''; + var svgStrMap = { + '0': '', + '1': '', + '2': '', + '3': '', + '4': '', + '5': '', + '6': '', + } + for (var i = 0, iLen = data.length; i < iLen; i++) { + var dataI = data[i]; + html += `` + } + homeDatasetEl.innerHTML = html; + swiperDataset.updateSlides(); + swiperDataset.updateProgress(); +} + +function displayUserExp(data) { + var homeUserExpEl = document.getElementById("home_user-exp"); + if (!homeUserExpEl) return; + var html = ''; + for (var i = 0, iLen = data.length; i < iLen; i++) { + var dataI = data[i]; + html += `
+
+
+ +
+
+
+
${dataI.fullname || dataI.name}
+
${dataI.desc}
+
+
` + } + homeUserExpEl.innerHTML = html; + swiperUserExp.updateSlides(); + swiperUserExp.updateProgress(); +} + +function getNotice() { + $.ajax({ + type:"GET", + url:"/dashboard/invitation", + headers: { authorization:token, }, + dataType:"json", + data: { + filename: 'notice/notice.json', + }, + success:function(json){ + if (json) { + try { + var noticeList = JSON.parse(json).Notices || []; + var noticeEls = $('._hm-recommend-info-area-1 a._hm-notice'); + for (var i = 0, iLen = noticeEls.length; i < iLen; i++) { + var noticeEl = noticeEls.eq(i); + var noticeObj = noticeList[i]; + if (noticeObj) { + var title = isZh ? noticeObj.Title : (noticeObj.Title_en || noticeObj.Title); + noticeEl.attr('href', noticeObj.Link); + noticeEl.find('span').text(title).attr('title', title); + noticeEl.show(); + } else { + noticeEl.hide(); + } + } + } catch (e) { + console.info(e); + } + } + }, + error:function(response) { + } + }); +} + +function getRecommendModule() { + $.ajax({ + type:"GET", + url:"/dashboard/invitation", + headers: { authorization:token, }, + dataType:"json", + data: { + filename: 'home/newfunction', + }, + success:function(json){ + if (json) { + try { + var recommendModuleList = JSON.parse(json) || []; + var recommendModuleEls = $('._hm-recommend-info-area a._hm-link'); + for (var i = 0, iLen = recommendModuleEls.length; i < iLen; i++) { + var recommendModuleEl = recommendModuleEls.eq(i); + var recommendModuleObj = recommendModuleList[i]; + if (recommendModuleObj) { + recommendModuleEl.attr('href', recommendModuleObj.image_link); + recommendModuleEl.text(isZh ? recommendModuleObj.name : (recommendModuleObj.name_en || recommendModuleObj.name)); + } else { + } + } + } catch (e) { + console.info(e); + } + } + }, + error:function(response) { + } + }); +} + +function initHomeTopBanner() { + var homeSlideTimer = null; + var homeSlideDuration = 8000; + function homeSlide(direction, index) { + var slidePages = $('._hm-pg-c ._hm-pg'); + var currentPage = slidePages.filter('._hm-pg-show'); + var slidePagination = $('._hm-slide-pagination-c ._hm-slide-pagination-item'); + var currentIndex = currentPage.index(); + var next = 0; + if (direction) { + next = direction == 'left' ? 
currentIndex - 1 : currentIndex + 1; + } else { + next = index || 0; + } + if (next < 0) next = slidePages.length - 1; + if (next == slidePages.length) next = 0; + slidePages.removeClass('_hm-pg-show'); + slidePages.eq(next).addClass('_hm-pg-show'); + slidePagination.removeClass('_hm-slide-pagination-item-active'); + slidePagination.eq(next).addClass('_hm-slide-pagination-item-active'); + } + + function startSlide() { + homeSlideTimer && clearTimeout(homeSlideTimer); + homeSlideTimer = setTimeout(function() { + homeSlide('right'); + startSlide(); + }, homeSlideDuration); + } + + function stopSlide() { + homeSlideTimer && clearTimeout(homeSlideTimer); + } + + $('._hm-slide-btn').on('click', function () { + if ($(this).hasClass('_hm-slide-btn-left')) { + homeSlide('left'); + } else { + homeSlide('right'); + } + startSlide(); + }); + $('._hm-pg #homenews').on('mouseenter', function() { + stopSlide(); + }).on('mouseleave', function() { + startSlide(); + }); + $('._hm-slide-pagination-c ._hm-slide-pagination-item').on('click', function() { + var self = $(this); + if (self.hasClass('_hm-slide-pagination-item-active')) return; + homeSlide('', self.index()); + startSlide(); + }); + setTimeout(function() { startSlide(); }, 500); +} + +initHomeTopBanner(); +getNotice(); +getRecommendModule(); diff --git a/public/img/search.svg b/public/img/search.svg index ec91b07dd..a4d965f9a 100644 --- a/public/img/search.svg +++ b/public/img/search.svg @@ -1 +1 @@ - \ No newline at end of file + \ No newline at end of file diff --git a/routers/admin/cloudbrains.go b/routers/admin/cloudbrains.go index cbf6782ed..91685251b 100755 --- a/routers/admin/cloudbrains.go +++ b/routers/admin/cloudbrains.go @@ -17,6 +17,7 @@ import ( "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" + cloudbrainService "code.gitea.io/gitea/services/cloudbrain" ) const ( @@ -95,6 +96,8 @@ func CloudBrains(ctx *context.Context) { models.LoadSpecs4CloudbrainInfo(ciTasks) for i, task := range ciTasks { + ciTasks[i] = cloudbrainService.UpdateCloudbrainAiCenter(ciTasks[i]) + ciTasks[i].Cloudbrain.AiCenter = repo.GetAiCenterNameByCode(ciTasks[i].Cloudbrain.AiCenter, ctx.Language()) ciTasks[i].CanDebug = true ciTasks[i].CanDel = true ciTasks[i].Cloudbrain.ComputeResource = task.ComputeResource @@ -186,7 +189,8 @@ func DownloadCloudBrains(ctx *context.Context) { } models.LoadSpecs4CloudbrainInfo(pageRecords) for _, record := range pageRecords { - + record = cloudbrainService.UpdateCloudbrainAiCenter(record) + record.Cloudbrain.AiCenter = repo.GetAiCenterNameByCode(record.Cloudbrain.AiCenter, ctx.Language()) for k, v := range allValues(row, record, ctx) { f.SetCellValue(cloudBrain, k, v) } @@ -208,7 +212,7 @@ func allValues(row int, rs *models.CloudbrainInfo, ctx *context.Context) map[str return map[string]string{getCellName("A", row): rs.DisplayJobName, getCellName("B", row): repo.GetCloudbrainCluster(rs.Cloudbrain, ctx), getCellName("C", row): rs.JobType, getCellName("D", row): rs.Status, getCellName("E", row): time.Unix(int64(rs.Cloudbrain.CreatedUnix), 0).Format(CREATE_TIME_FORMAT), getCellName("F", row): getDurationTime(rs), getCellName("G", row): rs.ComputeResource, - getCellName("H", row): repo.GetCloudbrainAiCenter(rs.Cloudbrain, ctx), getCellName("I", row): getCloudbrainCardType(rs), + getCellName("H", row): rs.Cloudbrain.AiCenter, getCellName("I", row): getCloudbrainCardType(rs), getCellName("J", row): rs.Name, getCellName("K", row): getRepoPathName(rs), getCellName("L", row): 
rs.JobName, } } diff --git a/routers/api/v1/api.go b/routers/api/v1/api.go index 5ee065514..3e50b00fc 100755 --- a/routers/api/v1/api.go +++ b/routers/api/v1/api.go @@ -544,6 +544,10 @@ func RegisterRoutes(m *macaron.Macaron) { m.Post("/complete_multipart", repo.CompleteMultipart) }, reqToken()) + m.Group("/pipeline", func() { + m.Post("/notification", bind(api.PipelineNotification{}), notify.PipelineNotify) + + }, reqToken()) // Notifications m.Group("/notifications", func() { @@ -610,10 +614,12 @@ func RegisterRoutes(m *macaron.Macaron) { m.Get("/query_invitation_yesterday", operationReq, repo_ext.QueryInvitationYesterday) m.Get("/query_invitation_all", operationReq, repo_ext.QueryInvitationAll) m.Get("/query_invitation_userdefine", operationReq, repo_ext.QueryUserDefineInvitationPage) + m.Get("/query_user_annual_report", repo_ext.QueryUserAnnualReport) m.Get("/download_invitation_detail", operationReq, repo_ext.DownloadInvitationDetail) //cloudbrain board + m.Get("/cloudbrainboard/cloudbrain/resource_queues", repo.GetResourceQueues) m.Group("/cloudbrainboard", func() { m.Get("/downloadAll", repo.DownloadCloudBrainBoard) m.Group("/cloudbrain", func() { @@ -736,6 +742,12 @@ func RegisterRoutes(m *macaron.Macaron) { m.Get("/my_favorite", repo.MyFavoriteDatasetMultiple) }, reqToken(), repoAssignment()) + m.Group("/file_notebook", func() { + m.Get("", repo.GetFileNoteBookInfo) + m.Post("/create", reqToken(), reqWeChat(), bind(api.CreateFileNotebookJobOption{}), repo.CreateFileNoteBook) + + }) + m.Group("/repos", func() { m.Get("/search", repo.Search) @@ -751,6 +763,7 @@ func RegisterRoutes(m *macaron.Macaron) { m.Group("/:username/:reponame", func() { m.Get("/right", reqToken(), repo.GetRight) m.Get("/tagger", reqToken(), repo.ListTagger) + m.Get("/cloudBrainJobId", repo.GetCloudBrainJobId) m.Combo("").Get(reqAnyRepoReader(), repo.Get). Delete(reqToken(), reqOwner(), repo.Delete). 
Patch(reqToken(), reqAdmin(), bind(api.EditRepoOption{}), context.RepoRef(), repo.Edit) @@ -987,6 +1000,7 @@ func RegisterRoutes(m *macaron.Macaron) { m.Get("/detail", reqToken(), reqRepoReader(models.UnitTypeCloudBrain), repo.CloudBrainShow) m.Get("/model_list", repo.CloudBrainModelList) m.Post("/stop_version", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo_ext.CloudBrainStop) + m.Put("/stop", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo.GeneralCloudBrainJobStop) }) }) m.Group("/inference-job", func() { @@ -1007,12 +1021,15 @@ func RegisterRoutes(m *macaron.Macaron) { m.Delete("/delete_model", repo.DeleteModel) m.Get("/downloadall", repo.DownloadModel) m.Get("/query_model_byId", repo.QueryModelById) + m.Get("/query_model_byName", repo.QueryModelByName) m.Get("/query_model_for_predict", repo.QueryModelListForPredict) m.Get("/query_modelfile_for_predict", repo.QueryModelFileForPredict) m.Get("/query_train_model", repo.QueryTrainModelList) m.Post("/create_model_convert", repo.CreateModelConvert) + m.Post("/convert_stop", repo.StopModelConvert) m.Get("/show_model_convert_page", repo.ShowModelConvertPage) m.Get("/query_model_convert_byId", repo.QueryModelConvertById) + m.Get("/query_model_convert_byName", repo.QueryModelConvertByName) m.Get("/:id", repo.GetCloudbrainModelConvertTask) m.Get("/:id/log", repo.CloudbrainForModelConvertGetLog) diff --git a/routers/api/v1/notify/pipeline.go b/routers/api/v1/notify/pipeline.go new file mode 100644 index 000000000..021af20dc --- /dev/null +++ b/routers/api/v1/notify/pipeline.go @@ -0,0 +1,15 @@ +package notify + +import ( + "net/http" + + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/context" + api "code.gitea.io/gitea/modules/structs" +) + +func PipelineNotify(ctx *context.APIContext, form api.PipelineNotification) { + + ctx.JSON(http.StatusOK, models.BaseOKMessageApi) + +} diff --git a/routers/api/v1/repo/cloudbrain.go b/routers/api/v1/repo/cloudbrain.go index 7022dc011..1c5a58b47 100755 --- a/routers/api/v1/repo/cloudbrain.go +++ b/routers/api/v1/repo/cloudbrain.go @@ -11,11 +11,14 @@ import ( "io" "net/http" "os" + "path" "sort" "strconv" "strings" "time" + "code.gitea.io/gitea/modules/grampus" + cloudbrainService "code.gitea.io/gitea/services/cloudbrain" "code.gitea.io/gitea/modules/convert" @@ -79,6 +82,98 @@ func CloudBrainShow(ctx *context.APIContext) { ctx.JSON(http.StatusOK, models.BaseMessageWithDataApi{Code: 0, Message: "", Data: convert.ToCloudBrain(task)}) } +func GeneralCloudBrainJobStop(ctx *context.APIContext) { + task := ctx.Cloudbrain + if task.IsTerminal() { + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("cloudbrain.Already_stopped")) + return + } + var err error + + if ctx.Cloudbrain.Type == models.TypeCloudBrainOne { + err = cloudbrain.StopJob(task.JobID) + } else if ctx.Cloudbrain.Type == models.TypeCloudBrainTwo { + _, err = modelarts.StopTrainJob(task.JobID, strconv.FormatInt(task.VersionID, 10)) + } else { + _, err = grampus.StopJob(task.JobID) + } + + if err != nil { + log.Warn("cloud brain stopped failed.", err) + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("cloudbrain.Stopped_failed")) + return + } + + ctx.JSON(http.StatusOK, models.BaseOKMessageApi) +} +func CreateFileNoteBook(ctx *context.APIContext, option api.CreateFileNotebookJobOption) { + cloudbrainTask.FileNotebookCreate(ctx.Context, option) +} + +func GetFileNoteBookInfo(ctx *context.APIContext) { + //image description spec description waiting count + + specs, err := 
models.GetResourceSpecificationByIds([]int64{setting.FileNoteBook.SpecIdCPU, setting.FileNoteBook.SpecIdGPU, setting.FileNoteBook.SpecIdNPU, setting.FileNoteBook.SpecIdNPUCD}) + if err != nil { + log.Error("Fail to query specifications", err) + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.notebook_query_fail"))) + return + } + + var specCPU, specGpu, specNPU, specNPUCD *api.SpecificationShow + var specGpuQueueCode string + for _, spec := range specs { + if spec.ID == setting.FileNoteBook.SpecIdCPU { + specCPU = convert.ToSpecification(spec) + } else if spec.ID == setting.FileNoteBook.SpecIdGPU { + specGpu = convert.ToSpecification(spec) + specGpuQueueCode = spec.QueueCode + } else if spec.ID == setting.FileNoteBook.SpecIdNPU { + specNPU = convert.ToSpecification(spec) + } else if spec.ID == setting.FileNoteBook.SpecIdNPUCD { + specNPUCD = convert.ToSpecification(spec) + } + } + + waitCountNpu := cloudbrain.GetWaitingCloudbrainCount(models.TypeCloudBrainTwo, "") + + queuesMap, err := cloudbrain.GetQueuesDetail() + if err != nil { + log.Error("Fail to query gpu queues waiting count", err) + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.notebook_query_fail"))) + return + } + waitCountGPU := (*queuesMap)[specGpuQueueCode] + if !setting.ModelartsCD.Enabled { + ctx.JSON(http.StatusOK, map[string]interface{}{ + "code": 0, + "projectName": setting.FileNoteBook.ProjectName, + "specCpu": specCPU, + "specGpu": specGpu, + "specNpu": specNPU, + "waitCountGpu": waitCountGPU, + "waitCountNpu": waitCountNpu, + "imageCpuDescription": setting.FileNoteBook.ImageCPUDescription, + "imageGpuDescription": setting.FileNoteBook.ImageGPUDescription, + "imageNpuDescription": setting.FileNoteBook.ImageNPUDescription, + }) + } else { + ctx.JSON(http.StatusOK, map[string]interface{}{ + "code": 0, + "projectName": setting.FileNoteBook.ProjectName, + "specCpu": specCPU, + "specGpu": specGpu, + "specNpu": specNPUCD, + "waitCountGpu": waitCountGPU, + "waitCountNpu": waitCountNpu, + "imageCpuDescription": setting.FileNoteBook.ImageCPUDescription, + "imageGpuDescription": setting.FileNoteBook.ImageGPUDescription, + "imageNpuDescription": setting.FileNoteBook.ImageNPUCDDescription, + }) + + } + +} func CreateCloudBrain(ctx *context.APIContext, option api.CreateTrainJobOption) { if option.Type == cloudbrainTask.TaskTypeCloudbrainOne { @@ -141,10 +236,11 @@ func GetCloudbrainTask(ctx *context.APIContext) { ) ID := ctx.Params(":id") - job, err := models.GetCloudbrainByID(ID) + + job, err := cloudbrain.GetCloudBrainByIdOrJobId(ID) + if err != nil { ctx.NotFound(err) - log.Error("GetCloudbrainByID failed:", err) return } if job.JobType == string(models.JobTypeModelSafety) { @@ -487,6 +583,12 @@ func ModelSafetyGetLog(ctx *context.APIContext) { }) return } + prefix := strings.TrimPrefix(path.Join(setting.TrainJobModelPath, job.JobName, modelarts.LogPath, job.VersionName), "/") + "/job" + _, err = storage.GetObsLogFileName(prefix) + canLogDownload := isCanDownloadLog(ctx, job) + if err != nil { + canLogDownload = false + } ctx.Data["log_file_name"] = resultLogFile.LogFileList[0] ctx.JSON(http.StatusOK, map[string]interface{}{ "JobID": job.JobID, @@ -495,7 +597,7 @@ func ModelSafetyGetLog(ctx *context.APIContext) { "EndLine": result.EndLine, "Content": result.Content, "Lines": result.Lines, - "CanLogDownload": isCanDownloadLog(ctx, job), + "CanLogDownload": canLogDownload, "StartTime": job.StartTime, }) } @@ -566,7 +668,6 @@ func CloudbrainDownloadLogFile(ctx *context.Context) { url, err := 
storage.Attachments.PresignedGetURL(prefix+"/"+fileName, fileName) if err != nil { log.Error("Get minio get SignedUrl failed: %v", err.Error(), ctx.Data["msgID"]) - ctx.ServerError("Get minio get SignedUrl failed", err) return } log.Info("fileName=" + fileName) @@ -596,12 +697,24 @@ func CloudbrainGetLog(ctx *context.APIContext) { existStr = taskRes.TaskStatuses[0].ExitDiagnostics } ctx.Data["existStr"] = existStr - log.Info("existStr=" + existStr) } else { ModelSafetyGetLog(ctx) return } + } + if job.JobType == string(models.JobTypeTrain) || job.JobType == string(models.JobTypeInference) { + if job.Type == models.TypeCloudBrainOne { + result, err := cloudbrain.GetJob(job.JobID) + existStr := "" + if err == nil && result != nil { + jobRes, _ := models.ConvertToJobResultPayload(result.Payload) + taskRoles := jobRes.TaskRoles + taskRes, _ := models.ConvertToTaskPod(taskRoles[cloudbrain.SubTaskName].(map[string]interface{})) + existStr = taskRes.TaskStatuses[0].ExitDiagnostics + } + ctx.Data["existStr"] = existStr + } } lines := ctx.QueryInt("lines") @@ -638,7 +751,7 @@ func CloudbrainGetLog(ctx *context.APIContext) { result = getLogFromModelDir(job.JobName, startLine, endLine, resultPath) if result == nil { log.Error("GetJobLog failed: %v", err, ctx.Data["MsgID"]) - ctx.ServerError(err.Error(), err) + //ctx.ServerError(err.Error(), err) return } } @@ -646,9 +759,11 @@ func CloudbrainGetLog(ctx *context.APIContext) { if result["Content"] != nil { content = result["Content"].(string) } + if ctx.Data["existStr"] != nil && result["Lines"].(int) < 50 { content = content + ctx.Data["existStr"].(string) } + logFileName := result["FileName"] //Logs can only be downloaded if the file exists @@ -851,7 +966,7 @@ func CloudBrainModelConvertList(ctx *context.APIContext) { err = json.Unmarshal([]byte(dirs), &fileInfos) if err != nil { log.Error("json.Unmarshal failed:%v", err.Error(), ctx.Data["msgID"]) - ctx.ServerError("json.Unmarshal failed:", err) + //ctx.ServerError("json.Unmarshal failed:", err) return } @@ -882,7 +997,7 @@ func CloudBrainModelConvertList(ctx *context.APIContext) { models, err := storage.GetObsListObject(job.ID, "output/", parentDir, versionName) if err != nil { log.Info("get TrainJobListModel failed:", err) - ctx.ServerError("GetObsListObject:", err) + //ctx.ServerError("GetObsListObject:", err) return } @@ -927,7 +1042,7 @@ func CloudBrainModelList(ctx *context.APIContext) { err = json.Unmarshal([]byte(dirs), &fileInfos) if err != nil { log.Error("json.Unmarshal failed:%v", err.Error(), ctx.Data["msgID"]) - ctx.ServerError("json.Unmarshal failed:", err) + //ctx.ServerError("json.Unmarshal failed:", err) return } diff --git a/routers/api/v1/repo/cloudbrain_dashboard.go b/routers/api/v1/repo/cloudbrain_dashboard.go index d1ccf1bf5..0d68fff30 100755 --- a/routers/api/v1/repo/cloudbrain_dashboard.go +++ b/routers/api/v1/repo/cloudbrain_dashboard.go @@ -11,7 +11,10 @@ import ( "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/timeutil" "code.gitea.io/gitea/routers/repo" + cloudbrainService "code.gitea.io/gitea/services/cloudbrain" + "code.gitea.io/gitea/services/cloudbrain/resource" "github.com/360EntSecGroup-Skylar/excelize/v2" ) @@ -56,42 +59,30 @@ func GetAllCloudbrainsOverview(ctx *context.Context) { return } cloudbrainTypeCount, err := models.GetCloudbrainTypeCount() - log.Info("cloudbrainTypeCount:", cloudbrainTypeCount) if err != nil { log.Error("Can not query cloudbrainTypeCount.", err) return } - 
cloudbrainTpyeDurationSum, err := models.GetCloudbrainTpyeDurationSum() - log.Info("cloudbrainTpyeDurationSum:", cloudbrainTpyeDurationSum) - if err != nil { - log.Error("Can not query cloudbrainTpyeDurationSum.", err) - return - } - todayCloudbrainCount, err := models.GetTodayCloudbrainCount(beginTime, endTime) - log.Info("todayCloudbrainCount:", todayCloudbrainCount) if err != nil { log.Error("Can not query todayCloudbrainCount.", err) return } todayRunningCount, err := models.GetTodayRunningCount(beginTime, endTime) - log.Info("todayRunningCount:", todayRunningCount) if err != nil { log.Error("Can not query todayRunningCount.", err) return } todayWaitingCount, err := models.GetTodayWaitingCount(beginTime, endTime) - log.Info("todayWaittingCount:", todayWaitingCount) if err != nil { log.Error("Can not query todayWaitingCount.", err) return } todayCompletedCount := todayCloudbrainCount - todayRunningCount - todayWaitingCount - log.Info("todayCompletedCount:", todayCompletedCount) creatorCount, err := models.GetCreatorCount() if err != nil { @@ -121,8 +112,9 @@ func GetOverviewDuration(ctx *context.Context) { recordBeginTime := recordCloudbrain[0].Cloudbrain.CreatedUnix now := time.Now() endTime := now - worker_server_num := 1 - cardNum := 1 + var workServerNumber int64 + var cardNum int64 + durationAllSum := int64(0) cardDuSum := int64(0) @@ -136,7 +128,7 @@ func GetOverviewDuration(ctx *context.Context) { c2NetDuration := int64(0) cDCenterDuration := int64(0) - cloudbrains, _, err := models.CloudbrainAllStatic(&models.CloudbrainsOptions{ + cloudbrains, _, err := models.CloudbrainAllKanBan(&models.CloudbrainsOptions{ Type: models.TypeCloudBrainAll, BeginTimeUnix: int64(recordBeginTime), EndTimeUnix: endTime.Unix(), @@ -148,34 +140,36 @@ func GetOverviewDuration(ctx *context.Context) { models.LoadSpecs4CloudbrainInfo(cloudbrains) for _, cloudbrain := range cloudbrains { - if cloudbrain.Cloudbrain.WorkServerNumber >= 1 { - worker_server_num = cloudbrain.Cloudbrain.WorkServerNumber + cloudbrain = cloudbrainService.UpdateCloudbrainAiCenter(cloudbrain) + if cloudbrain.Cloudbrain.Spec != nil { + cardNum = int64(cloudbrain.Cloudbrain.Spec.AccCardsNum) } else { - worker_server_num = 1 - } - if cloudbrain.Cloudbrain.Spec == nil { cardNum = 1 + } + if cloudbrain.Cloudbrain.WorkServerNumber >= 1 { + workServerNumber = int64(cloudbrain.Cloudbrain.WorkServerNumber) } else { - cardNum = cloudbrain.Cloudbrain.Spec.AccCardsNum + workServerNumber = 1 } - duration := cloudbrain.Duration - durationSum := cloudbrain.Duration * int64(worker_server_num) * int64(cardNum) + duration := models.ConvertStrToDuration(cloudbrain.TrainJobDuration) + CardDuration := workServerNumber * int64(cardNum) * duration + if cloudbrain.Cloudbrain.Type == models.TypeCloudBrainOne { cloudBrainOneDuration += duration - cloudBrainOneCardDuSum += durationSum + cloudBrainOneCardDuSum += CardDuration } else if cloudbrain.Cloudbrain.Type == models.TypeCloudBrainTwo { cloudBrainTwoDuration += duration - cloudBrainTwoCardDuSum += durationSum + cloudBrainTwoCardDuSum += CardDuration } else if cloudbrain.Cloudbrain.Type == models.TypeC2Net { c2NetDuration += duration - c2NetCardDuSum += durationSum + c2NetCardDuSum += CardDuration } else if cloudbrain.Cloudbrain.Type == models.TypeCDCenter { cDCenterDuration += duration - cDNetCardDuSum += durationSum + cDNetCardDuSum += CardDuration } durationAllSum += duration - cardDuSum += durationSum + cardDuSum += CardDuration } ctx.JSON(http.StatusOK, map[string]interface{}{ "cloudBrainOneCardDuSum": 
cloudBrainOneCardDuSum, @@ -192,6 +186,28 @@ func GetOverviewDuration(ctx *context.Context) { }) } +func GetCloudbrainCardDuration(task models.Cloudbrain) string { + cardNum := int(0) + spec, err := resource.GetCloudbrainSpec(task.ID) + if err != nil { + log.Info("error:" + err.Error()) + return "" + } + if spec != nil { + cardNum = spec.AccCardsNum + } else { + cardNum = 1 + } + var workServerNumber int64 + if task.WorkServerNumber >= 1 { + workServerNumber = int64(task.WorkServerNumber) + } else { + workServerNumber = 1 + } + cardDuration := models.ConvertDurationToStr(workServerNumber * int64(cardNum) * task.Duration) + return cardDuration +} + func GetAllCloudbrainsTrend(ctx *context.Context) { queryType := ctx.QueryTrim("type") @@ -703,6 +719,30 @@ func GetCloudbrainsDetailData(ctx *context.Context) { aiCenter := ctx.Query("aiCenter") needDeleteInfo := ctx.Query("needDeleteInfo") + if cloudBrainType == models.TypeCloudBrainOne && aiCenter == models.AICenterOfCloudBrainOne { + aiCenter = "" + } + if cloudBrainType == models.TypeCloudBrainTwo && aiCenter == models.AICenterOfCloudBrainTwo { + aiCenter = "" + } + if cloudBrainType == models.TypeCDCenter && aiCenter == models.AICenterOfChengdu { + aiCenter = "" + } + if cloudBrainType == models.TypeCloudBrainAll { + if aiCenter == models.AICenterOfCloudBrainOne { + cloudBrainType = models.TypeCloudBrainOne + aiCenter = "" + } + if aiCenter == models.AICenterOfCloudBrainTwo { + cloudBrainType = models.TypeCloudBrainTwo + aiCenter = "" + } + if aiCenter == models.AICenterOfChengdu { + cloudBrainType = models.TypeCDCenter + aiCenter = "" + } + } + page := ctx.QueryInt("page") pageSize := ctx.QueryInt("pagesize") if page <= 0 { @@ -732,7 +772,7 @@ func GetCloudbrainsDetailData(ctx *context.Context) { keyword := strings.Trim(ctx.Query("q"), " ") - ciTasks, _, err := models.CloudbrainAll(&models.CloudbrainsOptions{ + ciTasks, count, err := models.CloudbrainAll(&models.CloudbrainsOptions{ ListOptions: models.ListOptions{ Page: page, PageSize: pageSize, @@ -747,8 +787,8 @@ func GetCloudbrainsDetailData(ctx *context.Context) { NeedRepoInfo: true, BeginTimeUnix: int64(recordBeginTime), EndTimeUnix: endTime.Unix(), - // AiCenter: aiCenter, - NeedDeleteInfo: needDeleteInfo, + AiCenter: aiCenter, + NeedDeleteInfo: needDeleteInfo, }) if err != nil { ctx.ServerError("Get job failed:", err) @@ -758,45 +798,43 @@ func GetCloudbrainsDetailData(ctx *context.Context) { nilTime := time.Time{} tasks := []models.TaskDetail{} for i, task := range ciTasks { - if aiCenter == "" || aiCenter == task.Cloudbrain.Spec.AiCenterCode { - ciTasks[i].Cloudbrain.ComputeResource = task.ComputeResource - var taskDetail models.TaskDetail - taskDetail.ID = ciTasks[i].Cloudbrain.ID - taskDetail.JobID = ciTasks[i].Cloudbrain.JobID - taskDetail.JobName = ciTasks[i].JobName - taskDetail.DisplayJobName = ciTasks[i].DisplayJobName - taskDetail.Status = ciTasks[i].Status - taskDetail.JobType = ciTasks[i].JobType - taskDetail.CreatedUnix = ciTasks[i].Cloudbrain.CreatedUnix - taskDetail.RunTime = ciTasks[i].Cloudbrain.TrainJobDuration - taskDetail.StartTime = ciTasks[i].StartTime - taskDetail.EndTime = ciTasks[i].EndTime - taskDetail.ComputeResource = ciTasks[i].ComputeResource - taskDetail.Type = ciTasks[i].Cloudbrain.Type - taskDetail.UserName = ciTasks[i].User.Name - taskDetail.RepoID = ciTasks[i].RepoID - if ciTasks[i].Repo != nil { - taskDetail.RepoName = ciTasks[i].Repo.OwnerName + "/" + ciTasks[i].Repo.Name - taskDetail.RepoAlias = ciTasks[i].Repo.OwnerName + "/" + 
ciTasks[i].Repo.Alias - } - if ciTasks[i].Cloudbrain.WorkServerNumber >= 1 { - taskDetail.WorkServerNum = int64(ciTasks[i].Cloudbrain.WorkServerNumber) - } else { - taskDetail.WorkServerNum = 1 - } - taskDetail.CardDuration = repo.GetCloudbrainCardDuration(ciTasks[i].Cloudbrain) - taskDetail.WaitTime = repo.GetCloudbrainWaitTime(ciTasks[i].Cloudbrain) + task = cloudbrainService.UpdateCloudbrainAiCenter(task) + var taskDetail models.TaskDetail + taskDetail.ID = ciTasks[i].Cloudbrain.ID + taskDetail.JobID = ciTasks[i].Cloudbrain.JobID + taskDetail.JobName = ciTasks[i].JobName + taskDetail.DisplayJobName = ciTasks[i].DisplayJobName + taskDetail.Status = ciTasks[i].Status + taskDetail.JobType = ciTasks[i].JobType + taskDetail.CreatedUnix = ciTasks[i].Cloudbrain.CreatedUnix + taskDetail.RunTime = ciTasks[i].Cloudbrain.TrainJobDuration + taskDetail.StartTime = ciTasks[i].StartTime + taskDetail.EndTime = ciTasks[i].EndTime + taskDetail.ComputeResource = ciTasks[i].ComputeResource + taskDetail.Type = ciTasks[i].Cloudbrain.Type + taskDetail.UserName = ciTasks[i].User.Name + taskDetail.RepoID = ciTasks[i].RepoID + taskDetail.AiCenter = repo.GetAiCenterNameByCode(task.Cloudbrain.AiCenter, ctx.Language()) + if ciTasks[i].Repo != nil { + taskDetail.RepoName = ciTasks[i].Repo.OwnerName + "/" + ciTasks[i].Repo.Name + taskDetail.RepoAlias = ciTasks[i].Repo.OwnerName + "/" + ciTasks[i].Repo.Alias + } + if ciTasks[i].Cloudbrain.WorkServerNumber >= 1 { + taskDetail.WorkServerNum = int64(ciTasks[i].Cloudbrain.WorkServerNumber) + } else { + taskDetail.WorkServerNum = 1 + } + taskDetail.CardDuration = repo.GetCloudbrainCardDuration(ciTasks[i].Cloudbrain) + taskDetail.WaitTime = repo.GetCloudbrainWaitTime(ciTasks[i].Cloudbrain) - if ciTasks[i].Cloudbrain.DeletedAt != nilTime || ciTasks[i].Repo == nil { - taskDetail.IsDelete = true - } else { - taskDetail.IsDelete = false - } - taskDetail.Spec = ciTasks[i].Spec - tasks = append(tasks, taskDetail) + if ciTasks[i].Cloudbrain.DeletedAt != nilTime || ciTasks[i].Repo == nil { + taskDetail.IsDelete = true + } else { + taskDetail.IsDelete = false } + taskDetail.Spec = ciTasks[i].Spec + tasks = append(tasks, taskDetail) } - count := int64(len(tasks)) pager := context.NewPagination(int(count), pageSize, page, getTotalPage(count, pageSize)) pager.SetDefaultParams(ctx) pager.AddParam(ctx, "listType", "ListType") @@ -930,6 +968,8 @@ func GetWaittingTop(ctx *context.Context) { taskDetail.RepoID = ciTasks[i].RepoID if ciTasks[i].Repo != nil { taskDetail.RepoName = ciTasks[i].Repo.OwnerName + "/" + ciTasks[i].Repo.Name + } else { + taskDetail.RepoName = "" } WaitTimeInt := time.Now().Unix() - ciTasks[i].Cloudbrain.CreatedUnix.AsTime().Unix() taskDetail.WaitTime = models.ConvertDurationToStr(WaitTimeInt) @@ -937,6 +977,13 @@ func GetWaittingTop(ctx *context.Context) { if WaitTimeInt < 0 { taskDetail.WaitTime = "00:00:00" } + + taskDetail.ID = ciTasks[i].Cloudbrain.ID + taskDetail.ComputeResource = ciTasks[i].Cloudbrain.ComputeResource + taskDetail.JobType = ciTasks[i].Cloudbrain.JobType + taskDetail.JobID = ciTasks[i].Cloudbrain.JobID + taskDetail.Type = ciTasks[i].Cloudbrain.Type + tasks = append(tasks, taskDetail) } ctx.JSON(http.StatusOK, map[string]interface{}{ @@ -963,6 +1010,12 @@ func GetRunningTop(ctx *context.Context) { taskDetail.RepoName = ciTasks[i].Repo.OwnerName + "/" + ciTasks[i].Repo.Name } + taskDetail.ID = ciTasks[i].Cloudbrain.ID + taskDetail.ComputeResource = ciTasks[i].Cloudbrain.ComputeResource + taskDetail.JobType = ciTasks[i].Cloudbrain.JobType + 
taskDetail.JobID = ciTasks[i].Cloudbrain.JobID + taskDetail.Type = ciTasks[i].Cloudbrain.Type + tasks = append(tasks, taskDetail) } ctx.JSON(http.StatusOK, map[string]interface{}{ @@ -1176,6 +1229,12 @@ func getMonthCloudbrainInfo(beginTime time.Time, endTime time.Time) ([]DateCloud } func DownloadCloudBrainBoard(ctx *context.Context) { + recordCloudbrain, err := models.GetRecordBeginTime() + if err != nil { + log.Error("Can not get recordCloudbrain", err) + ctx.Error(http.StatusBadRequest, ctx.Tr("repo.record_begintime_get_err")) + return + } page := 1 @@ -1184,14 +1243,20 @@ func DownloadCloudBrainBoard(ctx *context.Context) { var cloudBrain = ctx.Tr("repo.cloudbrain") fileName := getCloudbrainFileName(cloudBrain) + recordBeginTime := recordCloudbrain[0].Cloudbrain.CreatedUnix + now := time.Now() + endTime := now + _, total, err := models.CloudbrainAll(&models.CloudbrainsOptions{ ListOptions: models.ListOptions{ Page: page, PageSize: pageSize, }, - Type: models.TypeCloudBrainAll, - NeedRepoInfo: false, + Type: models.TypeCloudBrainAll, + BeginTimeUnix: int64(recordBeginTime), + EndTimeUnix: endTime.Unix(), }) + log.Info("totalcountisis:", total) if err != nil { log.Warn("Can not get cloud brain info", err) @@ -1216,8 +1281,10 @@ func DownloadCloudBrainBoard(ctx *context.Context) { Page: page, PageSize: pageSize, }, - Type: models.TypeCloudBrainAll, - NeedRepoInfo: true, + Type: models.TypeCloudBrainAll, + BeginTimeUnix: int64(recordBeginTime), + EndTimeUnix: endTime.Unix(), + NeedRepoInfo: true, }) if err != nil { log.Warn("Can not get cloud brain info", err) @@ -1225,7 +1292,8 @@ func DownloadCloudBrainBoard(ctx *context.Context) { } models.LoadSpecs4CloudbrainInfo(pageRecords) for _, record := range pageRecords { - + record = cloudbrainService.UpdateCloudbrainAiCenter(record) + record.Cloudbrain.AiCenter = repo.GetAiCenterNameByCode(record.Cloudbrain.AiCenter, ctx.Language()) for k, v := range allCloudbrainValues(row, record, ctx) { f.SetCellValue(cloudBrain, k, v) } @@ -1264,7 +1332,7 @@ func allCloudbrainValues(row int, rs *models.CloudbrainInfo, ctx *context.Contex getCellName("G", row): rs.TrainJobDuration, getCellName("H", row): repo.GetCloudbrainCardDuration(rs.Cloudbrain), getCellName("I", row): getBrainStartTime(rs), getCellName("J", row): getBrainEndTime(rs), getCellName("K", row): rs.ComputeResource, getCellName("L", row): getCloudbrainCardType(rs), - getCellName("M", row): getWorkServerNum(rs), getCellName("N", row): repo.GetCloudbrainAiCenter(rs.Cloudbrain, ctx), + getCellName("M", row): getWorkServerNum(rs), getCellName("N", row): rs.Cloudbrain.AiCenter, getCellName("O", row): getCloudbrainFlavorName(rs), getCellName("P", row): rs.Name, getCellName("Q", row): getBrainRepo(rs), getCellName("R", row): rs.JobName, getCellName("S", row): getBrainDeleteTime(rs), } @@ -1412,12 +1480,17 @@ func getCloudbrainTimePeroid(ctx *context.Context, recordBeginTime time.Time) (t } func GetCloudbrainResourceOverview(ctx *context.Context) { + var recordBeginTime timeutil.TimeStamp recordCloudbrainDuration, err := models.GetDurationRecordBeginTime() if err != nil { log.Error("Can not get GetDurationRecordBeginTime", err) return } - recordBeginTime := recordCloudbrainDuration[0].CreatedUnix + if len(recordCloudbrainDuration) > 0 && err == nil { + recordBeginTime = recordCloudbrainDuration[0].DateTimeUnix + } else { + recordBeginTime = timeutil.TimeStamp(time.Now().Unix()) + } recordUpdateTime := time.Now().Unix() resourceQueues, err := models.GetCanUseCardInfo() if err != nil { @@ -1428,11 
+1501,12 @@ func GetCloudbrainResourceOverview(ctx *context.Context) { C2NetResourceDetail := []models.ResourceDetail{} for _, resourceQueue := range resourceQueues { if resourceQueue.Cluster == models.OpenICluster { + aiCenterName := repo.GetAiCenterNameByCode(resourceQueue.AiCenterCode, ctx.Language()) var resourceDetail models.ResourceDetail resourceDetail.QueueCode = resourceQueue.QueueCode resourceDetail.Cluster = resourceQueue.Cluster resourceDetail.AiCenterCode = resourceQueue.AiCenterCode - resourceDetail.AiCenterName = resourceQueue.AiCenterName + "/" + resourceQueue.AiCenterCode + resourceDetail.AiCenterName = resourceQueue.AiCenterCode + "/" + aiCenterName resourceDetail.ComputeResource = resourceQueue.ComputeResource resourceDetail.AccCardType = resourceQueue.AccCardType + "(" + resourceQueue.ComputeResource + ")" resourceDetail.CardsTotalNum = resourceQueue.CardsTotalNum @@ -1440,11 +1514,12 @@ func GetCloudbrainResourceOverview(ctx *context.Context) { OpenIResourceDetail = append(OpenIResourceDetail, resourceDetail) } if resourceQueue.Cluster == models.C2NetCluster { + aiCenterName := repo.GetAiCenterNameByCode(resourceQueue.AiCenterCode, ctx.Language()) var resourceDetail models.ResourceDetail resourceDetail.QueueCode = resourceQueue.QueueCode resourceDetail.Cluster = resourceQueue.Cluster resourceDetail.AiCenterCode = resourceQueue.AiCenterCode - resourceDetail.AiCenterName = resourceQueue.AiCenterName + "/" + resourceQueue.AiCenterCode + resourceDetail.AiCenterName = resourceQueue.AiCenterCode + "/" + aiCenterName resourceDetail.ComputeResource = resourceQueue.ComputeResource resourceDetail.AccCardType = resourceQueue.AccCardType + "(" + resourceQueue.ComputeResource + ")" resourceDetail.CardsTotalNum = resourceQueue.CardsTotalNum @@ -1542,6 +1617,7 @@ func getBeginAndEndTime(ctx *context.Context) (time.Time, time.Time) { now := time.Now() beginTimeStr := ctx.QueryTrim("beginTime") endTimeStr := ctx.QueryTrim("endTime") + var brainRecordBeginTime time.Time var beginTime time.Time var endTime time.Time @@ -1554,7 +1630,12 @@ func getBeginAndEndTime(ctx *context.Context) (time.Time, time.Time) { ctx.Error(http.StatusBadRequest, ctx.Tr("repo.record_begintime_get_err")) return beginTime, endTime } - brainRecordBeginTime := recordCloudbrainDuration[0].CreatedUnix.AsTime() + if len(recordCloudbrainDuration) > 0 && err == nil { + brainRecordBeginTime = recordCloudbrainDuration[0].DateTimeUnix.AsTime() + } else { + brainRecordBeginTime = now + } + beginTime = brainRecordBeginTime endTime = now } else if queryType == "today" { @@ -1596,7 +1677,11 @@ func getBeginAndEndTime(ctx *context.Context) (time.Time, time.Time) { ctx.Error(http.StatusBadRequest, ctx.Tr("repo.record_begintime_get_err")) return beginTime, endTime } - brainRecordBeginTime := recordCloudbrainDuration[0].CreatedUnix.AsTime() + if len(recordCloudbrainDuration) > 0 && err == nil { + brainRecordBeginTime = recordCloudbrainDuration[0].DateTimeUnix.AsTime() + } else { + brainRecordBeginTime = now + } beginTime = brainRecordBeginTime endTime = now } else { @@ -1627,7 +1712,7 @@ func getAiCenterUsageDuration(beginTime time.Time, endTime time.Time, cloudbrain usageRate := float64(0) for _, cloudbrainStatistic := range cloudbrainStatistics { - if int64(cloudbrainStatistic.CreatedUnix) >= beginTime.Unix() && int64(cloudbrainStatistic.CreatedUnix) < endTime.Unix() { + if int64(cloudbrainStatistic.DateTimeUnix) >= beginTime.Unix() && int64(cloudbrainStatistic.DateTimeUnix) < endTime.Unix() { totalDuration += 
cloudbrainStatistic.CardsTotalDuration usageDuration += cloudbrainStatistic.CardsUseDuration } @@ -1659,28 +1744,29 @@ func getDurationStatistic(beginTime time.Time, endTime time.Time) (models.Durati return OpenIDurationRate, C2NetDurationRate, 0 } for _, cloudbrainStatistic := range cardDurationStatistics { + aiCenterName := cloudbrainStatistic.AiCenterCode + "/" + repo.GetAiCenterNameByCode(cloudbrainStatistic.AiCenterCode, "zh-CN") if cloudbrainStatistic.Cluster == models.OpenICluster { - if _, ok := OpenITotalDuration[cloudbrainStatistic.AiCenterName]; !ok { - OpenITotalDuration[cloudbrainStatistic.AiCenterName] = cloudbrainStatistic.CardsTotalDuration + if _, ok := OpenITotalDuration[aiCenterName]; !ok { + OpenITotalDuration[aiCenterName] = cloudbrainStatistic.CardsTotalDuration } else { - OpenITotalDuration[cloudbrainStatistic.AiCenterName] += cloudbrainStatistic.CardsTotalDuration + OpenITotalDuration[aiCenterName] += cloudbrainStatistic.CardsTotalDuration } - if _, ok := OpenIUsageDuration[cloudbrainStatistic.AiCenterName]; !ok { - OpenIUsageDuration[cloudbrainStatistic.AiCenterName] = cloudbrainStatistic.CardsUseDuration + if _, ok := OpenIUsageDuration[aiCenterName]; !ok { + OpenIUsageDuration[aiCenterName] = cloudbrainStatistic.CardsUseDuration } else { - OpenIUsageDuration[cloudbrainStatistic.AiCenterName] += cloudbrainStatistic.CardsUseDuration + OpenIUsageDuration[aiCenterName] += cloudbrainStatistic.CardsUseDuration } } if cloudbrainStatistic.Cluster == models.C2NetCluster { - if _, ok := C2NetTotalDuration[cloudbrainStatistic.AiCenterName]; !ok { - C2NetTotalDuration[cloudbrainStatistic.AiCenterName] = cloudbrainStatistic.CardsTotalDuration + if _, ok := C2NetTotalDuration[aiCenterName]; !ok { + C2NetTotalDuration[aiCenterName] = cloudbrainStatistic.CardsTotalDuration } else { - C2NetTotalDuration[cloudbrainStatistic.AiCenterName] += cloudbrainStatistic.CardsTotalDuration + C2NetTotalDuration[aiCenterName] += cloudbrainStatistic.CardsTotalDuration } - if _, ok := C2NetUsageDuration[cloudbrainStatistic.AiCenterName]; !ok { - C2NetUsageDuration[cloudbrainStatistic.AiCenterName] = cloudbrainStatistic.CardsUseDuration + if _, ok := C2NetUsageDuration[aiCenterName]; !ok { + C2NetUsageDuration[aiCenterName] = cloudbrainStatistic.CardsUseDuration } else { - C2NetUsageDuration[cloudbrainStatistic.AiCenterName] += cloudbrainStatistic.CardsUseDuration + C2NetUsageDuration[aiCenterName] += cloudbrainStatistic.CardsUseDuration } } } @@ -1690,16 +1776,17 @@ func getDurationStatistic(beginTime time.Time, endTime time.Time) (models.Durati return OpenIDurationRate, C2NetDurationRate, 0 } for _, v := range ResourceAiCenterRes { + aiCenterName := v.AiCenterCode + "/" + repo.GetAiCenterNameByCode(v.AiCenterCode, "zh-CN") if cutString(v.AiCenterCode, 4) == cutString(models.AICenterOfCloudBrainOne, 4) { - if _, ok := OpenIUsageDuration[v.AiCenterName]; !ok { - OpenIUsageDuration[v.AiCenterName] = 0 + if _, ok := OpenIUsageDuration[aiCenterName]; !ok { + OpenIUsageDuration[aiCenterName] = 0 } - if _, ok := OpenITotalDuration[v.AiCenterName]; !ok { - OpenITotalDuration[v.AiCenterName] = 0 + if _, ok := OpenITotalDuration[aiCenterName]; !ok { + OpenITotalDuration[aiCenterName] = 0 } } else { - if _, ok := C2NetUsageDuration[v.AiCenterName]; !ok { - C2NetUsageDuration[v.AiCenterName] = 0 + if _, ok := C2NetUsageDuration[aiCenterName]; !ok { + C2NetUsageDuration[aiCenterName] = 0 } } } @@ -1716,7 +1803,7 @@ func getDurationStatistic(beginTime time.Time, endTime time.Time) (models.Durati for _, v := 
range OpenITotalDuration { totalCanUse += float64(v) } - for _, v := range OpenIUsageRate { + for _, v := range OpenIUsageDuration { totalUse += float64(v) } if totalCanUse == 0 || totalUse == 0 { @@ -1724,6 +1811,7 @@ func getDurationStatistic(beginTime time.Time, endTime time.Time) (models.Durati } else { totalUsageRate = totalUse / totalCanUse } + delete(C2NetUsageDuration, "/") OpenIDurationRate.AiCenterTotalDurationStat = OpenITotalDuration OpenIDurationRate.AiCenterUsageDurationStat = OpenIUsageDuration @@ -1831,3 +1919,30 @@ func getHourCloudbrainDuration(beginTime time.Time, endTime time.Time, aiCenterC hourTimeStatistic.HourTimeUsageRate = hourTimeUsageRate return hourTimeStatistic, nil } + +func CloudbrainUpdateAiCenter(ctx *context.Context) { + repo.CloudbrainDurationStatisticHour() + ctx.JSON(http.StatusOK, map[string]interface{}{ + "message": 0, + }) +} + +func GetResourceQueues(ctx *context.Context) { + resourceQueues, err := models.GetCanUseCardInfo() + if err != nil { + log.Error("GetCanUseCardInfo err: %v", err) + return + } + Resource := make([]*models.ResourceQueue, 0) + aiCenterCodeMap := make(map[string]string) + for _, resourceQueue := range resourceQueues { + if _, ok := aiCenterCodeMap[resourceQueue.AiCenterCode]; !ok { + resourceQueue.AiCenterName = repo.GetAiCenterNameByCode(resourceQueue.AiCenterCode, ctx.Language()) + aiCenterCodeMap[resourceQueue.AiCenterCode] = resourceQueue.AiCenterCode + Resource = append(Resource, resourceQueue) + } + } + ctx.JSON(http.StatusOK, map[string]interface{}{ + "resourceQueues": Resource, + }) +} diff --git a/routers/api/v1/repo/mlops.go b/routers/api/v1/repo/mlops.go index 43969330d..322edc3e5 100644 --- a/routers/api/v1/repo/mlops.go +++ b/routers/api/v1/repo/mlops.go @@ -69,3 +69,17 @@ func GetRight(ctx *context.APIContext) { }) } + +func GetCloudBrainJobId(ctx *context.APIContext) { + cloudbrains, err := models.GetCloudbrainsByDisplayJobName(ctx.Repo.Repository.ID, ctx.Query("jobType"), ctx.Query("name")) + if err != nil { + log.Warn("get cloudbrain by display name failed", err) + ctx.JSON(http.StatusOK, map[string]string{"jobId": ""}) + return + } + if len(cloudbrains) > 0 { + ctx.JSON(http.StatusOK, map[string]string{"jobId": cloudbrains[0].JobID}) + return + } + ctx.JSON(http.StatusOK, map[string]string{"jobId": ""}) +} diff --git a/routers/api/v1/repo/modelarts.go b/routers/api/v1/repo/modelarts.go index 16e4997a3..127ddd835 100755 --- a/routers/api/v1/repo/modelarts.go +++ b/routers/api/v1/repo/modelarts.go @@ -6,6 +6,7 @@ package repo import ( + "code.gitea.io/gitea/modules/cloudbrain" "encoding/json" "net/http" "path" @@ -37,11 +38,14 @@ func GetModelArtsNotebook2(ctx *context.APIContext) { ) ID := ctx.Params(":id") - job, err := models.GetCloudbrainByID(ID) + + job,err := cloudbrain.GetCloudBrainByIdOrJobId(ID) + if err != nil { ctx.NotFound(err) return } + err = modelarts.HandleNotebookInfo(job) if err != nil { ctx.NotFound(err) @@ -146,7 +150,6 @@ func GetModelArtsTrainJobVersion(ctx *context.APIContext) { if len(result.JobInfo.Tasks) > 0 { if len(result.JobInfo.Tasks[0].CenterID) > 0 && len(result.JobInfo.Tasks[0].CenterName) > 0 { job.AiCenter = result.JobInfo.Tasks[0].CenterID[0] + "+" + result.JobInfo.Tasks[0].CenterName[0] - // aiCenterName = result.JobInfo.Tasks[0].CenterName[0] aiCenterName = cloudbrainService.GetAiCenterShow(job.AiCenter, ctx.Context) } } diff --git a/routers/api/v1/repo/modelmanage.go b/routers/api/v1/repo/modelmanage.go index 15260790d..3b0aed0d5 100644 --- 
a/routers/api/v1/repo/modelmanage.go +++ b/routers/api/v1/repo/modelmanage.go @@ -43,8 +43,14 @@ func QueryModelById(ctx *context.APIContext) { routerRepo.QueryModelById(ctx.Context) } +func QueryModelByName(ctx *context.APIContext) { + log.Info("QueryModelByName by api.") + routerRepo.ShowSingleModel(ctx.Context) +} + func QueryModelListForPredict(ctx *context.APIContext) { log.Info("QueryModelListForPredict by api.") + ctx.Context.SetParams("isOnlyThisRepo", "true") routerRepo.QueryModelListForPredict(ctx.Context) } @@ -88,6 +94,11 @@ func CreateModelConvert(ctx *context.APIContext) { routerRepo.SaveModelConvert(ctx.Context) } +func StopModelConvert(ctx *context.APIContext) { + log.Info("StopModelConvert by api.") + routerRepo.StopModelConvertApi(ctx.Context) +} + func ShowModelConvertPage(ctx *context.APIContext) { log.Info("ShowModelConvertPage by api.") modelResult, count, err := routerRepo.GetModelConvertPageData(ctx.Context) @@ -113,3 +124,12 @@ func QueryModelConvertById(ctx *context.APIContext) { ctx.JSON(http.StatusOK, nil) } } + +func QueryModelConvertByName(ctx *context.APIContext) { + modelResult, err := routerRepo.GetModelConvertByName(ctx.Context) + if err == nil { + ctx.JSON(http.StatusOK, modelResult) + } else { + ctx.JSON(http.StatusOK, nil) + } +} diff --git a/routers/home.go b/routers/home.go index aab760611..092b30fe3 100755 --- a/routers/home.go +++ b/routers/home.go @@ -7,6 +7,7 @@ package routers import ( "bytes" + "encoding/json" "net/http" "strconv" "strings" @@ -672,7 +673,7 @@ func NotFound(ctx *context.Context) { } func getRecommendOrg() ([]map[string]interface{}, error) { - url := setting.RecommentRepoAddr + "organizations" + url := setting.RecommentRepoAddr + "home/organizations" result, err := repository.RecommendFromPromote(url) if err != nil { @@ -745,7 +746,7 @@ func GetMapInfo(ctx *context.Context) { } func GetRankUser(index string) ([]map[string]interface{}, error) { - url := setting.RecommentRepoAddr + "user_rank_" + index + url := setting.RecommentRepoAddr + "user_rank/user_rank_" + index result, err := repository.RecommendFromPromote(url) if err != nil { @@ -756,13 +757,25 @@ func GetRankUser(index string) ([]map[string]interface{}, error) { tmpIndex := strings.Index(userRank, " ") userName := userRank score := 0 + label := "" if tmpIndex != -1 { userName = userRank[0:tmpIndex] - tmpScore, err := strconv.Atoi(userRank[tmpIndex+1:]) - if err != nil { - log.Info("convert to int error.") + left := userRank[tmpIndex+1:] + tmpIndex1 := strings.Index(left, " ") + if tmpIndex1 != -1 { + tmpScore, err := strconv.Atoi(left[0:tmpIndex1]) + if err != nil { + log.Info("convert to int error.") + } + score = tmpScore + label = left[tmpIndex1+1:] + } else { + tmpScore, err := strconv.Atoi(left[tmpIndex+1:]) + if err != nil { + log.Info("convert to int error.") + } + score = tmpScore } - score = tmpScore } user, err := models.GetUserByName(userName) if err == nil { @@ -772,6 +785,7 @@ func GetRankUser(index string) ([]map[string]interface{}, error) { userMap["FullName"] = user.FullName userMap["HomeLink"] = user.HomeLink() userMap["ID"] = user.ID + userMap["Label"] = label userMap["Avatar"] = user.RelAvatarLink() userMap["Score"] = score resultOrg = append(resultOrg, userMap) @@ -792,25 +806,54 @@ func GetUserRankFromPromote(ctx *context.Context) { ctx.JSON(200, resultUserRank) } +func getMapContent(fileName string) []map[string]string { + url := setting.RecommentRepoAddr + fileName + result, err := repository.RecommendContentFromPromote(url) + remap := 
make([]map[string]string, 0) + if err == nil { + json.Unmarshal([]byte(result), &remap) + } + return remap +} + +func HomeNoticeTmpl(ctx *context.Context) { + ctx.Data["url_params"] = "" + ctx.HTML(200, "notice") +} + func RecommendHomeInfo(ctx *context.Context) { resultOrg, err := getRecommendOrg() if err != nil { log.Info("error." + err.Error()) } - resultRepo, err := repository.GetRecommendRepoFromPromote("projects") + repoMap := getMapContent("home/projects") + resultRepo, err := repository.GetRecommendRepoFromPromote(repoMap) if err != nil { log.Info("error." + err.Error()) } - resultImage, err := getImageInfo("picture_info") - if err != nil { - log.Info("error." + err.Error()) - } - + resultActivityInfo := getMapContent("home/activity_info") mapInterface := make(map[string]interface{}) mapInterface["org"] = resultOrg mapInterface["repo"] = resultRepo - mapInterface["image"] = resultImage - //mapInterface["cloudbrain"] = resultCloudBrain + mapInterface["activity"] = resultActivityInfo + + user_experience := getMapContent("home/user_experience") + for _, amap := range user_experience { + userId := amap["userid"] + userIntId, _ := strconv.Atoi(userId) + user, err := models.GetUserByID(int64(userIntId)) + if err == nil { + amap["name"] = user.Name + amap["fullname"] = user.FullName + amap["detail"] = user.Description + amap["avatar"] = user.AvatarLink() + } + } + mapInterface["user_experience"] = user_experience + dataset, err := models.QueryDatasetGroupByTask() + if err == nil { + mapInterface["dataset"] = dataset + } ctx.JSON(http.StatusOK, mapInterface) } @@ -824,4 +867,4 @@ func HomePrivacy(ctx *context.Context) { func HomeResoruceDesc(ctx *context.Context) { ctx.HTML(200, tplResoruceDesc) -} \ No newline at end of file +} diff --git a/routers/private/internal.go b/routers/private/internal.go index 3d67afe8f..14b0f05de 100755 --- a/routers/private/internal.go +++ b/routers/private/internal.go @@ -55,6 +55,7 @@ func RegisterRoutes(m *macaron.Macaron) { m.Post("/task/history_handle/duration", repo.HandleTaskWithNoDuration) m.Post("/task/history_handle/aicenter", repo.HandleTaskWithAiCenter) m.Post("/resources/specification/handle_historical_task", admin.RefreshHistorySpec) + m.Post("/duration_statisctic/history_handle", repo.CloudbrainUpdateHistoryData) }, CheckInternalToken) } diff --git a/routers/repo/ai_model_convert.go b/routers/repo/ai_model_convert.go index 962c76aae..9839a5041 100644 --- a/routers/repo/ai_model_convert.go +++ b/routers/repo/ai_model_convert.go @@ -49,7 +49,7 @@ const ( //TensorFlowNpuBootFile = "convert_tensorflow.py" //TensorFlowGpuBootFile = "convert_tensorflow_gpu.py" - //ConvertRepoPath = "https://git.openi.org.cn/zouap/npu_test" + //ConvertRepoPath = "https://openi.pcl.ac.cn/zouap/npu_test" CONVERT_FORMAT_ONNX = 0 CONVERT_FORMAT_TRT = 1 @@ -573,13 +573,10 @@ func deleteCloudBrainTask(task *models.AiModelConvert) { } } -func StopModelConvert(ctx *context.Context) { - id := ctx.Params(":id") - log.Info("stop model convert start.id=" + id) +func stopModelConvert(id string) error { job, err := models.QueryModelConvertById(id) if err != nil { - ctx.ServerError("Not found task.", err) - return + return err } if job.IsGpuTrainTask() { err = cloudbrain.StopJob(job.CloudBrainTaskId) @@ -600,6 +597,35 @@ func StopModelConvert(ctx *context.Context) { err = models.UpdateModelConvert(job) if err != nil { log.Error("UpdateModelConvert failed:", err) + return err + } + return nil +} + +func StopModelConvertApi(ctx *context.Context) { + id := ctx.Params(":id") + 
log.Info("stop model convert start.id=" + id) + err := stopModelConvert(id) + if err == nil { + ctx.JSON(200, map[string]string{ + "code": "0", + "msg": "succeed", + }) + } else { + ctx.JSON(200, map[string]string{ + "code": "1", + "msg": err.Error(), + }) + } +} + +func StopModelConvert(ctx *context.Context) { + id := ctx.Params(":id") + log.Info("stop model convert start.id=" + id) + err := stopModelConvert(id) + if err != nil { + ctx.ServerError("Not found task.", err) + return } ctx.Redirect(setting.AppSubURL + ctx.Repo.RepoLink + "/modelmanage/convert_model") } @@ -620,7 +646,7 @@ func ShowModelConvertInfo(ctx *context.Context) { return } ctx.Data["Name"] = job.Name - ctx.Data["canDownload"] = isOper(ctx, job.UserId) + ctx.Data["canDownload"] = isOperModifyOrDelete(ctx, job.UserId) user, err := models.GetUserByID(job.UserId) if err == nil { job.UserName = user.Name @@ -732,6 +758,11 @@ func GetModelConvertById(ctx *context.Context) (*models.AiModelConvert, error) { return models.QueryModelConvertById(id) } +func GetModelConvertByName(ctx *context.Context) ([]*models.AiModelConvert, error) { + name := ctx.Query("name") + return models.QueryModelConvertByName(name, ctx.Repo.Repository.ID) +} + func GetModelConvertPageData(ctx *context.Context) ([]*models.AiModelConvert, int64, error) { page := ctx.QueryInt("page") if page <= 0 { @@ -755,7 +786,7 @@ func GetModelConvertPageData(ctx *context.Context) ([]*models.AiModelConvert, in } userIds := make([]int64, len(modelResult)) for i, model := range modelResult { - model.IsCanOper = isOper(ctx, model.UserId) + model.IsCanOper = isOperModifyOrDelete(ctx, model.UserId) model.IsCanDelete = isCanDelete(ctx, model.UserId) userIds[i] = model.UserId } @@ -828,5 +859,4 @@ func ModelConvertDownloadModel(ctx *context.Context) { http.Redirect(ctx.Resp, ctx.Req.Request, url, http.StatusTemporaryRedirect) } } - } diff --git a/routers/repo/ai_model_manage.go b/routers/repo/ai_model_manage.go index 7eedb9bc4..6e6889c32 100644 --- a/routers/repo/ai_model_manage.go +++ b/routers/repo/ai_model_manage.go @@ -93,7 +93,7 @@ func saveModelByParameters(jobId string, versionName string, name string, versio log.Info("accuracyJson=" + string(accuracyJson)) aiTask.ContainerIp = "" aiTaskJson, _ := json.Marshal(aiTask) - + isPrivate := ctx.QueryBool("isPrivate") model := &models.AiModelManage{ ID: id, Version: version, @@ -114,6 +114,7 @@ func saveModelByParameters(jobId string, versionName string, name string, versio TrainTaskInfo: string(aiTaskJson), Accuracy: string(accuracyJson), Status: STATUS_COPY_MODEL, + IsPrivate: isPrivate, } err = models.SaveModelToDb(model) @@ -216,6 +217,7 @@ func SaveLocalModel(ctx *context.Context) { description := ctx.Query("description") engine := ctx.QueryInt("engine") taskType := ctx.QueryInt("type") + isPrivate := ctx.QueryBool("isPrivate") modelActualPath := "" if taskType == models.TypeCloudBrainOne { destKeyNamePrefix := Model_prefix + models.AttachmentRelativePath(id) + "/" @@ -262,6 +264,7 @@ func SaveLocalModel(ctx *context.Context) { TrainTaskInfo: "", Accuracy: "", Status: STATUS_FINISHED, + IsPrivate: isPrivate, } err := models.SaveModelToDb(model) @@ -554,20 +557,6 @@ func deleteModelByID(ctx *context.Context, id string) error { return err } -func QueryModelByParameters(repoId int64, page int) ([]*models.AiModelManage, int64, error) { - - return models.QueryModel(&models.AiModelQueryOptions{ - ListOptions: models.ListOptions{ - Page: page, - PageSize: setting.UI.IssuePagingNum, - }, - RepoID: repoId, - Type: -1, - New: 
MODEL_LATEST, - Status: -1, - }) -} - func DownloadMultiModelFile(ctx *context.Context) { log.Info("DownloadMultiModelFile start.") id := ctx.Query("id") @@ -578,7 +567,7 @@ func DownloadMultiModelFile(ctx *context.Context) { ctx.ServerError("no such model:", err) return } - if !isOper(ctx, task.UserId) { + if !isCanDownload(ctx, task) { ctx.NotFound(ctx.Req.URL.RequestURI(), nil) return } @@ -806,7 +795,7 @@ func DownloadSingleModelFile(ctx *context.Context) { ctx.ServerError("no such model:", err) return } - if !isOper(ctx, task.UserId) { + if !isCanDownload(ctx, task) { ctx.NotFound(ctx.Req.URL.RequestURI(), nil) return } @@ -874,8 +863,9 @@ func QueryModelById(ctx *context.Context) { id := ctx.Query("id") model, err := models.QueryModelById(id) if err == nil { - model.IsCanOper = isOper(ctx, model.UserId) + model.IsCanOper = isOperModifyOrDelete(ctx, model.UserId) model.IsCanDelete = isCanDelete(ctx, model.UserId) + model.IsCanDownload = isCanDownload(ctx, model) removeIpInfo(model) ctx.JSON(http.StatusOK, model) } else { @@ -891,7 +881,8 @@ func ShowSingleModel(ctx *context.Context) { userIds := make([]int64, len(models)) for i, model := range models { - model.IsCanOper = isOper(ctx, model.UserId) + model.IsCanOper = isOperModifyOrDelete(ctx, model.UserId) + model.IsCanDownload = isCanDownload(ctx, model) model.IsCanDelete = isCanDelete(ctx, model.UserId) userIds[i] = model.UserId } @@ -941,7 +932,8 @@ func ShowOneVersionOtherModel(ctx *context.Context) { userIds := make([]int64, len(aimodels)) for i, model := range aimodels { - model.IsCanOper = isOper(ctx, model.UserId) + model.IsCanOper = isOperModifyOrDelete(ctx, model.UserId) + model.IsCanDownload = isCanDownload(ctx, model) model.IsCanDelete = isCanDelete(ctx, model.UserId) userIds[i] = model.UserId } @@ -964,6 +956,7 @@ func ShowOneVersionOtherModel(ctx *context.Context) { } func SetModelCount(ctx *context.Context) { + isQueryPrivate := isQueryPrivateModel(ctx) repoId := ctx.Repo.Repository.ID Type := -1 _, count, _ := models.QueryModel(&models.AiModelQueryOptions{ @@ -971,10 +964,12 @@ func SetModelCount(ctx *context.Context) { Page: 1, PageSize: 2, }, - RepoID: repoId, - Type: Type, - New: MODEL_LATEST, - Status: -1, + RepoID: repoId, + Type: Type, + New: MODEL_LATEST, + IsOnlyThisRepo: true, + Status: -1, + IsQueryPrivate: isQueryPrivate, }) ctx.Data["MODEL_COUNT"] = count } @@ -1001,27 +996,87 @@ func isQueryRight(ctx *context.Context) bool { } } +func isCanDownload(ctx *context.Context, task *models.AiModelManage) bool { + if ctx.User == nil { + return false + } + isCollaborator, err := ctx.Repo.Repository.IsCollaborator(ctx.User.ID) + if err != nil { + log.Info("query error.") + } + isTeamMember, err := ctx.Repo.Repository.IsInRepoTeam(ctx.User.ID) + if err != nil { + log.Info("query IsInRepoTeam error." + err.Error()) + } + if ctx.User.IsAdmin || ctx.User.ID == task.UserId || isCollaborator || isTeamMember { + return true + } + if ctx.Repo.IsOwner() { + return true + } + if !task.IsPrivate { + return true + } + return false +} + +func isQueryPrivateModel(ctx *context.Context) bool { + if ctx.User == nil { + return false + } + isCollaborator, err := ctx.Repo.Repository.IsCollaborator(ctx.User.ID) + if err != nil { + log.Info("query IsCollaborator error." + err.Error()) + } + isTeamMember, err := ctx.Repo.Repository.IsInRepoTeam(ctx.User.ID) + if err != nil { + log.Info("query IsInRepoTeam error." 
+ err.Error()) + } + if ctx.User.IsAdmin || isCollaborator || isTeamMember { + return true + } + if ctx.Repo.IsOwner() { + return true + } + return false +} + func isCanDelete(ctx *context.Context, modelUserId int64) bool { if ctx.User == nil { return false } - if ctx.User.IsAdmin || ctx.User.ID == modelUserId { + if ctx.User.ID == modelUserId { + return true + } + return isAdminRight(ctx) +} + +func isAdminRight(ctx *context.Context) bool { + if ctx.User.IsAdmin { return true } if ctx.Repo.IsOwner() { return true } + permission, err := models.GetUserRepoPermission(ctx.Repo.Repository, ctx.User) + if err != nil { + log.Error("GetUserRepoPermission failed:%v", err.Error()) + return false + } + if permission.AccessMode >= models.AccessModeAdmin { + return true + } return false } -func isOper(ctx *context.Context, modelUserId int64) bool { +func isOperModifyOrDelete(ctx *context.Context, modelUserId int64) bool { if ctx.User == nil { return false } if ctx.User.IsAdmin || ctx.User.ID == modelUserId { return true } - return false + return isAdminRight(ctx) } func ShowModelPageInfo(ctx *context.Context) { @@ -1038,6 +1093,7 @@ func ShowModelPageInfo(ctx *context.Context) { if pageSize <= 0 { pageSize = setting.UI.IssuePagingNum } + isQueryPrivate := isQueryPrivateModel(ctx) repoId := ctx.Repo.Repository.ID Type := -1 modelResult, count, err := models.QueryModel(&models.AiModelQueryOptions{ @@ -1045,10 +1101,12 @@ func ShowModelPageInfo(ctx *context.Context) { Page: page, PageSize: pageSize, }, - RepoID: repoId, - Type: Type, - New: MODEL_LATEST, - Status: -1, + RepoID: repoId, + Type: Type, + New: MODEL_LATEST, + IsOnlyThisRepo: true, + Status: -1, + IsQueryPrivate: isQueryPrivate, }) if err != nil { ctx.ServerError("Cloudbrain", err) @@ -1057,8 +1115,9 @@ func ShowModelPageInfo(ctx *context.Context) { userIds := make([]int64, len(modelResult)) for i, model := range modelResult { - model.IsCanOper = isOper(ctx, model.UserId) + model.IsCanOper = isOperModifyOrDelete(ctx, model.UserId) model.IsCanDelete = isCanDelete(ctx, model.UserId) + model.IsCanDownload = isCanDownload(ctx, model) userIds[i] = model.UserId } @@ -1089,6 +1148,37 @@ func ModifyModel(id string, description string) error { return err } +func ModifyModelPrivate(ctx *context.Context) { + id := ctx.Query("id") + isPrivate := ctx.QueryBool("isPrivate") + re := map[string]string{ + "code": "-1", + } + task, err := models.QueryModelById(id) + if err != nil || task == nil { + re["msg"] = err.Error() + log.Error("no such model!", err.Error()) + ctx.JSON(200, re) + return + } + if !isOperModifyOrDelete(ctx, task.UserId) { + re["msg"] = "No right to operation." + ctx.JSON(200, re) + return + } + err = models.ModifyModelPrivate(id, isPrivate) + if err == nil { + re["code"] = "0" + ctx.JSON(200, re) + log.Info("modify success.") + } else { + re["msg"] = err.Error() + ctx.JSON(200, re) + log.Info("Failed to modify.id=" + id + " isprivate=" + fmt.Sprint(isPrivate) + " error:" + err.Error()) + } + +} + func ModifyModelInfo(ctx *context.Context) { log.Info("modify model start.") id := ctx.Query("id") @@ -1102,7 +1192,7 @@ func ModifyModelInfo(ctx *context.Context) { ctx.JSON(200, re) return } - if !isOper(ctx, task.UserId) { + if !isOperModifyOrDelete(ctx, task.UserId) { re["msg"] = "No right to operation." 
ctx.JSON(200, re) return @@ -1112,6 +1202,7 @@ func ModifyModelInfo(ctx *context.Context) { label := ctx.Query("label") description := ctx.Query("description") engine := ctx.QueryInt("engine") + isPrivate := ctx.QueryBool("isPrivate") aimodels := models.QueryModelByName(name, task.RepoId) if aimodels != nil && len(aimodels) > 0 { if len(aimodels) == 1 { @@ -1126,14 +1217,14 @@ func ModifyModelInfo(ctx *context.Context) { return } } - err = models.ModifyLocalModel(id, name, label, description, engine) + err = models.ModifyLocalModel(id, name, label, description, engine, isPrivate) } else { label := ctx.Query("label") description := ctx.Query("description") engine := task.Engine name := task.Name - err = models.ModifyLocalModel(id, name, label, description, int(engine)) + err = models.ModifyLocalModel(id, name, label, description, int(engine), task.IsPrivate) } if err != nil { @@ -1148,15 +1239,27 @@ func ModifyModelInfo(ctx *context.Context) { func QueryModelListForPredict(ctx *context.Context) { repoId := ctx.Repo.Repository.ID + page := ctx.QueryInt("page") + if page <= 0 { + page = -1 + } + pageSize := ctx.QueryInt("pageSize") + if pageSize <= 0 { + pageSize = -1 + } + isQueryPrivate := isQueryPrivateModel(ctx) + //IsOnlyThisRepo := ctx.QueryBool("isOnlyThisRepo") modelResult, count, err := models.QueryModel(&models.AiModelQueryOptions{ ListOptions: models.ListOptions{ - Page: -1, - PageSize: -1, + Page: page, + PageSize: pageSize, }, - RepoID: repoId, - Type: ctx.QueryInt("type"), - New: -1, - Status: 0, + RepoID: repoId, + Type: ctx.QueryInt("type"), + New: -1, + Status: 0, + IsOnlyThisRepo: true, + IsQueryPrivate: isQueryPrivate, }) if err != nil { ctx.ServerError("Cloudbrain", err) @@ -1168,7 +1271,9 @@ func QueryModelListForPredict(ctx *context.Context) { nameMap := make(map[string][]*models.AiModelManage) for _, model := range modelResult { - removeIpInfo(model) + model.TrainTaskInfo = "" + model.Accuracy = "" + //removeIpInfo(model) if _, value := nameMap[model.Name]; !value { models := make([]*models.AiModelManage, 0) models = append(models, model) diff --git a/routers/repo/aisafety.go b/routers/repo/aisafety.go index b638a486b..6176fcda5 100644 --- a/routers/repo/aisafety.go +++ b/routers/repo/aisafety.go @@ -11,7 +11,8 @@ import ( "os" "strconv" "strings" - "time" + + cloudbrainService "code.gitea.io/gitea/services/cloudbrain" "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/aisafety" @@ -483,7 +484,6 @@ func isTaskNotFinished(status string) bool { } func AiSafetyCreateForGetGPU(ctx *context.Context) { - t := time.Now() ctx.Data["PageIsCloudBrain"] = true ctx.Data["IsCreate"] = true ctx.Data["type"] = models.TypeCloudBrainOne @@ -497,7 +497,7 @@ func AiSafetyCreateForGetGPU(ctx *context.Context) { log.Info("GPUBaseDataSetUUID=" + setting.ModelSafetyTest.GPUBaseDataSetUUID) log.Info("GPUCombatDataSetName=" + setting.ModelSafetyTest.GPUCombatDataSetName) log.Info("GPUCombatDataSetUUID=" + setting.ModelSafetyTest.GPUCombatDataSetUUID) - var displayJobName = jobNamePrefixValid(cutString(ctx.User.Name, 5)) + t.Format("2006010215") + strconv.Itoa(int(t.Unix()))[5:] + var displayJobName = cloudbrainService.GetDisplayJobName(ctx.User.Name) ctx.Data["display_job_name"] = displayJobName prepareCloudbrainOneSpecs(ctx) queuesDetail, _ := cloudbrain.GetQueuesDetail() @@ -514,12 +514,11 @@ func AiSafetyCreateForGetGPU(ctx *context.Context) { } func AiSafetyCreateForGetNPU(ctx *context.Context) { - t := time.Now() ctx.Data["PageIsCloudBrain"] = true ctx.Data["IsCreate"] = true 
ctx.Data["type"] = models.TypeCloudBrainTwo ctx.Data["compute_resource"] = models.NPUResource - var displayJobName = jobNamePrefixValid(cutString(ctx.User.Name, 5)) + t.Format("2006010215") + strconv.Itoa(int(t.Unix()))[5:] + var displayJobName = cloudbrainService.GetDisplayJobName(ctx.User.Name) ctx.Data["display_job_name"] = displayJobName ctx.Data["datasetType"] = models.TypeCloudBrainTwo ctx.Data["BaseDataSetName"] = setting.ModelSafetyTest.NPUBaseDataSetName diff --git a/routers/repo/cloudbrain.go b/routers/repo/cloudbrain.go index 7d96d1b58..a23cd5462 100755 --- a/routers/repo/cloudbrain.go +++ b/routers/repo/cloudbrain.go @@ -15,6 +15,8 @@ import ( "time" "unicode/utf8" + cloudbrainService "code.gitea.io/gitea/services/cloudbrain" + "code.gitea.io/gitea/modules/urfs_client/urchin" "code.gitea.io/gitea/modules/dataset" @@ -92,28 +94,9 @@ func MustEnableCloudbrain(ctx *context.Context) { } } -func cutString(str string, lens int) string { - if len(str) < lens { - return str - } - return str[:lens] -} - -func jobNamePrefixValid(s string) string { - lowStr := strings.ToLower(s) - re := regexp.MustCompile(`[^a-z0-9_\\-]+`) - - removeSpecial := re.ReplaceAllString(lowStr, "") - - re = regexp.MustCompile(`^[_\\-]+`) - return re.ReplaceAllString(removeSpecial, "") - -} - func cloudBrainNewDataPrepare(ctx *context.Context, jobType string) error { ctx.Data["PageIsCloudBrain"] = true - t := time.Now() - var displayJobName = jobNamePrefixValid(cutString(ctx.User.Name, 5)) + t.Format("2006010215") + strconv.Itoa(int(t.Unix()))[5:] + var displayJobName = cloudbrainService.GetDisplayJobName(ctx.User.Name) ctx.Data["display_job_name"] = displayJobName ctx.Data["command"] = cloudbrain.GetCloudbrainDebugCommand() @@ -687,6 +670,13 @@ func CloudBrainRestart(ctx *context.Context) { break } + if _, err := os.Stat(getOldJobPath(task)); err != nil { + log.Error("Can not find job minio path", err) + resultCode = "-1" + errorMsg = ctx.Tr("cloudbrain.result_cleared") + break + } + count, err := cloudbrainTask.GetNotFinalStatusTaskCount(ctx.User.ID, models.TypeCloudBrainOne, string(models.JobTypeDebug)) if err != nil { log.Error("GetCloudbrainCountByUserID failed:%v", err, ctx.Data["MsgID"]) @@ -696,7 +686,7 @@ func CloudBrainRestart(ctx *context.Context) { } else { if count >= 1 { log.Error("the user already has running or waiting task", ctx.Data["MsgID"]) - resultCode = "-1" + resultCode = "2" errorMsg = ctx.Tr("repo.cloudbrain.morethanonejob") break } @@ -721,6 +711,11 @@ func CloudBrainRestart(ctx *context.Context) { }) } + +func getOldJobPath(task *models.Cloudbrain) string { + return setting.Attachment.Minio.RealPath + setting.Attachment.Minio.Bucket + "/" + setting.CBCodePathPrefix + task.JobName +} + func CloudBrainBenchMarkShow(ctx *context.Context) { cloudBrainShow(ctx, tplCloudBrainBenchmarkShow, models.JobTypeBenchmark) } @@ -759,43 +754,13 @@ func cloudBrainShow(ctx *context.Context, tpName base.TplName, jobType models.Jo return } if task.Status == string(models.JobWaiting) || task.Status == string(models.JobRunning) { - result, err := cloudbrain.GetJob(task.JobID) + task, err = cloudbrainTask.SyncCloudBrainOneStatus(task) if err != nil { log.Info("error:" + err.Error()) ctx.NotFound(ctx.Req.URL.RequestURI(), nil) return } - if result != nil { - jobRes, _ := models.ConvertToJobResultPayload(result.Payload) - taskRoles := jobRes.TaskRoles - taskRes, _ := models.ConvertToTaskPod(taskRoles[cloudbrain.SubTaskName].(map[string]interface{})) - ctx.Data["taskRes"] = taskRes - ctx.Data["ExitDiagnostics"] 
= taskRes.TaskStatuses[0].ExitDiagnostics - oldStatus := task.Status - task.Status = taskRes.TaskStatuses[0].State - task.ContainerIp = "" - task.ContainerID = taskRes.TaskStatuses[0].ContainerID - models.ParseAndSetDurationFromCloudBrainOne(jobRes, task) - - if task.DeletedAt.IsZero() { //normal record - if oldStatus != task.Status { - notification.NotifyChangeCloudbrainStatus(task, oldStatus) - } - err = models.UpdateJob(task) - if err != nil { - ctx.Data["error"] = err.Error() - return - } - } else { //deleted record - - } - - ctx.Data["result"] = jobRes - } else { - log.Info("error:" + err.Error()) - return - } } user, err := models.GetUserByID(task.UserID) @@ -889,7 +854,13 @@ func cloudBrainShow(ctx *context.Context, tpName base.TplName, jobType models.Jo func CloudBrainDebug(ctx *context.Context) { task := ctx.Cloudbrain debugUrl := setting.DebugServerHost + "jpylab_" + task.JobID + "_" + task.SubTaskName - ctx.Redirect(debugUrl) + if task.BootFile!=""{ + ctx.Redirect(getFileUrl(debugUrl,task.BootFile)) + + }else{ + ctx.Redirect(debugUrl) + } + } func prepareSpec4Show(ctx *context.Context, task *models.Cloudbrain) { diff --git a/routers/repo/cloudbrain_statistic.go b/routers/repo/cloudbrain_statistic.go index 3814c2daf..6ff377491 100644 --- a/routers/repo/cloudbrain_statistic.go +++ b/routers/repo/cloudbrain_statistic.go @@ -1,118 +1,180 @@ package repo import ( + "net/http" "strings" "time" "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/timeutil" + cloudbrainService "code.gitea.io/gitea/services/cloudbrain" ) func CloudbrainDurationStatisticHour() { + if setting.IsCloudbrainTimingEnabled { + var statisticTime time.Time + var count int64 + recordDurationUpdateTime, err := models.GetDurationRecordUpdateTime() + if err != nil { + log.Error("Can not get GetDurationRecordBeginTime", err) + } + now := time.Now() + currentTime := time.Date(now.Year(), now.Month(), now.Day(), now.Hour(), 0, 0, 0, now.Location()) + if err == nil && len(recordDurationUpdateTime) > 0 { + statisticTime = time.Unix(int64(recordDurationUpdateTime[0].DateTimeUnix), 0).Add(+1 * time.Hour) + } else { + statisticTime = currentTime + } - dateTime := time.Now().Format("2006-01-02 15:04:05") - dayTime := time.Now().Format("2006-01-02") - now := time.Now() + err = models.DeleteCloudbrainDurationStatistic(timeutil.TimeStamp(statisticTime.Add(-1*time.Hour).Unix()), timeutil.TimeStamp(currentTime.Unix())) + if err != nil { + log.Error("DeleteCloudbrainDurationStatistic failed", err) + } - currentTime := time.Date(now.Year(), now.Month(), now.Day(), now.Hour(), 0, 0, 0, now.Location()) + for statisticTime.Before(currentTime) || statisticTime.Equal(currentTime) { + countEach := summaryDurationStat(statisticTime) + count += countEach + statisticTime = statisticTime.Add(+1 * time.Hour) + } + log.Info("summaryDurationStat count: %v", count) + } +} +func UpdateDurationStatisticHistoryData(beginTime time.Time, endTime time.Time) int64 { + var count int64 + statisticTime := beginTime + currentTime := endTime + for statisticTime.Before(currentTime) || statisticTime.Equal(currentTime) { + countEach := summaryDurationStat(statisticTime) + count += countEach + statisticTime = statisticTime.Add(+1 * time.Hour) + } + return count +} - m, _ := time.ParseDuration("-1h") - beginTime := currentTime.Add(m).Unix() - endTime := currentTime.Unix() - hourTime := currentTime.Add(m).Hour() 
+//statisticTime是当前的时辰,比如当前是2019-01-01 12:01:01,那么statisticTime就是2019-01-01 12:00:00 +func summaryDurationStat(statisticTime time.Time) int64 { + var count int64 + dateTimeUnix := timeutil.TimeStamp(statisticTime.Add(-1 * time.Hour).Unix()) + beginTime := statisticTime.Add(-1 * time.Hour).Unix() + dayTime := statisticTime.Add(-1 * time.Hour).Format("2006-01-02") + hourTime := statisticTime.Add(-1 * time.Hour).Hour() + endTime := statisticTime.Unix() ciTasks, err := models.GetCloudbrainByTime(beginTime, endTime) if err != nil { log.Info("GetCloudbrainByTime err: %v", err) - return + return 0 } - specMap := make(map[string]*models.Specification) models.LoadSpecs4CloudbrainInfo(ciTasks) - for _, cloudbrain := range ciTasks { - if _, ok := specMap[cloudbrain.Cloudbrain.Spec.AiCenterCode+"/"+cloudbrain.Cloudbrain.Spec.AccCardType]; !ok { - if cloudbrain.Cloudbrain.Spec != nil { - specMap[cloudbrain.Cloudbrain.Spec.AiCenterCode+"/"+cloudbrain.Cloudbrain.Spec.AccCardType] = cloudbrain.Cloudbrain.Spec - } - } - } - - cloudBrainCenterCodeAndCardTypeInfo := getcloudBrainCenterCodeAndCardTypeInfo(ciTasks, beginTime, endTime) + cloudBrainCenterCodeAndCardTypeInfo, cloudbrainMap := getcloudBrainCenterCodeAndCardTypeInfo(ciTasks, beginTime, endTime) resourceQueues, err := models.GetCanUseCardInfo() if err != nil { log.Info("GetCanUseCardInfo err: %v", err) - return + return 0 } + cardsTotalDurationMap := make(map[string]int) for _, resourceQueue := range resourceQueues { - cardsTotalDurationMap[resourceQueue.Cluster+"/"+resourceQueue.AiCenterName+"/"+resourceQueue.AiCenterCode+"/"+resourceQueue.AccCardType+"/"+resourceQueue.ComputeResource] = resourceQueue.CardsTotalNum * 1 * 60 * 60 + if _, ok := cardsTotalDurationMap[resourceQueue.Cluster+"/"+resourceQueue.AiCenterCode+"/"+resourceQueue.AccCardType]; !ok { + cardsTotalDurationMap[resourceQueue.Cluster+"/"+resourceQueue.AiCenterCode+"/"+resourceQueue.AccCardType] = resourceQueue.CardsTotalNum * 1 * 60 * 60 + } else { + cardsTotalDurationMap[resourceQueue.Cluster+"/"+resourceQueue.AiCenterCode+"/"+resourceQueue.AccCardType] += resourceQueue.CardsTotalNum * 1 * 60 * 60 + } } - for centerCode, CardTypeInfo := range cloudBrainCenterCodeAndCardTypeInfo { - for cardType, cardDuration := range CardTypeInfo { - spec := specMap[centerCode+"/"+cardType] - if spec != nil { - if err := models.DeleteCloudbrainDurationStatisticHour(dayTime, hourTime, centerCode, cardType); err != nil { - log.Error("DeleteCloudbrainDurationStatisticHour failed: %v", err.Error()) - return - } - if _, ok := cardsTotalDurationMap[spec.Cluster+"/"+spec.AiCenterName+"/"+centerCode+"/"+cardType+"/"+spec.ComputeResource]; !ok { - cardsTotalDurationMap[spec.Cluster+"/"+spec.AiCenterName+"/"+centerCode+"/"+cardType+"/"+spec.ComputeResource] = 0 + for centerCode, CardTypes := range cloudBrainCenterCodeAndCardTypeInfo { + for cardType, cardDuration := range CardTypes { + cloudbrainTable := cloudbrainMap[centerCode+"/"+cardType] + if cloudbrainTable != nil { + if _, ok := cardsTotalDurationMap[cloudbrainTable.Cluster+"/"+centerCode+"/"+cardType]; !ok { + cardsTotalDurationMap[cloudbrainTable.Cluster+"/"+centerCode+"/"+cardType] = 0 } cloudbrainDurationStat := models.CloudbrainDurationStatistic{ - DateTime: dateTime, + DateTimeUnix: dateTimeUnix, DayTime: dayTime, HourTime: hourTime, - Cluster: spec.Cluster, - AiCenterName: spec.AiCenterName, + Cluster: cloudbrainTable.Cluster, + AiCenterName: GetAiCenterNameByCode(centerCode, "zh-CN"), AiCenterCode: centerCode, AccCardType: cardType, - 
ComputeResource: spec.ComputeResource, CardsUseDuration: cardDuration, - CardsTotalDuration: cardsTotalDurationMap[spec.Cluster+"/"+spec.AiCenterName+"/"+centerCode+"/"+cardType+"/"+spec.ComputeResource], + CardsTotalDuration: cardsTotalDurationMap[cloudbrainTable.Cluster+"/"+centerCode+"/"+cardType], CreatedUnix: timeutil.TimeStampNow(), } if _, err = models.InsertCloudbrainDurationStatistic(&cloudbrainDurationStat); err != nil { log.Error("Insert cloudbrainDurationStat failed: %v", err.Error()) } - delete(cardsTotalDurationMap, spec.Cluster+"/"+spec.AiCenterName+"/"+centerCode+"/"+cardType+"/"+spec.ComputeResource) + count++ + delete(cardsTotalDurationMap, cloudbrainTable.Cluster+"/"+centerCode+"/"+cardType) } } } for key, cardsTotalDuration := range cardsTotalDurationMap { - if err := models.DeleteCloudbrainDurationStatisticHour(dayTime, hourTime, strings.Split(key, "/")[2], strings.Split(key, "/")[3]); err != nil { - log.Error("DeleteCloudbrainDurationStatisticHour failed: %v", err.Error()) - return - } cloudbrainDurationStat := models.CloudbrainDurationStatistic{ - DateTime: dateTime, + DateTimeUnix: dateTimeUnix, DayTime: dayTime, HourTime: hourTime, Cluster: strings.Split(key, "/")[0], - AiCenterName: strings.Split(key, "/")[1], - AiCenterCode: strings.Split(key, "/")[2], - AccCardType: strings.Split(key, "/")[3], - ComputeResource: strings.Split(key, "/")[4], + AiCenterName: GetAiCenterNameByCode(strings.Split(key, "/")[1], "zh-CN"), + AiCenterCode: strings.Split(key, "/")[1], + AccCardType: strings.Split(key, "/")[2], CardsUseDuration: 0, CardsTotalDuration: cardsTotalDuration, + CardsTotalNum: cardsTotalDuration / 1 / 60 / 60, CreatedUnix: timeutil.TimeStampNow(), } if _, err = models.InsertCloudbrainDurationStatistic(&cloudbrainDurationStat); err != nil { log.Error("Insert cloudbrainDurationStat failed: %v", err.Error()) } + count++ } log.Info("finish summary cloudbrainDurationStat") + return count +} + +func GetAiCenterNameByCode(centerCode string, language string) string { + var aiCenterName string + aiCenterInfo := cloudbrainService.GetAiCenterInfoByCenterCode(centerCode) + if aiCenterInfo != nil { + if language == "zh-CN" { + aiCenterName = aiCenterInfo.Content + } else { + aiCenterName = aiCenterInfo.ContentEN + } + } else { + aiCenterName = centerCode + } + return aiCenterName } -func getcloudBrainCenterCodeAndCardTypeInfo(ciTasks []*models.CloudbrainInfo, beginTime int64, endTime int64) map[string]map[string]int { +func getcloudBrainCenterCodeAndCardTypeInfo(ciTasks []*models.CloudbrainInfo, beginTime int64, endTime int64) (map[string]map[string]int, map[string]*models.Cloudbrain) { var WorkServerNumber int var AccCardsNum int + cloudbrainMap := make(map[string]*models.Cloudbrain) cloudBrainCenterCodeAndCardType := make(map[string]map[string]int) for _, cloudbrain := range ciTasks { + if cloudbrain.Cloudbrain.StartTime == 0 { + cloudbrain.Cloudbrain.StartTime = cloudbrain.Cloudbrain.CreatedUnix + } + if cloudbrain.Cloudbrain.EndTime == 0 { + cloudbrain.Cloudbrain.EndTime = timeutil.TimeStamp(time.Now().Unix()) + } + cloudbrain = cloudbrainService.UpdateCloudbrainAiCenter(cloudbrain) + if cloudbrain.Cloudbrain.Spec != nil { + if _, ok := cloudbrainMap[cloudbrain.Cloudbrain.AiCenter+"/"+cloudbrain.Cloudbrain.Spec.AccCardType]; !ok { + if cloudbrain.Cloudbrain.Spec != nil { + cloudbrainMap[cloudbrain.Cloudbrain.AiCenter+"/"+cloudbrain.Cloudbrain.Spec.AccCardType] = &cloudbrain.Cloudbrain + } + } + } + cloudbrain = cloudbrainService.UpdateCloudbrainAiCenter(cloudbrain) if 
cloudbrain.Cloudbrain.StartTime == 0 { cloudbrain.Cloudbrain.StartTime = cloudbrain.Cloudbrain.CreatedUnix } @@ -129,41 +191,77 @@ func getcloudBrainCenterCodeAndCardTypeInfo(ciTasks []*models.CloudbrainInfo, be } else { AccCardsNum = cloudbrain.Cloudbrain.Spec.AccCardsNum } - if _, ok := cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Spec.AiCenterCode]; !ok { - cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Spec.AiCenterCode] = make(map[string]int) + if _, ok := cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter]; !ok { + cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter] = make(map[string]int) } - - if cloudbrain.Cloudbrain.Status == string(models.ModelArtsRunning) { - if _, ok := cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Spec.AiCenterCode][cloudbrain.Cloudbrain.Spec.AccCardType]; !ok { - if int64(cloudbrain.Cloudbrain.StartTime) < beginTime { - cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Spec.AiCenterCode][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (int(endTime) - int(beginTime)) + if cloudbrain.Cloudbrain.Spec != nil { + if cloudbrain.Cloudbrain.Status == string(models.ModelArtsRunning) && cloudbrain.Cloudbrain.DeletedAt.IsZero() { + if _, ok := cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType]; !ok { + if int64(cloudbrain.Cloudbrain.StartTime) < beginTime { + cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (int(endTime) - int(beginTime)) + } else if beginTime <= int64(cloudbrain.Cloudbrain.StartTime) && int64(cloudbrain.Cloudbrain.StartTime) < endTime { + cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (int(endTime) - int(cloudbrain.Cloudbrain.StartTime)) + } else if int64(cloudbrain.Cloudbrain.StartTime) >= endTime { + cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] = 0 + } } else { - cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Spec.AiCenterCode][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (int(endTime) - int(cloudbrain.Cloudbrain.StartTime)) + if int64(cloudbrain.Cloudbrain.StartTime) < beginTime { + cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (int(endTime) - int(beginTime)) + } else if beginTime <= int64(cloudbrain.Cloudbrain.StartTime) && int64(cloudbrain.Cloudbrain.StartTime) < endTime { + cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (int(endTime) - int(cloudbrain.Cloudbrain.StartTime)) + } else if int64(cloudbrain.Cloudbrain.StartTime) >= endTime { + cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] += 0 + } } } else { - if int64(cloudbrain.Cloudbrain.StartTime) < beginTime { - cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Spec.AiCenterCode][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (int(endTime) - int(beginTime)) + if _, ok := cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType]; !ok { + if int64(cloudbrain.Cloudbrain.StartTime) <= beginTime && int64(cloudbrain.Cloudbrain.EndTime) <= endTime { + 
cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (int(cloudbrain.Cloudbrain.EndTime) - int(beginTime)) + } else if int64(cloudbrain.Cloudbrain.StartTime) <= beginTime && int64(cloudbrain.Cloudbrain.EndTime) > endTime { + cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (int(endTime) - int(beginTime)) + } else if beginTime <= int64(cloudbrain.Cloudbrain.StartTime) && int64(cloudbrain.Cloudbrain.StartTime) <= endTime && int64(cloudbrain.Cloudbrain.EndTime) <= endTime { + cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (int(cloudbrain.Cloudbrain.EndTime) - int(cloudbrain.Cloudbrain.StartTime)) + } else if beginTime <= int64(cloudbrain.Cloudbrain.StartTime) && int64(cloudbrain.Cloudbrain.StartTime) <= endTime && int64(cloudbrain.Cloudbrain.EndTime) > endTime { + cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (int(endTime) - int(cloudbrain.Cloudbrain.StartTime)) + } } else { - cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Spec.AiCenterCode][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (int(endTime) - int(cloudbrain.Cloudbrain.StartTime)) + if int64(cloudbrain.Cloudbrain.StartTime) <= beginTime && int64(cloudbrain.Cloudbrain.EndTime) <= endTime { + cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (int(cloudbrain.Cloudbrain.EndTime) - int(beginTime)) + } else if int64(cloudbrain.Cloudbrain.StartTime) <= beginTime && int64(cloudbrain.Cloudbrain.EndTime) > endTime { + cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (int(endTime) - int(beginTime)) + } else if beginTime <= int64(cloudbrain.Cloudbrain.StartTime) && int64(cloudbrain.Cloudbrain.StartTime) <= endTime && int64(cloudbrain.Cloudbrain.EndTime) <= endTime { + cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (int(cloudbrain.Cloudbrain.EndTime) - int(cloudbrain.Cloudbrain.StartTime)) + } else if beginTime <= int64(cloudbrain.Cloudbrain.StartTime) && int64(cloudbrain.Cloudbrain.StartTime) <= endTime && int64(cloudbrain.Cloudbrain.EndTime) > endTime { + cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (int(endTime) - int(cloudbrain.Cloudbrain.StartTime)) + } } } - } else { - if _, ok := cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Spec.AiCenterCode][cloudbrain.Cloudbrain.Spec.AccCardType]; !ok { - if int64(cloudbrain.Cloudbrain.StartTime) < beginTime { - cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Spec.AiCenterCode][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (int(cloudbrain.Cloudbrain.EndTime) - int(beginTime)) - } else { - cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Spec.AiCenterCode][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (int(cloudbrain.Cloudbrain.EndTime) - int(cloudbrain.Cloudbrain.StartTime)) - } - } else { - if int64(cloudbrain.Cloudbrain.StartTime) < beginTime { - 
cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Spec.AiCenterCode][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (int(cloudbrain.Cloudbrain.EndTime) - int(beginTime)) - } else { - cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Spec.AiCenterCode][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (int(cloudbrain.Cloudbrain.EndTime) - int(cloudbrain.Cloudbrain.StartTime)) - } - } - } } - return cloudBrainCenterCodeAndCardType + return cloudBrainCenterCodeAndCardType, cloudbrainMap +} + +func CloudbrainUpdateHistoryData(ctx *context.Context) { + beginTimeStr := ctx.QueryTrim("beginTime") + endTimeStr := ctx.QueryTrim("endTime") + var count int64 + var err error + if beginTimeStr != "" && endTimeStr != "" { + beginTime, _ := time.ParseInLocation("2006-01-02 15:04:05", beginTimeStr, time.Local) + endTime, _ := time.ParseInLocation("2006-01-02 15:04:05", endTimeStr, time.Local) + if time.Now().Before(endTime) { + endTime = time.Now() + } + beginTimeUnix := timeutil.TimeStamp(beginTime.Unix()) + endTimeUnix := timeutil.TimeStamp(endTime.Unix()) + + err = models.DeleteCloudbrainDurationStatistic(beginTimeUnix, endTimeUnix) + count = UpdateDurationStatisticHistoryData(beginTime.Add(+1*time.Hour), endTime.Add(+1*time.Hour)) + } + ctx.JSON(http.StatusOK, map[string]interface{}{ + "message": 0, + "count": count, + "err": err, + }) } diff --git a/routers/repo/grampus.go b/routers/repo/grampus.go index 581a1fbfb..8f3182758 100755 --- a/routers/repo/grampus.go +++ b/routers/repo/grampus.go @@ -10,7 +10,6 @@ import ( "path" "strconv" "strings" - "time" "code.gitea.io/gitea/modules/urfs_client/urchin" "code.gitea.io/gitea/routers/response" @@ -77,8 +76,7 @@ func GrampusTrainJobNPUNew(ctx *context.Context) { func grampusTrainJobNewDataPrepare(ctx *context.Context, processType string) error { ctx.Data["PageIsCloudBrain"] = true - t := time.Now() - var displayJobName = jobNamePrefixValid(cutString(ctx.User.Name, 5)) + t.Format("2006010215") + strconv.Itoa(int(t.Unix()))[5:] + var displayJobName = cloudbrainService.GetDisplayJobName(ctx.User.Name) ctx.Data["display_job_name"] = displayJobName //get valid images @@ -1144,7 +1142,7 @@ func HandleTaskWithAiCenter(ctx *context.Context) { log.Error("GetJob failed:" + err.Error()) continue } - if result != nil { + if len(result.JobInfo.Tasks) != 0 { if len(result.JobInfo.Tasks[0].CenterID) == 1 && len(result.JobInfo.Tasks[0].CenterName) == 1 { task.AiCenter = result.JobInfo.Tasks[0].CenterID[0] + "+" + result.JobInfo.Tasks[0].CenterName[0] } diff --git a/routers/repo/modelarts.go b/routers/repo/modelarts.go index fabf7e555..4e30e625d 100755 --- a/routers/repo/modelarts.go +++ b/routers/repo/modelarts.go @@ -15,6 +15,8 @@ import ( "time" "unicode/utf8" + cloudbrainService "code.gitea.io/gitea/services/cloudbrain" + "code.gitea.io/gitea/services/cloudbrain/cloudbrainTask" "code.gitea.io/gitea/modules/dataset" @@ -128,8 +130,7 @@ func NotebookNew(ctx *context.Context) { func notebookNewDataPrepare(ctx *context.Context) error { ctx.Data["PageIsCloudBrain"] = true - t := time.Now() - var displayJobName = jobNamePrefixValid(cutString(ctx.User.Name, 5)) + t.Format("2006010215") + strconv.Itoa(int(t.Unix()))[5:] + var displayJobName = cloudbrainService.GetDisplayJobName(ctx.User.Name) ctx.Data["display_job_name"] = displayJobName attachs, err := models.GetModelArtsUserAttachments(ctx.User.ID) @@ -239,9 +240,9 @@ func Notebook2Create(ctx *context.Context, form auth.CreateModelArtsNotebookForm } if 
setting.ModelartsCD.Enabled { - err = modelarts_cd.GenerateNotebook(ctx, displayJobName, jobName, uuid, description, imageId, spec) + _, err = modelarts_cd.GenerateNotebook(ctx, displayJobName, jobName, uuid, description, imageId, spec, "", modelarts.AutoStopDurationMs) } else { - err = modelarts.GenerateNotebook2(ctx, displayJobName, jobName, uuid, description, imageId, spec) + _, err = modelarts.GenerateNotebook2(ctx, displayJobName, jobName, uuid, description, imageId, spec, "", modelarts.AutoStopDurationMs) } if err != nil { @@ -387,8 +388,31 @@ func NotebookDebug2(ctx *context.Context) { ctx.RenderWithErr(err.Error(), tplModelArtsNotebookIndex, nil) return } + if task.BootFile != "" { + ctx.Redirect(getFileUrl(result.Url, task.BootFile) + "?token=" + result.Token) + } else { + ctx.Redirect(result.Url + "?token=" + result.Token) + } - ctx.Redirect(result.Url + "?token=" + result.Token) +} + +func getFileUrl(url string, filename string) string { + middle := "" + if url[len(url)-3:] == "lab" || url[len(url)-4:] == "lab/" { + if url[len(url)-1] == '/' { + middle = "tree/" + } else { + middle = "/tree/" + } + } else { + if url[len(url)-1] == '/' { + middle = "lab/tree/" + } else { + middle = "/lab/tree/" + } + } + + return url + middle + path.Base(filename) } func NotebookRestart(ctx *context.Context) { @@ -420,7 +444,8 @@ func NotebookRestart(ctx *context.Context) { } else { if count >= 1 { log.Error("the user already has running or waiting task", ctx.Data["MsgID"]) - errorMsg = "you have already a running or waiting task, can not create more" + resultCode = "2" + errorMsg = ctx.Tr("repo.cloudbrain.morethanonejob") break } } @@ -714,8 +739,7 @@ func trainJobNewDataPrepare(ctx *context.Context) error { // return //} - t := time.Now() - var displayJobName = jobNamePrefixValid(cutString(ctx.User.Name, 5)) + t.Format("2006010215") + strconv.Itoa(int(t.Unix()))[5:] + var displayJobName = cloudbrainService.GetDisplayJobName(ctx.User.Name) ctx.Data["display_job_name"] = displayJobName attachs, err := models.GetModelArtsTrainAttachments(ctx.User.ID) @@ -2313,7 +2337,7 @@ func InferenceJobIndex(ctx *context.Context) { tasks[i].ComputeResource = models.NPUResource } } - + isQueryPrivate := isQueryPrivateModel(ctx) repoId := ctx.Repo.Repository.ID Type := -1 _, model_count, _ := models.QueryModel(&models.AiModelQueryOptions{ @@ -2321,10 +2345,12 @@ func InferenceJobIndex(ctx *context.Context) { Page: 1, PageSize: 2, }, - RepoID: repoId, - Type: Type, - New: MODEL_LATEST, - Status: 0, + RepoID: repoId, + Type: Type, + New: MODEL_LATEST, + IsOnlyThisRepo: true, + Status: 0, + IsQueryPrivate: isQueryPrivate, }) ctx.Data["MODEL_COUNT"] = model_count @@ -2351,8 +2377,7 @@ func inferenceJobNewDataPrepare(ctx *context.Context) error { ctx.Data["PageIsCloudBrain"] = true ctx.Data["newInference"] = true - t := time.Now() - var displayJobName = jobNamePrefixValid(cutString(ctx.User.Name, 5)) + t.Format("2006010215") + strconv.Itoa(int(t.Unix()))[5:] + var displayJobName = cloudbrainService.GetDisplayJobName(ctx.User.Name) ctx.Data["display_job_name"] = displayJobName attachs, err := models.GetModelArtsTrainAttachments(ctx.User.ID) @@ -2394,7 +2419,7 @@ func inferenceJobNewDataPrepare(ctx *context.Context) error { return err } ctx.Data["config_list"] = configList.ParaConfigs - + isQueryPrivate := isQueryPrivateModel(ctx) repoId := ctx.Repo.Repository.ID Type := -1 _, model_count, _ := models.QueryModel(&models.AiModelQueryOptions{ @@ -2402,10 +2427,12 @@ func inferenceJobNewDataPrepare(ctx *context.Context) 
error { Page: 1, PageSize: 2, }, - RepoID: repoId, - Type: Type, - New: MODEL_LATEST, - Status: 0, + RepoID: repoId, + Type: Type, + New: MODEL_LATEST, + IsOnlyThisRepo: true, + Status: 0, + IsQueryPrivate: isQueryPrivate, }) ctx.Data["MODEL_COUNT"] = model_count ctx.Data["datasetType"] = models.TypeCloudBrainTwo diff --git a/routers/repo/repo.go b/routers/repo/repo.go index 2c8c2f45b..4919b2487 100644 --- a/routers/repo/repo.go +++ b/routers/repo/repo.go @@ -414,7 +414,9 @@ func Action(ctx *context.Context) { var err error switch ctx.Params(":action") { case "watch": - err = models.WatchRepo(ctx.User.ID, ctx.Repo.Repository.ID, true) + err = models.WatchRepo(ctx.User.ID, ctx.Repo.Repository.ID, true, models.ReceiveAllNotification) + case "watch_but_reject": + err = models.WatchRepo(ctx.User.ID, ctx.Repo.Repository.ID, true, models.RejectAllNotification) case "unwatch": err = models.WatchRepo(ctx.User.ID, ctx.Repo.Repository.ID, false) case "star": diff --git a/routers/repo/user_data_analysis.go b/routers/repo/user_data_analysis.go index 508addf75..a6de283a4 100755 --- a/routers/repo/user_data_analysis.go +++ b/routers/repo/user_data_analysis.go @@ -907,3 +907,9 @@ func QueryUserLoginInfo(ctx *context.Context) { log.Info("writer exel error." + err.Error()) } } + +func QueryUserAnnualReport(ctx *context.Context) { + log.Info("start to QueryUserAnnualReport ") + result := models.QueryUserAnnualReport(ctx.User.ID) + ctx.JSON(http.StatusOK, result) +} diff --git a/routers/repo/user_invitation.go b/routers/repo/user_invitation.go index a2752a481..6e7207bce 100644 --- a/routers/repo/user_invitation.go +++ b/routers/repo/user_invitation.go @@ -49,9 +49,10 @@ func getInvitationDetailExcelHeader(ctx *context.Context) map[string]string { excelHeader := make([]string, 0) excelHeader = append(excelHeader, ctx.Tr("user.static.id")) excelHeader = append(excelHeader, ctx.Tr("user.static.name")) - excelHeader = append(excelHeader, ctx.Tr("user.static.srcUserId")) + excelHeader = append(excelHeader, ctx.Tr("user.static.email")) excelHeader = append(excelHeader, ctx.Tr("user.static.phone")) excelHeader = append(excelHeader, ctx.Tr("user.static.registdate")) + excelHeader = append(excelHeader, ctx.Tr("user.static.srcUserId")) excelHeaderMap := make(map[string]string, 0) var i byte @@ -92,8 +93,7 @@ func writeInvitationDetailExcel(row int, xlsx *excelize.File, sheetName string, tmp = tmp + 1 xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.Name) tmp = tmp + 1 - - xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.SrcUserID) + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.Email) tmp = tmp + 1 xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.Phone) @@ -101,7 +101,9 @@ func writeInvitationDetailExcel(row int, xlsx *excelize.File, sheetName string, formatTime := userRecord.CreatedUnix.Format("2006-01-02 15:04:05") xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, formatTime[0:len(formatTime)-3]) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.SrcUserID) } func DownloadInvitationDetail(ctx *context.Context) { @@ -413,6 +415,7 @@ func queryData(ctx *context.Context, startTime time.Time, endTime time.Time) { invi.Name = tmpUser.Name invi.Phone = tmpUser.PhoneNumber invi.CreatedUnix = tmpUser.CreatedUnix + invi.Email = tmpUser.Email } else { invi.Name = "已注销" } diff --git a/routers/routes/routes.go b/routers/routes/routes.go index 2b361b507..80ff96364 100755 --- a/routers/routes/routes.go +++ b/routers/routes/routes.go @@ -359,6 +359,7 @@ 
func RegisterRoutes(m *macaron.Macaron) { m.Get("/all/dosearch/", routers.SearchApi) m.Post("/user/login/kanban", user.SignInPostAPI) m.Get("/home/term", routers.HomeTerm) + m.Get("/home/notice", routers.HomeNoticeTmpl) m.Get("/home/privacy", routers.HomePrivacy) m.Get("/extension/tuomin/upload", modelapp.ProcessImageUI) m.Post("/extension/tuomin/upload", reqSignIn, modelapp.ProcessImage) @@ -518,6 +519,7 @@ func RegisterRoutes(m *macaron.Macaron) { // r.Get("/feeds", binding.Bind(auth.FeedsForm{}), user.Feeds) m.Any("/activate", user.Activate, reqSignIn) m.Any("/activate_email", user.ActivateEmail) + m.Post("/update_email", bindIgnErr(auth.UpdateEmailForm{}), user.UpdateEmailPost) m.Get("/avatar/:username/:size", user.Avatar) m.Get("/email2user", user.Email2User) m.Get("/recover_account", user.ResetPasswd) @@ -1249,6 +1251,7 @@ func RegisterRoutes(m *macaron.Macaron) { m.Post("/delete_model_convert/:id", repo.DeleteModelConvert) m.Post("/convert_stop/:id", repo.StopModelConvert) m.Put("/modify_model", repo.ModifyModelInfo) + m.Put("/modify_model_status", repo.ModifyModelPrivate) m.Get("/show_model", reqRepoModelManageReader, repo.ShowModelTemplate) m.Get("/convert_model", reqRepoModelManageReader, repo.ConvertModelTemplate) m.Get("/show_model_info", repo.ShowModelInfo) @@ -1272,8 +1275,8 @@ func RegisterRoutes(m *macaron.Macaron) { m.Group("/modelsafety", func() { m.Group("/:id", func() { - m.Get("/show", reqRepoCloudBrainWriter, repo.GetAiSafetyTaskTmpl) - m.Get("", reqRepoCloudBrainWriter, repo.GetAiSafetyTask) + m.Get("/show", reqRepoCloudBrainReader, repo.GetAiSafetyTaskTmpl) + m.Get("", reqRepoCloudBrainReader, repo.GetAiSafetyTask) m.Post("/stop", cloudbrain.AdminOrOwnerOrJobCreaterRight, repo.StopAiSafetyTask) m.Post("/del", cloudbrain.AdminOrOwnerOrJobCreaterRight, repo.DelAiSafetyTask) }) diff --git a/routers/user/Invitation.go b/routers/user/Invitation.go index 8491390b2..0eb8ae2f4 100644 --- a/routers/user/Invitation.go +++ b/routers/user/Invitation.go @@ -63,7 +63,7 @@ func InviationTpl(ctx *context.Context) { ctx.HTML(200, tplInvitation) } -func RegisteUserByInvitaionCode(invitationcode string, newUserId int64, newPhoneNumber string) error { +func RegisteUserByInvitaionCode(invitationcode string, newUserId int64, newPhoneNumber string, email string) error { user := parseInvitaionCode(invitationcode) if user == nil { return errors.New("The invitated user not existed.") @@ -85,6 +85,7 @@ func RegisteUserByInvitaionCode(invitationcode string, newUserId int64, newPhone SrcUserID: user.ID, UserID: newUserId, Phone: newPhoneNumber, + Email: email, } err := models.InsertInvitaion(invitation) diff --git a/routers/user/auth.go b/routers/user/auth.go index 57ffb1710..5314571d2 100755 --- a/routers/user/auth.go +++ b/routers/user/auth.go @@ -1368,7 +1368,7 @@ func SignUpPost(ctx *context.Context, cpt *captcha.Captcha, form auth.RegisterFo log.Info("enter here, and form.InvitaionCode =" + invitationCode) if invitationCode != "" { - RegisteUserByInvitaionCode(invitationCode, u.ID, u.PhoneNumber) + RegisteUserByInvitaionCode(invitationCode, u.ID, u.PhoneNumber, u.Email) } err := models.AddEmailAddress(&models.EmailAddress{ @@ -1413,6 +1413,34 @@ func SignUpPost(ctx *context.Context, cpt *captcha.Captcha, form auth.RegisterFo handleSignInFull(ctx, u, false, true) } +//update user emailAddress +func UpdateEmailPost(ctx *context.Context, form auth.UpdateEmailForm) { + newEmailAddress := ctx.Query("NewEmail") + if newEmailAddress == "" { + log.Error("please input the newEmail") + return + } + 
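+	// Refuse an address that is already bound to any account before touching the
+	// current user's email record.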
if used, _ := models.IsEmailUsed(newEmailAddress); used { + ctx.RenderWithErr(ctx.Tr("form.email_been_used"), TplActivate, &form) + return + } + user := ctx.User + email, err := models.GetEmailAddressByIDAndEmail(user.ID, user.Email) + if err != nil { + ctx.ServerError("GetEmailAddressByIDAndEmail failed", err) + return + } + err = email.UpdateEmailAddress(newEmailAddress) + if err != nil { + ctx.ServerError("UpdateEmailAddress failed", err) + return + } + ctx.Data["SignedUser.Email"] = newEmailAddress + ctx.User.Email = newEmailAddress + Activate(ctx) + +} + // Activate render activate user page func Activate(ctx *context.Context) { code := ctx.Query("code") diff --git a/routers/user/home.go b/routers/user/home.go index b6ab28f95..62b0357ad 100755 --- a/routers/user/home.go +++ b/routers/user/home.go @@ -23,6 +23,8 @@ import ( "code.gitea.io/gitea/modules/modelarts" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/util" + "code.gitea.io/gitea/routers/repo" + cloudbrainService "code.gitea.io/gitea/services/cloudbrain" issue_service "code.gitea.io/gitea/services/issue" pull_service "code.gitea.io/gitea/services/pull" @@ -837,6 +839,8 @@ func Cloudbrains(ctx *context.Context) { } models.LoadSpecs4CloudbrainInfo(ciTasks) for i, _ := range ciTasks { + ciTasks[i] = cloudbrainService.UpdateCloudbrainAiCenter(ciTasks[i]) + ciTasks[i].Cloudbrain.AiCenter = repo.GetAiCenterNameByCode(ciTasks[i].Cloudbrain.AiCenter, ctx.Language()) ciTasks[i].CanDebug = true ciTasks[i].CanDel = true ciTasks[i].Cloudbrain.ComputeResource = ciTasks[i].ComputeResource diff --git a/services/cloudbrain/clear.go b/services/cloudbrain/clear.go new file mode 100644 index 000000000..44613ee3c --- /dev/null +++ b/services/cloudbrain/clear.go @@ -0,0 +1,151 @@ +package cloudbrain + +import ( + "io/ioutil" + "os" + "sort" + "time" + + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/storage" +) + +func ClearCloudbrainResultSpace() { + log.Info("clear cloudbrain one result space begin.") + if !setting.ClearStrategy.Enabled{ + return + } + + tasks, err := models.GetCloudBrainOneStoppedNotDebugJobDaysAgo(setting.ClearStrategy.ResultSaveDays, setting.ClearStrategy.BatchSize) + if err != nil { + log.Warn("Failed to get cloudbrain, clear result failed.", err) + return + } + debugTasks, err := models.GetCloudBrainOneStoppedDebugJobDaysAgo(setting.ClearStrategy.ResultSaveDays, setting.ClearStrategy.DebugJobSize) + if err != nil { + log.Warn("Failed to get debug cloudbrain.", err) + + } + tasks=append(tasks,debugTasks...) 
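+	// Old stopped train/benchmark jobs and a capped batch of old debug jobs are
+	// then cleared together below.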
+ + if err != nil { + log.Warn("Failed to get cloudbrain, clear result failed.", err) + return + } + var ids []int64 + for _, task := range tasks { + err := DeleteCloudbrainOneJobStorage(task.JobName) + if err == nil { + log.Info("clear job in cloudbrain table:"+task.JobName) + ids = append(ids, task.ID) + } + } + + err = models.UpdateCloudBrainRecordsCleared(ids) + if err != nil { + log.Warn("Failed to set cloudbrain cleared status", err) + } + //如果云脑表处理完了,通过遍历minio对象处理历史垃圾数据,如果存在的话 + if len(tasks) < setting.ClearStrategy.BatchSize+setting.ClearStrategy.DebugJobSize { + clearLocalHistoryTrashFile() + clearMinioHistoryTrashFile() + + } + log.Info("clear cloudbrain one result space end.") + +} + +func clearMinioHistoryTrashFile() { + JobRealPrefix := setting.Attachment.Minio.RealPath + setting.Attachment.Minio.Bucket + "/" + setting.CBCodePathPrefix + + miniofiles, err := ioutil.ReadDir(JobRealPrefix) + + processCount := 0 + if err != nil { + log.Warn("Can not browser minio job path.") + } else { + SortModTimeAscend(miniofiles) + for _, file := range miniofiles { + + if file.Name()!="" && file.ModTime().Before(time.Now().AddDate(0, 0, -setting.ClearStrategy.TrashSaveDays)) { + + has,err:=models.IsCloudbrainExistByJobName(file.Name()) + if err==nil && !has { + dirPath := setting.CBCodePathPrefix + file.Name() + "/" + log.Info("clear job in minio trash:" + file.Name()) + storage.Attachments.DeleteDir(dirPath) + processCount++ + } + if processCount == setting.ClearStrategy.BatchSize { + break + } + } else { + break + } + + } + + } +} + +func clearLocalHistoryTrashFile() { + files, err := ioutil.ReadDir(setting.JobPath) + processCount := 0 + if err != nil { + log.Warn("Can not browser local job path.") + } else { + SortModTimeAscend(files) + for _, file := range files { + //清理n天前的历史垃圾数据,清理job目录 + if file.Name()!="" && file.ModTime().Before(time.Now().AddDate(0, 0, -setting.ClearStrategy.TrashSaveDays)) { + has,err:=models.IsCloudbrainExistByJobName(file.Name()) + if err==nil && !has{ + os.RemoveAll(setting.JobPath + file.Name()) + log.Info("clear job in local trash:"+file.Name()) + processCount++ + } + if processCount == setting.ClearStrategy.BatchSize { + break + } + } else { + break + } + + } + + } + +} + +func SortModTimeAscend(files []os.FileInfo) { + sort.Slice(files, func(i, j int) bool { + return files[i].ModTime().Before(files[j].ModTime()) + }) +} + +func DeleteCloudbrainOneJobStorage(jobName string) error { + + if jobName==""{ + return nil + } + //delete local + localJobPath := setting.JobPath + jobName + err := os.RemoveAll(localJobPath) + if err != nil { + log.Error("RemoveAll(%s) failed:%v", localJobPath, err) + } + + dirPath := setting.CBCodePathPrefix + jobName + "/" + err1 := storage.Attachments.DeleteDir(dirPath) + + if err1 != nil { + log.Error("DeleteDir(%s) failed:%v", localJobPath, err) + } + if err == nil { + err = err1 + } + + return err +} diff --git a/services/cloudbrain/cloudbrainTask/notebook.go b/services/cloudbrain/cloudbrainTask/notebook.go new file mode 100644 index 000000000..6b2fcf707 --- /dev/null +++ b/services/cloudbrain/cloudbrainTask/notebook.go @@ -0,0 +1,362 @@ +package cloudbrainTask + +import ( + "fmt" + "net/http" + "path" + + "code.gitea.io/gitea/modules/modelarts" + "code.gitea.io/gitea/modules/modelarts_cd" + + "code.gitea.io/gitea/modules/git" + + "code.gitea.io/gitea/modules/cloudbrain" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/redis/redis_key" + "code.gitea.io/gitea/modules/redis/redis_lock" + 
"code.gitea.io/gitea/modules/storage" + "code.gitea.io/gitea/services/cloudbrain/resource" + "code.gitea.io/gitea/services/reward/point/account" + + "code.gitea.io/gitea/modules/setting" + cloudbrainService "code.gitea.io/gitea/services/cloudbrain" + repo_service "code.gitea.io/gitea/services/repository" + + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/context" + api "code.gitea.io/gitea/modules/structs" + "code.gitea.io/gitea/modules/util" +) + +const NoteBookExtension = ".ipynb" + +func FileNotebookCreate(ctx *context.Context, option api.CreateFileNotebookJobOption) { + + if ctx.Written() { + return + } + + if path.Ext(option.File) != NoteBookExtension { + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.notebook_select_wrong"))) + return + } + + isNotebookFileExist, _ := isNoteBookFileExist(ctx, option) + if !isNotebookFileExist { + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.notebook_file_not_exist"))) + return + } + + sourceRepo, err := models.GetRepositoryByOwnerAndName(option.OwnerName, option.ProjectName) + if err != nil { + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.notebook_file_not_exist"))) + return + } + + permission, err := models.GetUserRepoPermission(sourceRepo, ctx.User) + if err != nil { + log.Error("Get permission failed", err) + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.notebook_file_no_right"))) + return + } + + if !permission.CanRead(models.UnitTypeCode) { + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.notebook_file_no_right"))) + return + } + + //create repo if not exist + repo, err := models.GetRepositoryByName(ctx.User.ID, setting.FileNoteBook.ProjectName) + if repo == nil { + repo, err = repo_service.CreateRepository(ctx.User, ctx.User, models.CreateRepoOptions{ + Name: setting.FileNoteBook.ProjectName, + Alias: "", + Description: "", + IssueLabels: "", + Gitignores: "", + License: "", + Readme: "Default", + IsPrivate: false, + AutoInit: true, + DefaultBranch: "master", + }) + } + if err != nil { + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.failed_to_create_notebook_repo",setting.FileNoteBook.ProjectName))) + return + } + if option.Type <= 1 { + cloudBrainFileNoteBookCreate(ctx, option, repo, sourceRepo) + } else { + modelartsFileNoteBookCreate(ctx, option, repo, sourceRepo) + } + +} + +func cloudBrainFileNoteBookCreate(ctx *context.Context, option api.CreateFileNotebookJobOption, repo *models.Repository, sourceRepo *models.Repository) { + + displayJobName := cloudbrainService.GetDisplayJobName(ctx.User.Name) + jobName := util.ConvertDisplayJobNameToJobName(displayJobName) + jobType := string(models.JobTypeDebug) + + lock := redis_lock.NewDistributeLock(redis_key.CloudbrainBindingJobNameKey(fmt.Sprint(repo.ID), jobType, displayJobName)) + defer lock.UnLock() + isOk, err := lock.Lock(models.CloudbrainKeyDuration) + if !isOk { + log.Error("lock processed failed:%v", err, ctx.Data["MsgID"]) + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.cloudbrain_samejob_err"))) + return + } + + tasks, err := models.GetCloudbrainsByDisplayJobName(repo.ID, jobType, displayJobName) + if err == nil { + if len(tasks) != 0 { + log.Error("the job name did already exist", ctx.Data["MsgID"]) + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.cloudbrain_samejob_err"))) + return + } + } else { + if !models.IsErrJobNotExist(err) { + log.Error("system error, %v", err, ctx.Data["MsgID"]) + ctx.JSON(http.StatusOK, 
models.BaseErrorMessageApi("system error.")) + return + } + } + + count, err := GetNotFinalStatusTaskCount(ctx.User.ID, models.TypeCloudBrainOne, jobType) + if err != nil { + log.Error("GetCloudbrainCountByUserID failed:%v", err, ctx.Data["MsgID"]) + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("system error.")) + return + } else { + if count >= 1 { + log.Error("the user already has running or waiting task", ctx.Data["MsgID"]) + ctx.JSON(http.StatusOK,models.BaseMessageApi{ + Code: 2, + Message: ctx.Tr("repo.cloudbrain.morethanonejob"), + }) + return + } + } + + errStr := uploadCodeFile(sourceRepo, getCodePath(jobName), option.BranchName, option.File, jobName) + if errStr != "" { + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.notebook_file_not_exist"))) + return + } + command := cloudbrain.GetCloudbrainDebugCommand() + specId := setting.FileNoteBook.SpecIdGPU + if option.Type == 0 { + specId = setting.FileNoteBook.SpecIdCPU + } + spec, err := resource.GetAndCheckSpec(ctx.User.ID, specId, models.FindSpecsOptions{ + JobType: models.JobType(jobType), + ComputeResource: models.GPU, + Cluster: models.OpenICluster, + AiCenterCode: models.AICenterOfCloudBrainOne}) + if err != nil || spec == nil { + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("cloudbrain.wrong_specification"))) + return + } + + if !account.IsPointBalanceEnough(ctx.User.ID, spec.UnitPrice) { + log.Error("point balance is not enough,userId=%d specId=%d", ctx.User.ID, spec.ID) + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("points.insufficient_points_balance"))) + return + } + ctx.Repo = &context.Repository{ + Repository: repo, + } + + req := cloudbrain.GenerateCloudBrainTaskReq{ + Ctx: ctx, + DisplayJobName: displayJobName, + JobName: jobName, + Image: setting.FileNoteBook.ImageGPU, + Command: command, + Uuids: "", + DatasetNames: "", + DatasetInfos: nil, + CodePath: storage.GetMinioPath(jobName, cloudbrain.CodeMountPath+"/"), + ModelPath: storage.GetMinioPath(jobName, cloudbrain.ModelMountPath+"/"), + BenchmarkPath: storage.GetMinioPath(jobName, cloudbrain.BenchMarkMountPath+"/"), + Snn4ImageNetPath: storage.GetMinioPath(jobName, cloudbrain.Snn4imagenetMountPath+"/"), + BrainScorePath: storage.GetMinioPath(jobName, cloudbrain.BrainScoreMountPath+"/"), + JobType: jobType, + Description: getDescription(option), + BranchName: option.BranchName, + BootFile: option.File, + Params: "{\"parameter\":[]}", + CommitID: "", + BenchmarkTypeID: 0, + BenchmarkChildTypeID: 0, + ResultPath: storage.GetMinioPath(jobName, cloudbrain.ResultPath+"/"), + Spec: spec, + } + + jobId, err := cloudbrain.GenerateTask(req) + if err != nil { + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(err.Error())) + return + } + ctx.JSON(http.StatusOK, models.BaseMessageApi{ + Code: 0, + Message: jobId, + }) + +} + +func getCodePath(jobName string) string { + return setting.JobPath + jobName + cloudbrain.CodeMountPath +} + +func getDescription(option api.CreateFileNotebookJobOption) string { + return option.OwnerName + "/" + option.ProjectName + "/" + option.File +} + +func modelartsFileNoteBookCreate(ctx *context.Context, option api.CreateFileNotebookJobOption, repo *models.Repository, sourceRepo *models.Repository) { + displayJobName := cloudbrainService.GetDisplayJobName(ctx.User.Name) + jobName := util.ConvertDisplayJobNameToJobName(displayJobName) + + lock := redis_lock.NewDistributeLock(redis_key.CloudbrainBindingJobNameKey(fmt.Sprint(repo.ID), string(models.JobTypeDebug), displayJobName)) + isOk, err := 
lock.Lock(models.CloudbrainKeyDuration) + if !isOk { + log.Error("lock processed failed:%v", err, ctx.Data["MsgID"]) + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.cloudbrain_samejob_err"))) + return + } + defer lock.UnLock() + + count, err := GetNotFinalStatusTaskCount(ctx.User.ID, models.TypeCloudBrainTwo, string(models.JobTypeDebug)) + + if err != nil { + log.Error("GetCloudbrainNotebookCountByUserID failed:%v", err, ctx.Data["MsgID"]) + + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("system error.")) + return + } else { + if count >= 1 { + log.Error("the user already has running or waiting task", ctx.Data["MsgID"]) + ctx.JSON(http.StatusOK,models.BaseMessageApi{ + Code: 2, + Message: ctx.Tr("repo.cloudbrain.morethanonejob"), + }) + return + } + } + + tasks, err := models.GetCloudbrainsByDisplayJobName(repo.ID, string(models.JobTypeDebug), displayJobName) + if err == nil { + if len(tasks) != 0 { + log.Error("the job name did already exist", ctx.Data["MsgID"]) + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.cloudbrain_samejob_err"))) + return + } + } else { + if !models.IsErrJobNotExist(err) { + log.Error("system error, %v", err, ctx.Data["MsgID"]) + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("system error.")) + return + } + } + + err = downloadCode(sourceRepo, getCodePath(jobName), option.BranchName) + if err != nil { + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("cloudbrain.load_code_failed"))) + return + } + + var aiCenterCode = models.AICenterOfCloudBrainTwo + var specId = setting.FileNoteBook.SpecIdNPU + if setting.ModelartsCD.Enabled { + aiCenterCode = models.AICenterOfChengdu + specId = setting.FileNoteBook.SpecIdNPUCD + } + spec, err := resource.GetAndCheckSpec(ctx.User.ID, specId, models.FindSpecsOptions{ + JobType: models.JobTypeDebug, + ComputeResource: models.NPU, + Cluster: models.OpenICluster, + AiCenterCode: aiCenterCode}) + if err != nil || spec == nil { + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("cloudbrain.wrong_specification"))) + return + } + if !account.IsPointBalanceEnough(ctx.User.ID, spec.UnitPrice) { + log.Error("point balance is not enough,userId=%d specId=%d ", ctx.User.ID, spec.ID) + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("points.insufficient_points_balance"))) + return + } + ctx.Repo = &context.Repository{ + Repository: repo, + } + + var jobId string + if setting.ModelartsCD.Enabled { + jobId, err = modelarts_cd.GenerateNotebook(ctx, displayJobName, jobName, "", getDescription(option), setting.FileNoteBook.ImageIdNPUCD, spec, option.File,modelarts.AutoStopDurationMs/4) + } else { + jobId, err = modelarts.GenerateNotebook2(ctx, displayJobName, jobName, "", getDescription(option), setting.FileNoteBook.ImageIdNPU, spec, option.File,modelarts.AutoStopDurationMs/4) + } + + if err != nil { + log.Error("GenerateNotebook2 failed, %v", err, ctx.Data["MsgID"]) + + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(err.Error())) + + return + } + + ctx.JSON(http.StatusOK, models.BaseMessageApi{ + Code: 0, + Message: jobId, + }) + +} + +func isNoteBookFileExist(ctx *context.Context, option api.CreateFileNotebookJobOption) (bool, error) { + repoPathOfNoteBook := models.RepoPath(option.OwnerName, option.ProjectName) + + gitRepoOfNoteBook, err := git.OpenRepository(repoPathOfNoteBook) + if err != nil { + log.Error("RepoRef Invalid repo "+repoPathOfNoteBook, err.Error()) + return false, err + } + // We opened it, we should close it + defer func() { + // If it's been set to nil 
then assume someone else has closed it. + if gitRepoOfNoteBook != nil { + gitRepoOfNoteBook.Close() + } + }() + fileExist, err := fileExists(gitRepoOfNoteBook, option.File, option.BranchName) + if err != nil || !fileExist { + log.Error("Get file error:", err, ctx.Data["MsgID"]) + + return false, err + } + return true, nil +} + +func uploadCodeFile(repo *models.Repository, codePath string, branchName string, filePath string, jobName string) string { + err := downloadCode(repo, codePath, branchName) + if err != nil { + return "cloudbrain.load_code_failed" + } + + err = uploadOneFileToMinio(codePath, filePath, jobName, cloudbrain.CodeMountPath+"/") + if err != nil { + return "cloudbrain.load_code_failed" + } + return "" +} + +func fileExists(gitRepo *git.Repository, path string, branch string) (bool, error) { + + commit, err := gitRepo.GetBranchCommit(branch) + if err != nil { + return false, err + } + if _, err := commit.GetTreeEntryByPath(path); err != nil { + return false, err + } + return true, nil +} diff --git a/services/cloudbrain/cloudbrainTask/sync_status.go b/services/cloudbrain/cloudbrainTask/sync_status.go index 67dc4d3b7..973b9bbc2 100644 --- a/services/cloudbrain/cloudbrainTask/sync_status.go +++ b/services/cloudbrain/cloudbrainTask/sync_status.go @@ -1,20 +1,21 @@ package cloudbrainTask import ( - "net/http" - "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/cloudbrain" - "code.gitea.io/gitea/modules/httplib" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/notification" "code.gitea.io/gitea/modules/setting" + "net/http" + "strconv" ) var noteBookOKMap = make(map[int64]int, 20) +var noteBookFailMap = make(map[int64]int, 20) -//if a task notebook url can get two times, the notebook can browser. +//if a task notebook url can get successfulCount times, the notebook can browser. 
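// Editorial note (not part of the patch): isNoteBookReady below probes the notebook
// URL once per status sync and keeps per-task success and failure counters. The
// notebook is only reported as browsable after successfulCount OK responses when a
// probe has also failed since the first success, or after maxSuccessfulCount OK
// responses otherwise, so a single stray 200 does not open the debug page before
// JupyterLab is actually serving.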
 const successfulCount = 3
+const maxSuccessfulCount = 10
 
 func SyncCloudBrainOneStatus(task *models.Cloudbrain) (*models.Cloudbrain, error) {
 	jobResult, err := cloudbrain.GetJob(task.JobID)
@@ -62,21 +63,29 @@ func isNoteBookReady(task *models.Cloudbrain) bool {
 		return true
 	}
 	noteBookUrl := setting.DebugServerHost + "jpylab_" + task.JobID + "_" + task.SubTaskName
-	r := httplib.Get(noteBookUrl)
-	res, err := r.Response()
+	res, err := http.Get(noteBookUrl)
 	if err != nil {
 		return false
 	}
+	log.Info("notebook success count:" + strconv.Itoa(noteBookOKMap[task.ID]) + ", fail count:" + strconv.Itoa(noteBookFailMap[task.ID]))
 	if res.StatusCode == http.StatusOK {
 		count := noteBookOKMap[task.ID]
-		if count < successfulCount-1 {
+		if count == 0 { // on the first success, reset the failure counter
+			noteBookFailMap[task.ID] = 0
+		}
+
+		if count < successfulCount-1 || (noteBookFailMap[task.ID] == 0 && count < maxSuccessfulCount-1) {
 			noteBookOKMap[task.ID] = count + 1
 			return false
 		} else {
+			log.Info("notebook success count:" + strconv.Itoa(count) + ", fail count:" + strconv.Itoa(noteBookFailMap[task.ID]))
 			delete(noteBookOKMap, task.ID)
+			delete(noteBookFailMap, task.ID)
 			return true
 		}
+	} else {
+		noteBookFailMap[task.ID] += 1
 	}
 	return false
diff --git a/services/cloudbrain/cloudbrainTask/train.go b/services/cloudbrain/cloudbrainTask/train.go
index 8e4673d66..00d01a7ce 100644
--- a/services/cloudbrain/cloudbrainTask/train.go
+++ b/services/cloudbrain/cloudbrainTask/train.go
@@ -810,6 +810,18 @@ func uploadCodeToMinio(codePath, jobName, parentDir string) error {
 	return nil
 }
 
+func uploadOneFileToMinio(codePath, filePath, jobName, parentDir string) error {
+	destObject := setting.CBCodePathPrefix + jobName + parentDir + path.Base(filePath)
+	sourceFile := codePath + "/" + filePath
+	err := storage.Attachments.UploadObject(destObject, sourceFile)
+	if err != nil {
+		log.Error("UploadObject(%s) failed: %s", filePath, err.Error())
+		return err
+	}
+	return nil
+
+}
+
 func readDir(dirname string) ([]os.FileInfo, error) {
 	f, err := os.Open(dirname)
 	if err != nil {
diff --git a/services/cloudbrain/resource/resource_specification.go b/services/cloudbrain/resource/resource_specification.go
index 8f4182d87..5070d7c1e 100644
--- a/services/cloudbrain/resource/resource_specification.go
+++ b/services/cloudbrain/resource/resource_specification.go
@@ -246,10 +246,10 @@ func FindAvailableSpecs(userId int64, opts models.FindSpecsOptions) ([]*models.S
 		return nil, err
 	}
 	//filter exclusive specs
-	specs := filterExclusiveSpecs(r, userId)
+	specs := models.FilterExclusiveSpecs(r, userId)
 
 	//distinct by sourceSpecId
-	specs = distinctSpecs(specs)
+	specs = models.DistinctSpecs(specs)
 	return specs, err
 }
 
@@ -265,50 +265,6 @@ func FindAvailableSpecs4Show(userId int64, opts models.FindSpecsOptions) ([]*api
 	return result, nil
 }
 
-func filterExclusiveSpecs(r []*models.Specification, userId int64) []*models.Specification {
-	specs := make([]*models.Specification, 0, len(r))
-	specMap := make(map[int64]string, 0)
-	for i := 0; i < len(r); i++ {
-		spec := r[i]
-		if _, has := specMap[spec.ID]; has {
-			continue
-		}
-		if !spec.IsExclusive {
-			specs = append(specs, spec)
-			specMap[spec.ID] = ""
-			continue
-		}
-		orgs := strings.Split(spec.ExclusiveOrg, ";")
-		for _, org := range orgs {
-			isMember, _ := models.IsOrganizationMemberByOrgName(org, userId)
-			if isMember {
-				specs = append(specs, spec)
-				specMap[spec.ID] = ""
-				break
-			}
-		}
-	}
-	return specs
-}
-
-func distinctSpecs(r []*models.Specification) []*models.Specification {
-	specs := make([]*models.Specification, 0, len(r))
-
sourceSpecIdMap := make(map[string]string, 0) - for i := 0; i < len(r); i++ { - spec := r[i] - if spec.SourceSpecId == "" { - specs = append(specs, spec) - continue - } - if _, has := sourceSpecIdMap[spec.SourceSpecId]; has { - continue - } - specs = append(specs, spec) - sourceSpecIdMap[spec.SourceSpecId] = "" - } - return specs -} - func GetAndCheckSpec(userId int64, specId int64, opts models.FindSpecsOptions) (*models.Specification, error) { if specId == 0 { return nil, nil diff --git a/services/cloudbrain/util.go b/services/cloudbrain/util.go index ab738927e..0a3096e3f 100644 --- a/services/cloudbrain/util.go +++ b/services/cloudbrain/util.go @@ -1,27 +1,33 @@ package cloudbrain import ( + "regexp" + "strconv" + "strings" + "time" + + + "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/setting" - "strings" ) -func GetAiCenterShow(aiCenter string,ctx *context.Context) string{ +func GetAiCenterShow(aiCenter string, ctx *context.Context) string { aiCenterInfo := strings.Split(aiCenter, "+") - if len(aiCenterInfo) == 2{ - if setting.C2NetMapInfo!=nil { - if info,ok:=setting.C2NetMapInfo[aiCenterInfo[0]];ok { + if len(aiCenterInfo) == 2 { + if setting.C2NetMapInfo != nil { + if info, ok := setting.C2NetMapInfo[aiCenterInfo[0]]; ok { if ctx.Language() == "zh-CN" { return info.Content } else { return info.ContentEN } - }else{ + } else { return aiCenterInfo[1] } - }else{ + } else { return aiCenterInfo[1] } @@ -29,5 +35,64 @@ func GetAiCenterShow(aiCenter string,ctx *context.Context) string{ return "" +} + +func GetDisplayJobName(username string) string { + t := time.Now() + return jobNamePrefixValid(cutString(username, 5)) + t.Format("2006010215") + strconv.Itoa(int(t.Unix()))[5:] +} + +func cutString(str string, lens int) string { + if len(str) < lens { + return str + } + return str[:lens] +} + +func jobNamePrefixValid(s string) string { + lowStr := strings.ToLower(s) + re := regexp.MustCompile(`[^a-z0-9_\\-]+`) + + removeSpecial := re.ReplaceAllString(lowStr, "") + + re = regexp.MustCompile(`^[_\\-]+`) + return re.ReplaceAllString(removeSpecial, "") +} + +func GetAiCenterInfoByCenterCode(aiCenterCode string) *setting.C2NetSequenceInfo { + if setting.AiCenterCodeAndNameMapInfo != nil { + if info, ok := setting.AiCenterCodeAndNameMapInfo[aiCenterCode]; ok { + return info + } else { + return nil + } + } else { + return nil + } +} + +func getAiCenterCode(aiCenter string) string { + aiCenterInfo := strings.Split(aiCenter, "+") + return aiCenterInfo[0] +} + +func UpdateCloudbrainAiCenter(cloudbrain *models.CloudbrainInfo) *models.CloudbrainInfo { + if cloudbrain.Cloudbrain.Type == models.TypeCloudBrainOne { + cloudbrain.Cloudbrain.AiCenter = models.AICenterOfCloudBrainOne + cloudbrain.Cloudbrain.Cluster = models.OpenICluster + } + if cloudbrain.Cloudbrain.Type == models.TypeCloudBrainTwo { + cloudbrain.Cloudbrain.AiCenter = models.AICenterOfCloudBrainTwo + cloudbrain.Cloudbrain.Cluster = models.OpenICluster + } + if cloudbrain.Cloudbrain.Type == models.TypeCDCenter { + cloudbrain.Cloudbrain.AiCenter = models.AICenterOfChengdu + cloudbrain.Cloudbrain.Cluster = models.OpenICluster + } + if cloudbrain.Cloudbrain.Type == models.TypeC2Net { + cloudbrain.Cloudbrain.AiCenter = getAiCenterCode(cloudbrain.Cloudbrain.AiCenter) + cloudbrain.Cloudbrain.Cluster = models.C2NetCluster + } + return cloudbrain } diff --git a/services/repository/repository.go b/services/repository/repository.go index b4c047392..db25010ea 100644 --- a/services/repository/repository.go 
+++ b/services/repository/repository.go @@ -107,18 +107,13 @@ func GetRecommendCourseKeyWords() ([]string, error) { } -func GetRecommendRepoFromPromote(filename string) ([]map[string]interface{}, error) { +func GetRecommendRepoFromPromote(repoMap []map[string]string) ([]map[string]interface{}, error) { resultRepo := make([]map[string]interface{}, 0) - url := setting.RecommentRepoAddr + filename - result, err := RecommendFromPromote(url) - - if err != nil { - - return resultRepo, err - } //resultRepo := make([]*models.Repository, 0) - for _, repoName := range result { + for _, record := range repoMap { + repoName := record["project_url"] + //log.Info("repoName=" + repoName + " tmpIndex1=" + fmt.Sprint(tmpIndex1) + " len(repoName)=" + fmt.Sprint(len(repoName))) tmpIndex := strings.Index(repoName, "/") if tmpIndex == -1 { log.Info("error repo name format.") @@ -131,7 +126,8 @@ func GetRecommendRepoFromPromote(filename string) ([]map[string]interface{}, err repoMap["ID"] = fmt.Sprint(repo.ID) repoMap["Name"] = repo.Name repoMap["Alias"] = repo.Alias - + repoMap["Label"] = record["class"] + repoMap["Label_en"] = record["class_en"] repoMap["OwnerName"] = repo.OwnerName repoMap["NumStars"] = repo.NumStars repoMap["NumForks"] = repo.NumForks diff --git a/templates/admin/cloudbrain/list.tmpl b/templates/admin/cloudbrain/list.tmpl index b6dfec77d..94f80c0fa 100755 --- a/templates/admin/cloudbrain/list.tmpl +++ b/templates/admin/cloudbrain/list.tmpl @@ -170,7 +170,7 @@
- {{if .AiCenter}}{{.AiCenter}}{{else}}--{{end}} + {{if .AiCenter}}{{.AiCenter}}{{else}}--{{end}}
@@ -184,16 +184,16 @@ spanEl.setAttribute('title', cardType); spanEl.innerText = cardType; - var cluster = spec.Cluster || '--'; + var cluster = {{.Cluster}} || '--'; var clusterName = document.querySelector('.cloudbrain_debug').dataset['cluster' + cluster[0] + cluster.toLocaleLowerCase().slice(1)] || '--'; spanEl = document.querySelector('.cluster_{{.DisplayJobName}}_{{$JobID}}'); spanEl.setAttribute('title', cluster); spanEl.innerText = clusterName; - var aiCenter = spec.AiCenterName || '--'; - spanEl = document.querySelector('.aicenter_{{.DisplayJobName}}_{{$JobID}}'); - spanEl.setAttribute('title', aiCenter); - spanEl.innerText = aiCenter; + // var aiCenter = spec.AiCenterName || '--'; + // spanEl = document.querySelector('.aicenter_{{.DisplayJobName}}_{{$JobID}}'); + // spanEl.setAttribute('title', aiCenter); + // spanEl.innerText = aiCenter; })(); @@ -238,6 +238,7 @@ {{$.i18n.Tr "repo.debug"}} {{else}} + {{if not .BootFile}} {{end}} + {{end}}
{{end}} diff --git a/templates/admin/cloudbrain/search.tmpl b/templates/admin/cloudbrain/search.tmpl index 12e8a7515..09e5f865e 100644 --- a/templates/admin/cloudbrain/search.tmpl +++ b/templates/admin/cloudbrain/search.tmpl @@ -71,22 +71,20 @@ document.addEventListener('DOMContentLoaded', function() { $.ajax({ type: "GET", - url: "/api/v1/cloudbrain/get_center_info", + url: "/api/v1/cloudbrainboard/cloudbrain/resource_queues", dataType: "json", data: {}, success: function (res) { - var data = res || []; + var data = res.resourceQueues || []; var aiCenterSelEl = $('#aiCenter-sel'); var itemEl = aiCenterSelEl.find('.menu .item').eq(0); var selectAiCenterCode = aiCenterSelEl.find('.default').attr('aicenter'); var selectAiCenterName = ''; var lang = document.querySelector('html').getAttribute('lang') || 'en-US'; - var except = ['', 'more']; for (var i = 0, iLen = data.length; i < iLen; i++) { var dataI = data[i]; - var aiCenterCode = dataI.name; - if (except.indexOf(aiCenterCode) >= 0) continue; - var aiCenterName = lang === 'en-US' ? dataI.content_en : dataI.content; + var aiCenterCode = dataI.AiCenterCode; + var aiCenterName = dataI.AiCenterName; var itemClone = itemEl.clone(); var oHref = itemClone.attr('href'); var oId = itemClone.attr('id'); diff --git a/templates/admin/cloudbrain/search_dashboard.tmpl b/templates/admin/cloudbrain/search_dashboard.tmpl index 2bf738dc9..7c4c1527d 100644 --- a/templates/admin/cloudbrain/search_dashboard.tmpl +++ b/templates/admin/cloudbrain/search_dashboard.tmpl @@ -85,22 +85,20 @@ document.addEventListener('DOMContentLoaded', function() { $.ajax({ type: "GET", - url: "/api/v1/cloudbrain/get_center_info", + url: "/api/v1/cloudbrainboard/cloudbrain/resource_queues", dataType: "json", data: {}, success: function (res) { - var data = res || []; + var data = res.resourceQueues || []; var aiCenterSelEl = $('#aiCenter-sel'); var itemEl = aiCenterSelEl.find('.menu .item').eq(0); var selectAiCenterCode = aiCenterSelEl.find('.default').attr('aicenter'); var selectAiCenterName = ''; var lang = document.querySelector('html').getAttribute('lang') || 'en-US'; - var except = ['', 'more']; for (var i = 0, iLen = data.length; i < iLen; i++) { var dataI = data[i]; - var aiCenterCode = dataI.name; - if (except.indexOf(aiCenterCode) >= 0) continue; - var aiCenterName = lang === 'en-US' ? dataI.content_en : dataI.content; + var aiCenterCode = dataI.AiCenterCode; + var aiCenterName = dataI.AiCenterName; var itemClone = itemEl.clone(); var oHref = itemClone.attr('href'); var oId = itemClone.attr('id'); diff --git a/templates/base/footer_content.tmpl b/templates/base/footer_content.tmpl index b4c8518c4..3a35e69a3 100755 --- a/templates/base/footer_content.tmpl +++ b/templates/base/footer_content.tmpl @@ -1,15 +1,17 @@ -