diff --git a/models/action.go b/models/action.go
index 4b6f1dbad..869acb762 100755
--- a/models/action.go
+++ b/models/action.go
@@ -65,6 +65,8 @@ const (
ActionCreateImage //36
ActionImageRecommend //37
ActionChangeUserAvatar //38
+ ActionCreateGrampusNPUDebugTask //39
+ ActionCreateGrampusGPUDebugTask //40
)
// Action represents user operation type and other information to
@@ -375,6 +377,8 @@ func (a *Action) IsCloudbrainAction() bool {
ActionCreateInferenceTask,
ActionCreateBenchMarkTask,
ActionCreateGPUTrainTask,
+ ActionCreateGrampusGPUDebugTask,
+ ActionCreateGrampusNPUDebugTask,
ActionCreateGrampusNPUTrainTask,
ActionCreateGrampusGPUTrainTask:
return true
diff --git a/models/ai_model_manage.go b/models/ai_model_manage.go
index 5b14b9ba2..866088a35 100644
--- a/models/ai_model_manage.go
+++ b/models/ai_model_manage.go
@@ -33,6 +33,7 @@ type AiModelManage struct {
CodeBranch string `xorm:"varchar(400) NULL" json:"codeBranch"`
CodeCommitID string `xorm:"NULL" json:"codeCommitID"`
UserId int64 `xorm:"NOT NULL" json:"userId"`
+ IsPrivate bool `xorm:"DEFAULT true" json:"isPrivate"`
UserName string `json:"userName"`
UserRelAvatarLink string `json:"userRelAvatarLink"`
TrainTaskInfo string `xorm:"text NULL" json:"trainTaskInfo"`
@@ -40,6 +41,7 @@ type AiModelManage struct {
UpdatedUnix timeutil.TimeStamp `xorm:"updated" json:"updatedUnix"`
IsCanOper bool `json:"isCanOper"`
IsCanDelete bool `json:"isCanDelete"`
+ IsCanDownload bool `json:"isCanDownload"`
}
type AiModelConvert struct {
@@ -84,8 +86,10 @@ type AiModelQueryOptions struct {
SortType string
New int
// JobStatus CloudbrainStatus
- Type int
- Status int
+ Type int
+ Status int
+ IsOnlyThisRepo bool
+ IsQueryPrivate bool
}
func (a *AiModelConvert) IsGpuTrainTask() bool {
@@ -217,6 +221,19 @@ func SaveModelToDb(model *AiModelManage) error {
return nil
}
+func QueryModelConvertByName(name string, repoId int64) ([]*AiModelConvert, error) {
+ sess := x.NewSession()
+ defer sess.Close()
+ sess.Select("*").Table(new(AiModelConvert)).
+ Where("name=? and repo_id=?", name, repoId).OrderBy("created_unix desc")
+ aiModelManageConvertList := make([]*AiModelConvert, 0)
+ err := sess.Find(&aiModelManageConvertList)
+ if err == nil {
+ return aiModelManageConvertList, nil
+ }
+ return nil, err
+}
+
func QueryModelConvertById(id string) (*AiModelConvert, error) {
sess := x.NewSession()
defer sess.Close()
@@ -288,15 +305,30 @@ func ModifyModelDescription(id string, description string) error {
return nil
}
-func ModifyLocalModel(id string, name, label, description string, engine int) error {
+func ModifyModelPrivate(id string, isPrivate bool) error {
var sess *xorm.Session
sess = x.ID(id)
defer sess.Close()
- re, err := sess.Cols("name", "label", "description", "engine").Update(&AiModelManage{
+ re, err := sess.Cols("is_private").Update(&AiModelManage{
+ IsPrivate: isPrivate,
+ })
+ if err != nil {
+ return err
+ }
+ log.Info("success to update isPrivate from db.re=" + fmt.Sprint(re))
+ return nil
+}
+
+func ModifyLocalModel(id string, name, label, description string, engine int, isPrivate bool) error {
+ var sess *xorm.Session
+ sess = x.ID(id)
+ defer sess.Close()
+ re, err := sess.Cols("name", "label", "description", "engine", "is_private").Update(&AiModelManage{
Description: description,
Name: name,
Label: label,
Engine: int64(engine),
+ IsPrivate: isPrivate,
})
if err != nil {
return err
@@ -371,6 +403,18 @@ func QueryModelByName(name string, repoId int64) []*AiModelManage {
return aiModelManageList
}
+func QueryModelByPath(path string) (*AiModelManage, error) {
+ modelManage := new(AiModelManage)
+ has, err := x.Where("path=?", path).Get(modelManage)
+ if err != nil {
+ return nil, err
+ }
+ if !has {
+ return nil, ErrNotExist{}
+ }
+ return modelManage, nil
+}
+
func QueryModel(opts *AiModelQueryOptions) ([]*AiModelManage, int64, error) {
sess := x.NewSession()
defer sess.Close()
@@ -411,7 +455,11 @@ func QueryModel(opts *AiModelQueryOptions) ([]*AiModelManage, int64, error) {
builder.Eq{"ai_model_manage.status": opts.Status},
)
}
-
+ if !opts.IsQueryPrivate {
+ cond = cond.And(
+ builder.Eq{"ai_model_manage.is_private": false},
+ )
+ }
count, err := sess.Where(cond).Count(new(AiModelManage))
if err != nil {
return nil, 0, fmt.Errorf("Count: %v", err)
diff --git a/models/cloudbrain.go b/models/cloudbrain.go
index 3dc8d9694..34869c1ee 100755
--- a/models/cloudbrain.go
+++ b/models/cloudbrain.go
@@ -114,6 +114,7 @@ const (
GrampusStatusFailed = "FAILED"
GrampusStatusSucceeded = "SUCCEEDED"
GrampusStatusStopped = "STOPPED"
+ GrampusStatusStopping = "STOPPING"
GrampusStatusUnknown = "UNKNOWN"
GrampusStatusWaiting = "WAITING"
@@ -181,7 +182,7 @@ type Cloudbrain struct {
BranchName string //分支名称
Parameters string //传给modelarts的param参数
BootFile string //启动文件
- DataUrl string //数据集的obs路径
+ DataUrl string `xorm:"varchar(3500)"` //数据集的obs路径
LogUrl string //日志输出的obs路径
PreVersionId int64 //父版本的版本id
FlavorCode string //modelarts上的规格id
@@ -204,6 +205,7 @@ type Cloudbrain struct {
BenchmarkTypeRankLink string `xorm:"-"`
StartTime timeutil.TimeStamp
EndTime timeutil.TimeStamp
+ Cleared bool `xorm:"DEFAULT false"`
Spec *Specification `xorm:"-"`
}
@@ -297,6 +299,12 @@ func (task *Cloudbrain) IsUserHasRight(user *User) bool {
}
return user.IsAdmin || user.ID == task.UserID
}
+func (task *Cloudbrain) IsGPUTask() bool {
+ return task.ComputeResource == GPUResource
+}
+func (task *Cloudbrain) IsNPUTask() bool {
+ return task.ComputeResource == NPUResource
+}
func ConvertDurationToStr(duration int64) string {
if duration <= 0 {
@@ -1060,6 +1068,9 @@ type UserImageConfig struct {
CreateVersion bool `json:"create_version"`
Flavor Flavor `json:"flavor"`
PoolID string `json:"pool_id"`
+ ShareAddr string `json:"nas_share_addr"`
+ MountPath string `json:"nas_mount_path"`
+ NasType string `json:"nas_type"`
}
type CreateTrainJobParams struct {
@@ -1083,13 +1094,18 @@ type Config struct {
CreateVersion bool `json:"create_version"`
Flavor Flavor `json:"flavor"`
PoolID string `json:"pool_id"`
+ ShareAddr string `json:"nas_share_addr"`
+ MountPath string `json:"nas_mount_path"`
+ NasType string `json:"nas_type"`
}
+
type CreateInferenceJobParams struct {
JobName string `json:"job_name"`
Description string `json:"job_desc"`
InfConfig InfConfig `json:"config"`
WorkspaceID string `json:"workspace_id"`
}
+
type CreateInfUserImageParams struct {
JobName string `json:"job_name"`
Description string `json:"job_desc"`
@@ -1147,6 +1163,9 @@ type TrainJobVersionConfig struct {
Flavor Flavor `json:"flavor"`
PoolID string `json:"pool_id"`
PreVersionId int64 `json:"pre_version_id"`
+ ShareAddr string `json:"nas_share_addr"`
+ MountPath string `json:"nas_mount_path"`
+ NasType string `json:"nas_type"`
}
type TrainJobVersionUserImageConfig struct {
@@ -1162,6 +1181,9 @@ type TrainJobVersionUserImageConfig struct {
PreVersionId int64 `json:"pre_version_id"`
UserImageUrl string `json:"user_image_url"`
UserCommand string `json:"user_command"`
+ ShareAddr string `json:"nas_share_addr"`
+ MountPath string `json:"nas_mount_path"`
+ NasType string `json:"nas_type"`
}
type CreateConfigParams struct {
@@ -1177,6 +1199,7 @@ type CreateConfigParams struct {
LogUrl string `json:"log_url"`
Flavor Flavor `json:"flavor"`
PoolID string `json:"pool_id"`
+ Volumes []Volumes `json:"volumes"`
}
type Parameter struct {
@@ -1199,6 +1222,13 @@ type DatasetDownload struct {
IsDelete bool `json:"is_delete"`
}
+type ModelDownload struct {
+ Name string `json:"name"`
+ DownloadLink string `json:"download_link"`
+ RepositoryLink string `json:"repository_link"`
+ IsDelete bool `json:"is_delete"`
+}
+
type DataSource struct {
DatasetID string `json:"dataset_id"`
DatasetVersion string `json:"dataset_version"`
@@ -1442,6 +1472,20 @@ type GrampusJobInfo struct {
UserID string `json:"userId"`
Tasks []GrampusTasks `json:"tasks"`
}
+
+type GrampusNotebookInfo struct {
+ StartedAt int64 `json:"startedAt"`
+ RunSec int64 `json:"runSec"`
+ CompletedAt int64 `json:"completedAt"`
+ CreatedAt int64 `json:"createdAt"`
+ UpdatedAt int64 `json:"updatedAt"`
+ Desc string `json:"desc"`
+ JobID string `json:"id"`
+ Name string `json:"name"`
+ Status string `json:"status"`
+ UserID string `json:"userId"`
+ Tasks []GrampusNotebookTask `json:"tasks"`
+}
type Center struct {
ID string `json:"id"`
Name string `json:"name"`
@@ -1518,9 +1562,22 @@ type GetGrampusJobResponse struct {
JobInfo GrampusJobInfo `json:"otJob"`
}
+type GrampusNotebookResponse struct {
+ GrampusResult
+ JobInfo GrampusNotebookInfo `json:"otJob"`
+}
+
+type GrampusNotebookRestartResponse struct {
+ GrampusResult
+ NewId string `json:"newId"`
+ Status string `json:"status"`
+}
+
type GrampusStopJobResponse struct {
GrampusResult
- StoppedAt int64 `json:"stoppedAt"`
+ StoppedAt int64 `json:"stoppedAt"`
+ ID string `json:"id"`
+ Status string `json:"status"`
}
type GrampusTasks struct {
@@ -1537,12 +1594,32 @@ type GrampusTasks struct {
Code GrampusDataset `json:"code"`
BootFile string `json:"bootFile"`
}
+type GrampusNotebookTask struct {
+ AutoStopDuration int `json:"autoStopDuration"`
+ Name string `json:"name"`
+ Capacity int `json:"capacity"`
+ CenterID []string `json:"centerID"`
+ CenterName []string `json:"centerName"`
+ Code GrampusDataset `json:"code"`
+ Datasets []GrampusDataset `json:"datasets"`
+ CodeUrl string `json:"codeUrl"`
+ DataUrl string `json:"dataUrl"`
+ ImageId string `json:"imageId"`
+ ImageUrl string `json:"imageUrl"`
+ ResourceSpecId string `json:"resourceSpecId"`
+ Token string `json:"token"`
+ Url string `json:"url"`
+ Status string `json:"status"`
+ Command string `json:"command"`
+}
type GrampusDataset struct {
- Name string `json:"name"`
- Bucket string `json:"bucket"`
- EndPoint string `json:"endPoint"`
- ObjectKey string `json:"objectKey"`
+ Name string `json:"name"`
+ Bucket string `json:"bucket"`
+ EndPoint string `json:"endPoint"`
+ ObjectKey string `json:"objectKey"`
+ ContainerPath string `json:"containerPath"`
+ ReadOnly bool `json:"readOnly"`
}
type CreateGrampusJobRequest struct {
@@ -1550,6 +1627,11 @@ type CreateGrampusJobRequest struct {
Tasks []GrampusTasks `json:"tasks"`
}
+type CreateGrampusNotebookRequest struct {
+ Name string `json:"name"`
+ Tasks []GrampusNotebookTask `json:"tasks"`
+}
+
type GetTrainJobMetricStatisticResult struct {
TrainJobResult
Interval int `json:"interval"` //查询的时间间隔,单位为分钟
@@ -1861,6 +1943,7 @@ func CreateCloudbrain(cloudbrain *Cloudbrain) (err error) {
session.Commit()
go IncreaseDatasetUseCount(cloudbrain.Uuid)
+ go OperateRepoAITaskNum(cloudbrain.RepoID, 1)
return nil
}
@@ -1905,6 +1988,12 @@ func GetCloudbrainByID(id string) (*Cloudbrain, error) {
return getRepoCloudBrain(cb)
}
+func IsCloudbrainExistByJobName(jobName string) (bool, error) {
+ return x.Unscoped().Exist(&Cloudbrain{
+ JobName: jobName,
+ })
+}
+
func GetCloudbrainByIDWithDeleted(id string) (*Cloudbrain, error) {
idInt64, _ := strconv.ParseInt(id, 10, 64)
cb := &Cloudbrain{ID: idInt64}
@@ -2010,9 +2099,29 @@ func DeleteJob(job *Cloudbrain) error {
func deleteJob(e Engine, job *Cloudbrain) error {
_, err := e.ID(job.ID).Delete(job)
+ if err == nil {
+ go updateAITaskNumWhenDeleteJob(job)
+ }
return err
}
+func updateAITaskNumWhenDeleteJob(job *Cloudbrain) {
+ repoId := job.RepoID
+ if repoId == 0 {
+ t := &Cloudbrain{}
+ _, tempErr := x.ID(job.ID).Unscoped().Get(t)
+ if tempErr != nil {
+ log.Error("updateAITaskNumWhenDeleteJob error.%v", tempErr)
+ return
+ }
+ repoId = t.RepoID
+ }
+
+ if repoId > 0 {
+ go OperateRepoAITaskNum(repoId, -1)
+ }
+}
+
func GetCloudbrainByName(jobName string) (*Cloudbrain, error) {
cb := &Cloudbrain{JobName: jobName}
return getRepoCloudBrain(cb)
@@ -2050,6 +2159,83 @@ func GetCloudBrainUnStoppedJob() ([]*Cloudbrain, error) {
Find(&cloudbrains)
}
+func GetCloudBrainOneStoppedNotDebugJobDaysAgo(days int, limit int) ([]*Cloudbrain, error) {
+ cloudbrains := make([]*Cloudbrain, 0, 10)
+ endTimeBefore := time.Now().Unix() - int64(days)*24*3600
+ missEndTimeBefore := endTimeBefore - 24*3600
+ return cloudbrains, x.Unscoped().Cols("id,job_name,job_id").
+ In("status",
+ JobStopped, JobSucceeded, JobFailed, ModelArtsCreateFailed, ModelArtsStartFailed, ModelArtsUnavailable, ModelArtsResizFailed, ModelArtsDeleted,
+ ModelArtsStopped, ModelArtsTrainJobCanceled, ModelArtsTrainJobCheckFailed, ModelArtsTrainJobCompleted, ModelArtsTrainJobDeleteFailed, ModelArtsTrainJobDeployServiceFailed,
+ ModelArtsTrainJobFailed, ModelArtsTrainJobImageFailed, ModelArtsTrainJobKilled, ModelArtsTrainJobLost, ModelArtsTrainJobSubmitFailed, ModelArtsTrainJobSubmitModelFailed).
+ Where("(((end_time is null or end_time=0) and updated_unix<? and updated_unix != 0 ) or (end_time<? and end_time != 0)) and cleared=false and type=0 and job_type != 'DEBUG'", missEndTimeBefore, endTimeBefore).
+ Limit(limit).
+ Find(&cloudbrains)
+}
+// GetCloudBrainOneStoppedDebugJobDaysAgo returns stopped debug jobs whose latest
+// run ended more than the given days ago. It accounts for re-debugging: when a
+// job was debugged multiple times, the end time of the last run is used.
+func GetCloudBrainOneStoppedDebugJobDaysAgo(days int, limit int) ([]*Cloudbrain, error) {
+ cloudbrains := make([]*Cloudbrain, 0, 10)
+ endTimeBefore := time.Now().Unix() - int64(days)*24*3600
+ missEndTimeBefore := endTimeBefore - 24*3600
+ sql:=`SELECT id,job_name,job_id from (SELECT DISTINCT ON (job_name)
+ id, job_name, job_id,status,end_time,updated_unix,cleared
+ FROM cloudbrain
+ where type=0 and job_type='DEBUG'
+ ORDER BY job_name, updated_unix DESC) a
+ where status in ('STOPPED','SUCCEEDED','FAILED') and (((end_time is null or end_time=0) and updated_unix<? and updated_unix != 0 ) or (end_time<? and end_time != 0)) and cleared=false`
+
+ return cloudbrains, x.Unscoped().SQL(sql, missEndTimeBefore, endTimeBefore).Limit(limit).Find(&cloudbrains)
+
+}
+
+
+func UpdateCloudBrainRecordsCleared(ids []int64) error {
+ pageSize := 150
+ n := len(ids) / pageSize
+
+ var err error
+
+ for i := 1; i <= n+1; i++ {
+ tempIds := getPageIds(ids, i, pageSize)
+ if len(tempIds) > 0 {
+ idsIn := ""
+ for i, id := range tempIds {
+ if i == 0 {
+ idsIn += strconv.FormatInt(id, 10)
+ } else {
+ idsIn += "," + strconv.FormatInt(id, 10)
+ }
+ }
+
+ _, errTemp := x.Unscoped().Exec("update cloudbrain set cleared=true where id in (" + idsIn + ")")
+ if errTemp != nil {
+ err = errTemp
+ }
+
+ }
+
+ }
+ return err
+
+}
+
+func getPageIds(ids []int64, page int, pagesize int) []int64 {
+ begin := (page - 1) * pagesize
+ end := (page) * pagesize
+
+ if begin > len(ids)-1 {
+ return []int64{}
+ }
+ if end > len(ids)-1 {
+ return ids[begin:]
+ } else {
+ return ids[begin:end]
+ }
+
+}
+
func GetStoppedJobWithNoDurationJob() ([]*Cloudbrain, error) {
cloudbrains := make([]*Cloudbrain, 0)
return cloudbrains, x.
@@ -2138,7 +2324,6 @@ func RestartCloudbrain(old *Cloudbrain, new *Cloudbrain) (err error) {
}
go IncreaseDatasetUseCount(new.Uuid)
-
return nil
}
func CloudbrainAll(opts *CloudbrainsOptions) ([]*CloudbrainInfo, int64, error) {
diff --git a/models/cloudbrain_static.go b/models/cloudbrain_static.go
index a213f179c..beb1ceee5 100644
--- a/models/cloudbrain_static.go
+++ b/models/cloudbrain_static.go
@@ -92,6 +92,17 @@ type HourTimeStatistic struct {
HourTimeTotalDuration map[string]int `json:"hourTimeTotalDuration"`
HourTimeUsageRate map[string]float64 `json:"hourTimeUsageRate"`
}
+type CloudbrainTypeDuration []struct {
+ Type int `xorm:"type"`
+ DurationSum int `xorm:"durationSum"`
+ CardDurationSum int `xorm:"cardDurationSum"`
+ Count int `xorm:"count"`
+}
+type CloudbrainAllDuration struct {
+ DurationSum int `xorm:"durationSum"`
+ CardDurationSum int `xorm:"cardDurationSum"`
+ Count int `xorm:"count"`
+}
func GetTodayCreatorCount(beginTime time.Time, endTime time.Time) (int64, error) {
countSql := "SELECT count(distinct user_id) FROM " +
@@ -183,6 +194,17 @@ func GetWaittingTop() ([]*CloudbrainInfo, error) {
Find(&cloudbrains); err != nil {
log.Info("find error.")
}
+
+ var ids []int64
+ for _, task := range cloudbrains {
+ ids = append(ids, task.RepoID)
+ }
+ repositoryMap, err := GetRepositoriesMapByIDs(ids)
+ if err == nil {
+ for _, task := range cloudbrains {
+ task.Repo = repositoryMap[task.RepoID]
+ }
+ }
return cloudbrains, nil
}
@@ -199,6 +221,16 @@ func GetRunningTop() ([]*CloudbrainInfo, error) {
Find(&cloudbrains); err != nil {
log.Info("find error.")
}
+ var ids []int64
+ for _, task := range cloudbrains {
+ ids = append(ids, task.RepoID)
+ }
+ repositoryMap, err := GetRepositoriesMapByIDs(ids)
+ if err == nil {
+ for _, task := range cloudbrains {
+ task.Repo = repositoryMap[task.RepoID]
+ }
+ }
return cloudbrains, nil
}
@@ -282,7 +314,7 @@ func GetCloudbrainByTime(beginTime int64, endTime int64) ([]*CloudbrainInfo, err
builder.And(builder.Gte{"cloudbrain.start_time": beginTime}, builder.Lte{"cloudbrain.start_time": endTime}, builder.Gt{"cloudbrain.start_time": 0}),
)
cond = cond.Or(
- builder.And(builder.Eq{"cloudbrain.status": string(JobRunning)}),
+ builder.And(builder.Eq{"cloudbrain.status": string(JobRunning)}, builder.Lte{"cloudbrain.start_time": beginTime}),
)
sess.OrderBy("cloudbrain.id ASC")
cloudbrains := make([]*CloudbrainInfo, 0, 10)
@@ -404,3 +436,55 @@ func DeleteCloudbrainDurationStatistic(beginTime timeutil.TimeStamp, endTime tim
}
return nil
}
+
+func GetCloudbrainTypeCardDuration() (CloudbrainTypeDuration, error) {
+ query := `
+ SELECT
+ cloudbrain.type,
+ SUM(cloudbrain.duration) as durationSum,
+ SUM(
+ COALESCE(cloudbrain.duration *
+ CASE
+ WHEN cloudbrain.work_server_number = 0 THEN 1
+ ELSE COALESCE(cloudbrain.work_server_number, 1)
+ END *
+ COALESCE(cloudbrain_spec.acc_cards_num, 1), 0)
+ ) as cardDurationSum,
+ COUNT(*) as count
+ FROM cloudbrain
+ LEFT JOIN cloudbrain_spec
+ ON cloudbrain.id = cloudbrain_spec.cloudbrain_id
+ GROUP BY cloudbrain.type
+ `
+ // 执行查询
+ var results CloudbrainTypeDuration
+ if err := x.SQL(query).Find(&results); err != nil {
+ return nil, err
+ }
+ return results, nil
+}
+
+func GetCloudbrainAllCardDuration() (CloudbrainAllDuration, error) {
+ query := `
+ SELECT
+ SUM(cloudbrain.duration) as durationSum,
+ SUM(
+ COALESCE(cloudbrain.duration *
+ CASE
+ WHEN cloudbrain.work_server_number = 0 THEN 1
+ ELSE COALESCE(cloudbrain.work_server_number, 1)
+ END *
+ COALESCE(cloudbrain_spec.acc_cards_num, 1), 0)
+ ) as cardDurationSum,
+ COUNT(*) as count
+ FROM cloudbrain
+ LEFT JOIN cloudbrain_spec
+ ON cloudbrain.id = cloudbrain_spec.cloudbrain_id
+ `
+ // 执行查询
+ var result CloudbrainAllDuration
+ if _, err := x.SQL(query).Get(&result); err != nil {
+ return CloudbrainAllDuration{}, err
+ }
+ return result, nil
+}
diff --git a/models/list_options.go b/models/list_options.go
index 0946917fe..d6d1dcf0d 100644
--- a/models/list_options.go
+++ b/models/list_options.go
@@ -10,6 +10,26 @@ import (
"xorm.io/xorm"
)
+type AvailablePageSize int
+
+const (
+ PageSize15 AvailablePageSize = 15
+ PageSize30 AvailablePageSize = 30
+ PageSize50 AvailablePageSize = 50
+)
+
+func (s AvailablePageSize) IsLegal() bool {
+ switch s {
+ case PageSize30, PageSize50, PageSize15:
+ return true
+ }
+ return false
+}
+
+func (s AvailablePageSize) Int() int {
+ return int(s)
+}
+
// ListOptions options to paginate results
type ListOptions struct {
PageSize int
diff --git a/models/repo.go b/models/repo.go
index 832e3fc37..e390ef70d 100755
--- a/models/repo.go
+++ b/models/repo.go
@@ -231,10 +231,43 @@ type Repository struct {
CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`
- Hot int64 `xorm:"-"`
- Active int64 `xorm:"-"`
- Alias string `xorm:"INDEX"`
- LowerAlias string `xorm:"INDEX"`
+ Hot int64 `xorm:"-"`
+ Active int64 `xorm:"-"`
+ Alias string `xorm:"INDEX"`
+ LowerAlias string `xorm:"INDEX"`
+ AiTaskCnt int64 `xorm:"NOT NULL DEFAULT 0"`
+ ModelCnt int64 `xorm:"NOT NULL DEFAULT 0"`
+ DatasetCnt int64 `xorm:"NOT NULL DEFAULT 0"`
+ LastMonthVisits int64 `xorm:"NOT NULL DEFAULT 0"`
+ LastFourMonthCommits int64 `xorm:"NOT NULL DEFAULT 0"`
+}
+
+// Repository4Card format for front display
+type Repository4Card struct {
+ ID int64
+ OwnerID int64
+ OwnerName string
+ LowerName string
+ Name string
+ Alias string
+ NumWatches int
+ NumStars int
+ NumForks int
+ Description string
+ Topics []string
+ AiTaskCnt int64
+ ModelCnt int64
+ DatasetCnt int64
+ CreatedUnix timeutil.TimeStamp
+ UpdatedUnix timeutil.TimeStamp
+ PrimaryLanguage *LanguageStat
+ RelAvatarLink string
+ Contributors []*ContributorInfo
+ IsPrivate bool
+ IsFork bool
+ IsMirror bool
+ IsOwnerPrivate bool
+ IsArchived bool
}
type RepositoryShow struct {
@@ -243,6 +276,47 @@ type RepositoryShow struct {
Alias string
}
+func (repo *Repository) ToCardFormat() *Repository4Card {
+ link := repo.RelAvatarLink()
+ var isOwnerPrivate bool
+ if repo.Owner != nil && repo.Owner.Visibility.IsPrivate() {
+ isOwnerPrivate = true
+ }
+ result := &Repository4Card{
+ ID: repo.ID,
+ OwnerID: repo.OwnerID,
+ OwnerName: repo.OwnerName,
+ LowerName: repo.LowerName,
+ Name: repo.Name,
+ NumWatches: repo.NumWatches,
+ NumStars: repo.NumStars,
+ NumForks: repo.NumForks,
+ Description: repo.Description,
+ Topics: repo.Topics,
+ AiTaskCnt: repo.AiTaskCnt,
+ ModelCnt: repo.ModelCnt,
+ DatasetCnt: repo.DatasetCnt,
+ CreatedUnix: repo.CreatedUnix,
+ UpdatedUnix: repo.UpdatedUnix,
+ PrimaryLanguage: repo.PrimaryLanguage,
+ RelAvatarLink: link,
+ Alias: repo.Alias,
+ IsPrivate: repo.IsPrivate,
+ IsFork: repo.IsFork,
+ IsMirror: repo.IsMirror,
+ IsOwnerPrivate: isOwnerPrivate,
+ IsArchived: repo.IsArchived,
+ }
+ return result
+}
+
+type ContributorInfo struct {
+ RelAvatarLink string
+ UserName string
+ Email string
+ CommitCnt int
+}
+
// SanitizedOriginalURL returns a sanitized OriginalURL
func (repo *Repository) SanitizedOriginalURL() string {
if repo.OriginalURL == "" {
@@ -2379,6 +2453,75 @@ func CheckRepoStats(ctx context.Context) error {
}
}
// ***** END: Repository.NumForks *****
+
+ // ***** START: Repository.DatasetCnt *****
+ desc = "repository count 'dataset_cnt'"
+ results, err = x.Query("SELECT repository.id FROM `repository` WHERE repository.dataset_cnt!=(select count(1) from attachment inner join dataset on attachment.dataset_id = dataset.id where dataset.repo_id = repository.id)")
+ if err != nil {
+ log.Error("Select %s: %v", desc, err)
+ } else {
+ for _, result := range results {
+ id := com.StrTo(result["id"]).MustInt64()
+ select {
+ case <-ctx.Done():
+ log.Warn("CheckRepoStats: Cancelled")
+ return ErrCancelledf("during %s for repo ID %d", desc, id)
+ default:
+ }
+ log.Trace("Updating %s: %d", desc, id)
+ err = ResetRepoDatasetNum(id)
+ if err != nil {
+ log.Error("Update %s[%d]: %v", desc, id, err)
+ }
+ }
+ }
+ // ***** END: Repository.DatasetCnt *****
+
+ // ***** START: Repository.ModelCnt *****
+ desc = "repository count 'model_cnt'"
+ results, err = x.Query("SELECT repository.id FROM `repository` WHERE repository.model_cnt!=(select count(1) from ai_model_manage where repository.id = ai_model_manage.repo_id and ai_model_manage.size > 0 )")
+ if err != nil {
+ log.Error("Select %s: %v", desc, err)
+ } else {
+ for _, result := range results {
+ id := com.StrTo(result["id"]).MustInt64()
+ select {
+ case <-ctx.Done():
+ log.Warn("CheckRepoStats: Cancelled")
+ return ErrCancelledf("during %s for repo ID %d", desc, id)
+ default:
+ }
+ log.Trace("Updating %s: %d", desc, id)
+ err = ResetRepoModelNum(id)
+ if err != nil {
+ log.Error("Update %s[%d]: %v", desc, id, err)
+ }
+ }
+ }
+ // ***** END: Repository.ModelCnt *****
+
+ // ***** START: Repository.AiTaskCnt *****
+ desc = "repository count 'ai_task_cnt'"
+ results, err = x.Query("SELECT repository.id FROM `repository` WHERE repository.ai_task_cnt!=(select count(1) from cloudbrain where repository.id = cloudbrain.repo_id and (cloudbrain.deleted_at is null or cloudbrain.deleted_at = '0001-01-01 00:00:00') )")
+ if err != nil {
+ log.Error("Select %s: %v", desc, err)
+ } else {
+ for _, result := range results {
+ id := com.StrTo(result["id"]).MustInt64()
+ select {
+ case <-ctx.Done():
+ log.Warn("CheckRepoStats: Cancelled")
+ return ErrCancelledf("during %s for repo ID %d", desc, id)
+ default:
+ }
+ log.Trace("Updating %s: %d", desc, id)
+ err = ResetRepoAITaskNum(id)
+ if err != nil {
+ log.Error("Update %s[%d]: %v", desc, id, err)
+ }
+ }
+ }
+ // ***** END: Repository.AiTaskCnt *****
return nil
}
@@ -2775,3 +2918,85 @@ func ReadLatestFileInRepo(userName, repoName, refName, treePath string) (*RepoFi
}
return &RepoFile{CommitId: commitId, Content: d}, nil
}
+
+func ResetRepoAITaskNum(repoId int64) error {
+ n, err := x.Where("repo_id = ? ", repoId).Count(&Cloudbrain{})
+ if err != nil {
+ return err
+ }
+ r := Repository{
+ AiTaskCnt: n,
+ }
+ _, err = x.Cols("ai_task_cnt").Where("id = ?", repoId).Update(&r)
+ return err
+}
+
+func ResetRepoDatasetNum(repoId int64) error {
+ n, err := x.Table("attachment").Join("inner", "dataset", "attachment.dataset_id = dataset.id").Where("dataset.repo_id = ?", repoId).Count()
+ if err != nil {
+ return err
+ }
+ r := Repository{
+ DatasetCnt: n,
+ }
+ _, err = x.Cols("dataset_cnt").Where("id = ?", repoId).Update(&r)
+ return err
+}
+
+func ResetRepoModelNum(repoId int64) error {
+ _, err := x.Exec("update repository set model_cnt = (select count(1) from ai_model_manage where ai_model_manage.repo_id = ? and size > 0) where id = ?", repoId, repoId)
+ return err
+}
+
+func operateRepoCol(repoId int64, colName string, amount int64, engines ...*xorm.Engine) error {
+ var err error
+
+ if amount == 0 {
+ return nil
+ }
+ var ee *xorm.Engine
+ if len(engines) == 0 {
+ ee = x
+ } else {
+ ee = engines[0]
+ }
+ if amount > 0 {
+ _, err = ee.Exec(fmt.Sprintf("update repository set %s = %s + ? where id = ?", colName, colName), amount, repoId)
+ } else {
+ _, err = ee.Exec(fmt.Sprintf("update repository set %s = %s - ? where id = ?", colName, colName), -1*amount, repoId)
+ }
+
+ return err
+}
+
+func OperateRepoDatasetNum(repoId int64, amount int64, engines ...*xorm.Engine) error {
+ return operateRepoCol(repoId, "dataset_cnt", amount, engines...)
+}
+
+func OperateRepoModelNum(repoId int64, amount int64, engines ...*xorm.Engine) error {
+ return operateRepoCol(repoId, "model_cnt", amount, engines...)
+}
+
+func OperateRepoAITaskNum(repoId int64, amount int64, engines ...*xorm.Engine) error {
+ return operateRepoCol(repoId, "ai_task_cnt", amount, engines...)
+}
+
+func UpdateRepositoryLastFourMonthCommits(repoID int64, amount int64) error {
+ _, err := x.Exec("update repository set last_four_month_commits = ? where id = ?", amount, repoID)
+ return err
+}
+func UpdateRepositoryLastMonthVisits(repoID int64, amount int64) error {
+ _, err := x.Exec("update repository set last_month_visits = ? where id = ?", amount, repoID)
+ return err
+}
+
+func SyncStatDataToRepo(repo *Repository) {
+ //Save the visit number of repository in the last month
+ if lv, err := SumLastMonthNumVisits(repo.ID); err == nil {
+ UpdateRepositoryLastMonthVisits(repo.ID, lv)
+ }
+ //Save the commits number of repository in the last four month
+ if lc, err := SumLastFourMonthNumCommits(repo.ID); err == nil {
+ UpdateRepositoryLastFourMonthCommits(repo.ID, lc)
+ }
+}
diff --git a/models/repo_list.go b/models/repo_list.go
index 92654c11c..3c655fbd9 100755
--- a/models/repo_list.go
+++ b/models/repo_list.go
@@ -201,29 +201,41 @@ func (s SearchOrderBy) String() string {
return string(s)
}
+type FindReposResponse struct {
+ Repos []*Repository4Card
+ Page int
+ PageSize int
+ Total int64
+}
+
// Strings for sorting result
const (
- SearchOrderByAlphabetically SearchOrderBy = "name ASC"
- SearchOrderByAlphabeticallyReverse SearchOrderBy = "name DESC"
- SearchOrderByLeastUpdated SearchOrderBy = "updated_unix ASC"
- SearchOrderByRecentUpdated SearchOrderBy = "updated_unix DESC"
- SearchOrderByOldest SearchOrderBy = "created_unix ASC"
- SearchOrderByNewest SearchOrderBy = "created_unix DESC"
- SearchOrderBySize SearchOrderBy = "size ASC"
- SearchOrderBySizeReverse SearchOrderBy = "size DESC"
- SearchOrderByID SearchOrderBy = "id ASC"
- SearchOrderByIDReverse SearchOrderBy = "id DESC"
- SearchOrderByStars SearchOrderBy = "num_stars ASC"
- SearchOrderByStarsReverse SearchOrderBy = "num_stars DESC"
- SearchOrderByForks SearchOrderBy = "num_forks ASC"
- SearchOrderByForksReverse SearchOrderBy = "num_forks DESC"
- SearchOrderByDownloadTimes SearchOrderBy = "download_times DESC"
- SearchOrderByUseCount SearchOrderBy = "use_count ASC"
- SearchOrderByUseCountReverse SearchOrderBy = "use_count DESC"
- SearchOrderByHot SearchOrderBy = "(num_watches + num_stars + num_forks + clone_cnt) DESC"
- SearchOrderByActive SearchOrderBy = "(num_issues + num_pulls + num_commit) DESC"
- SearchOrderByWatches SearchOrderBy = "num_watches DESC"
- SearchOrderByDefault SearchOrderBy = "recommend desc,num_stars DESC,updated_unix DESC"
+ SearchOrderByAlphabetically SearchOrderBy = "name ASC"
+ SearchOrderByAlphabeticallyReverse SearchOrderBy = "name DESC"
+ SearchOrderByLeastUpdated SearchOrderBy = "updated_unix ASC"
+ SearchOrderByRecentUpdated SearchOrderBy = "updated_unix DESC"
+ SearchOrderByOldest SearchOrderBy = "created_unix ASC"
+ SearchOrderByNewest SearchOrderBy = "created_unix DESC"
+ SearchOrderBySize SearchOrderBy = "size ASC"
+ SearchOrderBySizeReverse SearchOrderBy = "size DESC"
+ SearchOrderByID SearchOrderBy = "id ASC"
+ SearchOrderByIDReverse SearchOrderBy = "id DESC"
+ SearchOrderByStars SearchOrderBy = "num_stars ASC"
+ SearchOrderByStarsReverse SearchOrderBy = "num_stars DESC"
+ SearchOrderByForks SearchOrderBy = "num_forks ASC"
+ SearchOrderByForksReverse SearchOrderBy = "num_forks DESC"
+ SearchOrderByDownloadTimes SearchOrderBy = "download_times DESC"
+ SearchOrderByUseCount SearchOrderBy = "use_count ASC"
+ SearchOrderByUseCountReverse SearchOrderBy = "use_count DESC"
+ SearchOrderByHot SearchOrderBy = "(num_watches + num_stars + num_forks + clone_cnt) DESC"
+ SearchOrderByActive SearchOrderBy = "(num_issues + num_pulls + num_commit) DESC"
+ SearchOrderByWatches SearchOrderBy = "num_watches DESC"
+ SearchOrderByDefault SearchOrderBy = "recommend desc,num_stars DESC,updated_unix DESC"
+ SearchOrderByAiTaskCntReverse SearchOrderBy = "ai_task_cnt desc"
+ SearchOrderByModelCntReverse SearchOrderBy = "model_cnt desc"
+ SearchOrderByDatasetCntReverse SearchOrderBy = "dataset_cnt desc"
+ SearchOrderByLastMonthVisitsReverse SearchOrderBy = "last_month_visits desc"
+ SearchOrderByLastFourMonthCommitsReverse SearchOrderBy = "last_four_month_commits desc"
)
// SearchRepositoryCondition creates a query condition according search repository options
diff --git a/models/repo_statistic.go b/models/repo_statistic.go
index 4f8f13ed7..ecdd77e57 100755
--- a/models/repo_statistic.go
+++ b/models/repo_statistic.go
@@ -200,3 +200,23 @@ func UpdateRepoStatVisits(repoStat *RepoStatistic) error {
_, err := xStatistic.Exec(sql, repoStat.NumVisits, repoStat.RepoID, repoStat.Date)
return err
}
+
+func SumRepoStatColumn(begin, end time.Time, repoId int64, columnName string) (int64, error) {
+ res, err := xStatistic.Where("created_unix <= ? and created_unix >= ? and repo_id = ? ", end.Unix(), begin.Unix(), repoId).Sum(&RepoStatistic{}, columnName)
+ if err != nil {
+ return 0, err
+ }
+ return int64(res), nil
+}
+
+func SumLastMonthNumVisits(repoId int64) (int64, error) {
+ end := time.Now()
+ begin := end.AddDate(0, 0, -30)
+ return SumRepoStatColumn(begin, end, repoId, "num_visits")
+}
+
+func SumLastFourMonthNumCommits(repoId int64) (int64, error) {
+ end := time.Now()
+ begin := end.AddDate(0, 0, -120)
+ return SumRepoStatColumn(begin, end, repoId, "num_commits_added")
+}
diff --git a/models/repo_tag.go b/models/repo_tag.go
index 730eb3f2a..4585a95b6 100644
--- a/models/repo_tag.go
+++ b/models/repo_tag.go
@@ -4,6 +4,7 @@ import (
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/timeutil"
"fmt"
+ "xorm.io/builder"
)
type OfficialTag struct {
@@ -166,3 +167,33 @@ func GetAllOfficialTags() ([]OfficialTag, error) {
}
return o, nil
}
+
+// FindSelectedReposOpts holds the filter and paging options consumed by
+// GetSelectedRepos when querying repositories carrying the "selected" tag.
+type FindSelectedReposOpts struct {
+	ListOptions
+	// OrgId, when > 0, restricts results to repositories of this organization.
+	OrgId int64
+	// OnlyPublic, when true, excludes private repositories from the result.
+	OnlyPublic bool
+}
+
+// GetSelectedRepos returns the repositories tagged with the official tag
+// whose code is "selected", most recently updated first, paged according to
+// opts.ListOptions and filtered per the other fields of opts.
+func GetSelectedRepos(opts FindSelectedReposOpts) ([]*Repository, error) {
+	// Guard against a zero/negative page so the OFFSET below stays >= 0.
+	if opts.Page < 1 {
+		opts.Page = 1
+	}
+	var cond = builder.NewCond()
+	cond = cond.And(builder.Eq{"official_tag.code": "selected"})
+	if opts.OrgId > 0 {
+		cond = cond.And(builder.Eq{"official_tag_repos.org_id": opts.OrgId})
+	}
+	if opts.OnlyPublic {
+		cond = cond.And(builder.Eq{"repository.is_private": false})
+	}
+	t := make([]*Repository, 0)
+	err := x.Join("inner", "official_tag_repos", "repository.id = official_tag_repos.repo_id").
+		Join("inner", "official_tag", "official_tag.id = official_tag_repos.tag_id").
+		Where(cond).OrderBy("repository.updated_unix desc").Limit(opts.PageSize, (opts.Page-1)*opts.PageSize).Find(&t)
+
+	if err != nil {
+		return nil, err
+	}
+
+	return t, nil
+}
diff --git a/models/resource_specification.go b/models/resource_specification.go
index 2f815818b..809a3496a 100644
--- a/models/resource_specification.go
+++ b/models/resource_specification.go
@@ -3,6 +3,7 @@ package models
import (
"code.gitea.io/gitea/modules/timeutil"
"fmt"
+ "strings"
"xorm.io/builder"
)
@@ -197,12 +198,104 @@ type Specification struct {
AiCenterName string
IsExclusive bool
ExclusiveOrg string
+ //specs that have the same sourceSpecId, computeResource and cluster as current spec
+ RelatedSpecs []*Specification
}
func (Specification) TableName() string {
return "resource_specification"
}
+// loadRelatedSpecs lazily populates s.RelatedSpecs with all on-shelf specs
+// sharing this spec's SourceSpecId, ComputeResource and Cluster.
+// When SourceSpecId is empty, or the lookup fails, RelatedSpecs is set to an
+// empty (non-nil) slice so the load is not retried on later calls.
+func (s *Specification) loadRelatedSpecs() {
+	// Already loaded (possibly as an empty slice): nothing to do.
+	if s.RelatedSpecs != nil {
+		return
+	}
+	defaultSpecs := make([]*Specification, 0)
+	if s.SourceSpecId == "" {
+		s.RelatedSpecs = defaultSpecs
+		return
+	}
+	r, err := FindSpecs(FindSpecsOptions{
+		ComputeResource: s.ComputeResource,
+		Cluster:         s.Cluster,
+		SourceSpecId:    s.SourceSpecId,
+		RequestAll:      true,
+		SpecStatus:      SpecOnShelf,
+	})
+	if err != nil {
+		// Lookup errors are swallowed; callers simply see no related specs.
+		s.RelatedSpecs = defaultSpecs
+		return
+	}
+	s.RelatedSpecs = r
+}
+// GetAvailableCenterIds returns the AiCenterCode of every related spec
+// available to the (optional) user. When a userId is supplied, exclusive
+// specs are filtered by organization membership. The result may contain
+// duplicate center codes; an empty slice is returned when nothing matches.
+func (s *Specification) GetAvailableCenterIds(userIds ...int64) []string {
+	s.loadRelatedSpecs()
+
+	if len(s.RelatedSpecs) == 0 {
+		return make([]string, 0)
+	}
+
+	// Only the first userId (if any) is considered; zero means "no user".
+	var uId int64
+	if len(userIds) > 0 {
+		uId = userIds[0]
+	}
+	//filter exclusive specs
+	specs := FilterExclusiveSpecs(s.RelatedSpecs, uId)
+
+	centerIds := make([]string, len(specs))
+	for i, v := range specs {
+		centerIds[i] = v.AiCenterCode
+	}
+	return centerIds
+}
+
+// FilterExclusiveSpecs returns the specs from r usable by the given user:
+// non-exclusive specs are always kept, while an exclusive spec is kept only
+// when the user is a member of one of the organizations named in its
+// ExclusiveOrg field (";"-separated). A zero userId disables filtering and
+// returns r unchanged. Duplicate spec IDs are dropped, keeping first
+// occurrence order.
+func FilterExclusiveSpecs(r []*Specification, userId int64) []*Specification {
+	if userId == 0 {
+		return r
+	}
+	kept := make([]*Specification, 0, len(r))
+	seen := make(map[int64]struct{}, len(r))
+	for _, spec := range r {
+		if _, dup := seen[spec.ID]; dup {
+			continue
+		}
+		if !spec.IsExclusive {
+			kept = append(kept, spec)
+			seen[spec.ID] = struct{}{}
+			continue
+		}
+		for _, org := range strings.Split(spec.ExclusiveOrg, ";") {
+			if isMember, _ := IsOrganizationMemberByOrgName(org, userId); isMember {
+				kept = append(kept, spec)
+				seen[spec.ID] = struct{}{}
+				break
+			}
+		}
+	}
+	return kept
+}
+
+// DistinctSpecs removes duplicates from r by SourceSpecId, keeping first
+// occurrence order. Specs with an empty SourceSpecId are always kept.
+func DistinctSpecs(r []*Specification) []*Specification {
+	out := make([]*Specification, 0, len(r))
+	seen := make(map[string]struct{}, len(r))
+	for _, spec := range r {
+		if spec.SourceSpecId == "" {
+			out = append(out, spec)
+			continue
+		}
+		if _, dup := seen[spec.SourceSpecId]; dup {
+			continue
+		}
+		out = append(out, spec)
+		seen[spec.SourceSpecId] = struct{}{}
+	}
+	return out
+}
+
func InsertResourceSpecification(r ResourceSpecification) (int64, error) {
return x.Insert(&r)
}
diff --git a/models/task_config.go b/models/task_config.go
index 0d9d21187..f86032fc9 100644
--- a/models/task_config.go
+++ b/models/task_config.go
@@ -36,6 +36,8 @@ func GetTaskTypeFromAction(a ActionType) TaskType {
ActionCreateInferenceTask,
ActionCreateBenchMarkTask,
ActionCreateGPUTrainTask,
+ ActionCreateGrampusGPUDebugTask,
+ ActionCreateGrampusNPUDebugTask,
ActionCreateGrampusNPUTrainTask,
ActionCreateGrampusGPUTrainTask:
return TaskCreateCloudbrainTask
diff --git a/models/topic.go b/models/topic.go
index 0b19bc1f0..ea5698f4c 100644
--- a/models/topic.go
+++ b/models/topic.go
@@ -9,6 +9,7 @@ import (
"regexp"
"strings"
"unicode/utf8"
+ "xorm.io/xorm"
"code.gitea.io/gitea/modules/timeutil"
@@ -337,3 +338,16 @@ func GetOrgTopics(orgId int64) ([]Topic, error) {
return result, nil
}
+
+// UpdateRepoTopics persists topicNames into the repository's "topics" column.
+// An optional xorm engine may be supplied to run the update through it;
+// otherwise the package default engine x is used.
+func UpdateRepoTopics(repoID int64, topicNames []string, sess ...*xorm.Engine) error {
+	e := x
+	if len(sess) > 0 {
+		e = sess[0]
+	}
+	if _, err := e.ID(repoID).Cols("topics").Update(&Repository{
+		Topics: topicNames,
+	}); err != nil {
+		return err
+	}
+	return nil
+}
diff --git a/models/user.go b/models/user.go
index c421455bc..dad252d92 100755
--- a/models/user.go
+++ b/models/user.go
@@ -198,6 +198,40 @@ type SearchOrganizationsOptions struct {
All bool
}
+// User4Front is the reduced user representation handed to the front end.
+// It is built from a full User via ToFrontFormat and omits credentials and
+// other internal fields.
+type User4Front struct {
+	ID          int64
+	LowerName   string `xorm:"UNIQUE NOT NULL"`
+	Name        string `xorm:"UNIQUE NOT NULL"`
+	FullName    string
+	// Email may be left empty when the user keeps the address private.
+	Email       string `xorm:"NOT NULL"`
+	Language    string `xorm:"VARCHAR(5)"`
+	Description string
+	// RelAvatarLink is the relative URL of the user's avatar.
+	RelAvatarLink string
+	NumMembers    int
+	CreatedUnix   timeutil.TimeStamp `xorm:"INDEX created"`
+	UpdatedUnix   timeutil.TimeStamp `xorm:"INDEX updated"`
+}
+
+// ToFrontFormat converts a User into its front-end representation.
+// The email address is included only when the user has not opted to keep it
+// private (KeepEmailPrivate).
+func (u *User) ToFrontFormat() *User4Front {
+	uf := &User4Front{
+		ID:          u.ID,
+		LowerName:   u.LowerName,
+		Name:        u.Name,
+		FullName:    u.FullName,
+		Language:    u.Language,
+		Description: u.Description,
+		CreatedUnix: u.CreatedUnix,
+		UpdatedUnix: u.UpdatedUnix,
+		NumMembers:  u.NumMembers,
+	}
+	// BUGFIX: previously Email was copied unconditionally in the literal
+	// above, so the KeepEmailPrivate guard never actually hid the address.
+	// Only reveal it when the user allows it.
+	if !u.KeepEmailPrivate {
+		uf.Email = u.Email
+	}
+	uf.RelAvatarLink = u.RelAvatarLink()
+	return uf
+}
+
// GenerateRandomAvatar generates a random avatar for user.
func (u *User) IsBindWechat() bool {
return u.WechatOpenId != ""
diff --git a/models/user_analysis_for_activity.go b/models/user_analysis_for_activity.go
index 2066697d2..99ff990ce 100644
--- a/models/user_analysis_for_activity.go
+++ b/models/user_analysis_for_activity.go
@@ -449,3 +449,20 @@ func QueryUserLoginInfo(userIds []int64) []*UserLoginLog {
return loginList
}
+
+// QueryUserAnnualReport loads the annual summary row for the given user from
+// the statistics database. It returns nil when no row exists or when the
+// query fails (the error is only logged).
+func QueryUserAnnualReport(userId int64) *UserSummaryCurrentYear {
+	statictisSess := xStatistic.NewSession()
+	defer statictisSess.Close()
+	log.Info("userId=" + fmt.Sprint(userId))
+
+	reList := make([]*UserSummaryCurrentYear, 0)
+	// Use a bound parameter instead of concatenating the id into the SQL text.
+	err := statictisSess.Select("*").Table(new(UserSummaryCurrentYear)).Where("id=?", userId).Find(&reList)
+	if err != nil {
+		log.Info("error:=" + err.Error())
+		return nil
+	}
+	if len(reList) > 0 {
+		return reList[0]
+	}
+	return nil
+}
diff --git a/models/user_business_analysis.go b/models/user_business_analysis.go
index 394c24825..d5ab871ce 100644
--- a/models/user_business_analysis.go
+++ b/models/user_business_analysis.go
@@ -132,11 +132,17 @@ func makeResultForMonth(allUserInfo []*UserMetrics, count int) []*UserMetrics {
if count > 0 {
for _, userMetrics := range allUserInfo {
dateTime := time.Unix(userMetrics.CountDate, 0)
- month := fmt.Sprint(dateTime.Year()) + "-" + fmt.Sprint(int(dateTime.Month()))
+ mInt := int(dateTime.Month())
+ mString := fmt.Sprint(mInt)
+ if mInt < 10 {
+ mString = "0" + mString
+ }
+ month := fmt.Sprint(dateTime.Year()) + "-" + mString
if _, ok := monthMap[month]; !ok {
monthUserMetrics := &UserMetrics{
DisplayDate: month,
ActivateRegistUser: userMetrics.ActivateRegistUser,
+ RegistActivityUser: userMetrics.RegistActivityUser,
NotActivateRegistUser: userMetrics.NotActivateRegistUser,
TotalUser: userMetrics.TotalUser,
TotalNotActivateRegistUser: userMetrics.TotalUser - userMetrics.TotalActivateRegistUser,
@@ -152,6 +158,7 @@ func makeResultForMonth(allUserInfo []*UserMetrics, count int) []*UserMetrics {
value.ActivateRegistUser += userMetrics.ActivateRegistUser
value.NotActivateRegistUser += userMetrics.NotActivateRegistUser
value.HasActivityUser += userMetrics.HasActivityUser
+ value.RegistActivityUser += userMetrics.RegistActivityUser
value.TotalRegistUser += userMetrics.ActivateRegistUser + userMetrics.NotActivateRegistUser
value.ActivateIndex = float64(value.ActivateRegistUser) / float64(value.TotalRegistUser)
value.DaysForMonth += 1
@@ -348,6 +355,7 @@ func QueryUserStaticDataForUserDefine(opts *UserBusinessAnalysisQueryOptions, wi
OpenIIndexMap := queryUserRepoOpenIIndex(start_unix, end_unix)
CloudBrainTaskMap, CloudBrainTaskItemMap := queryCloudBrainTask(start_unix, end_unix)
AiModelManageMap := queryUserModel(start_unix, end_unix)
+ AiModelConvertMap := queryUserModelConvert(start_unix, end_unix)
CollectDataset, CollectedDataset := queryDatasetStars(start_unix, end_unix)
RecommendDataset, _ := queryRecommedDataSet(start_unix, end_unix)
@@ -420,6 +428,7 @@ func QueryUserStaticDataForUserDefine(opts *UserBusinessAnalysisQueryOptions, wi
dateRecord.GpuBenchMarkJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_GpuBenchMarkJob", CloudBrainTaskItemMap)
dateRecord.CloudBrainRunTime = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_CloudBrainRunTime", CloudBrainTaskItemMap)
dateRecord.CommitModelCount = getMapValue(dateRecord.ID, AiModelManageMap)
+ dateRecord.ModelConvertCount = getMapValue(dateRecord.ID, AiModelConvertMap)
dateRecord.CollectDataset = getMapValue(dateRecord.ID, CollectDataset)
dateRecord.CollectedDataset = getMapValue(dateRecord.ID, CollectedDataset)
@@ -539,6 +548,7 @@ func QueryUserStaticDataPage(opts *UserBusinessAnalysisQueryOptions) ([]*UserBus
resultMap[userRecord.ID].CommitDatasetSize += userRecord.CommitDatasetSize
resultMap[userRecord.ID].CommitDatasetNum += userRecord.CommitDatasetNum
resultMap[userRecord.ID].CommitModelCount += userRecord.CommitModelCount
+ resultMap[userRecord.ID].ModelConvertCount += userRecord.ModelConvertCount
resultMap[userRecord.ID].SolveIssueCount += userRecord.SolveIssueCount
resultMap[userRecord.ID].EncyclopediasCount += userRecord.EncyclopediasCount
resultMap[userRecord.ID].CreateRepoCount += userRecord.CreateRepoCount
@@ -576,7 +586,7 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
startTime := currentTimeNow.AddDate(0, 0, -1)
CodeMergeCountMap := queryPullRequest(start_unix, end_unix)
- CommitCountMap, mostActiveMap := queryCommitAction(start_unix, end_unix, 5)
+ CommitCountMap, _ := queryCommitAction(start_unix, end_unix, 5)
IssueCountMap := queryCreateIssue(start_unix, end_unix)
CommentCountMap := queryComment(start_unix, end_unix)
@@ -592,29 +602,25 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
//log.Info("CommitCodeSizeMapJson=" + string(CommitCodeSizeMapJson))
}
//CommitCodeSizeMap := queryCommitCodeSize(StartTimeNextDay.Unix(), EndTimeNextDay.Unix())
- CommitDatasetSizeMap, CommitDatasetNumMap, dataSetDownloadMap := queryDatasetSize(start_unix, end_unix)
+ CommitDatasetSizeMap, CommitDatasetNumMap, _ := queryDatasetSize(start_unix, end_unix)
SolveIssueCountMap := querySolveIssue(start_unix, end_unix)
- CreateRepoCountMap, DetailInfoMap, MostDownloadMap := queryUserCreateRepo(start_unix, end_unix)
+ CreateRepoCountMap, _, _ := queryUserCreateRepo(start_unix, end_unix)
LoginCountMap := queryLoginCount(start_unix, end_unix)
OpenIIndexMap := queryUserRepoOpenIIndex(startTime.Unix(), end_unix)
CloudBrainTaskMap, CloudBrainTaskItemMap := queryCloudBrainTask(start_unix, end_unix)
AiModelManageMap := queryUserModel(start_unix, end_unix)
+ AiModelConvertMap := queryUserModelConvert(start_unix, end_unix)
CollectDataset, CollectedDataset := queryDatasetStars(start_unix, end_unix)
- RecommendDataset, CreatedDataset := queryRecommedDataSet(start_unix, end_unix)
+ RecommendDataset, _ := queryRecommedDataSet(start_unix, end_unix)
CollectImage, CollectedImage := queryImageStars(start_unix, end_unix)
RecommendImage := queryRecommedImage(start_unix, end_unix)
InvitationMap := queryUserInvitationCount(start_unix, end_unix)
DataDate := currentTimeNow.Format("2006-01-02") + " 00:01"
- bonusMap := make(map[string]map[string]int)
- if tableName == "user_business_analysis_current_year" {
- bonusMap = getBonusMap()
- log.Info("truncate all data from table:user_summary_current_year ")
- statictisSess.Exec("TRUNCATE TABLE user_summary_current_year")
- }
+
cond := "type != 1 and is_active=true"
count, err := sess.Where(cond).Count(new(User))
if err != nil {
@@ -680,6 +686,7 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
dateRecordAll.GpuBenchMarkJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_GpuBenchMarkJob", CloudBrainTaskItemMap)
dateRecordAll.CloudBrainRunTime = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_CloudBrainRunTime", CloudBrainTaskItemMap)
dateRecordAll.CommitModelCount = getMapValue(dateRecordAll.ID, AiModelManageMap)
+ dateRecordAll.ModelConvertCount = getMapValue(dateRecordAll.ID, AiModelConvertMap)
dateRecordAll.CollectDataset = getMapValue(dateRecordAll.ID, CollectDataset)
dateRecordAll.CollectedDataset = getMapValue(dateRecordAll.ID, CollectedDataset)
dateRecordAll.RecommendDataset = getMapValue(dateRecordAll.ID, RecommendDataset)
@@ -712,37 +719,6 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
userMetrics["TotalHasActivityUser"] = getMapKeyStringValue("TotalHasActivityUser", userMetrics) + 1
}
}
- if tableName == "user_business_analysis_current_year" {
- //年度数据
- subTime := time.Now().UTC().Sub(dateRecordAll.RegistDate.AsTime().UTC())
- mostActiveDay := ""
- if userInfo, ok := mostActiveMap[dateRecordAll.ID]; ok {
- mostActiveDay = getMostActiveJson(userInfo)
- }
- scoreMap := make(map[string]float64)
- repoInfo := getRepoDetailInfo(DetailInfoMap, dateRecordAll.ID, MostDownloadMap)
- dataSetInfo, datasetscore := getDataSetInfo(dateRecordAll.ID, CreatedDataset, dataSetDownloadMap, CommitDatasetNumMap, CollectedDataset)
- scoreMap["datasetscore"] = datasetscore
- codeInfo, codescore := getCodeInfo(dateRecordAll)
- scoreMap["codescore"] = codescore
- cloudBrainInfo := getCloudBrainInfo(dateRecordAll, CloudBrainTaskItemMap, scoreMap)
- playARoll := getPlayARoll(bonusMap, dateRecordAll.Name, scoreMap)
- re := &UserSummaryCurrentYear{
- ID: dateRecordAll.ID,
- Name: dateRecordAll.Name,
- Email: dateRecordAll.Email,
- Phone: dateRecordAll.Phone,
- RegistDate: dateRecordAll.RegistDate,
- DateCount: int(subTime.Hours()) / 24,
- MostActiveDay: mostActiveDay,
- RepoInfo: repoInfo,
- DataSetInfo: dataSetInfo,
- CodeInfo: codeInfo,
- CloudBrainInfo: cloudBrainInfo,
- PlayARoll: playARoll,
- }
- statictisSess.Insert(re)
- }
}
if len(dateRecordBatch) > 0 {
err := insertTable(dateRecordBatch, tableName, statictisSess)
@@ -772,6 +748,138 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
log.Info("refresh data finished.tableName=" + tableName + " total record:" + fmt.Sprint(insertCount))
}
+// RefreshUserYearTable rebuilds the user_summary_current_year table in the
+// statistics database for the period [pageStartTime, pageEndTime].
+// It truncates the table, then recomputes one summary row per active,
+// non-organization user (type != 1 and is_active=true), paging through the
+// user table PAGE_SIZE rows at a time.
+func RefreshUserYearTable(pageStartTime time.Time, pageEndTime time.Time) {
+	sess := x.NewSession()
+	defer sess.Close()
+	log.Info("RefreshUserYearTable start....")
+	statictisSess := xStatistic.NewSession()
+	defer statictisSess.Close()
+
+	log.Info("UserYear StartTime:" + pageStartTime.Format("2006-01-02 15:04:05"))
+	log.Info("UserYear EndTime time:" + pageEndTime.Format("2006-01-02 15:04:05"))
+
+	start_unix := pageStartTime.Unix()
+	end_unix := pageEndTime.Unix()
+
+	// Pre-aggregate every per-user metric for the whole period up front.
+	CodeMergeCountMap := queryPullRequest(start_unix, end_unix)
+	CommitCountMap, mostActiveMap := queryCommitAction(start_unix, end_unix, 5)
+	IssueCountMap := queryCreateIssue(start_unix, end_unix)
+
+	CommentCountMap := queryComment(start_unix, end_unix)
+
+	CommitCodeSizeMap, err := GetAllUserKPIStats(pageStartTime, pageEndTime)
+	if err != nil {
+		log.Info("query commit code errr.")
+	} else {
+		log.Info("query commit code size, len=" + fmt.Sprint(len(CommitCodeSizeMap)))
+	}
+	CommitDatasetSizeMap, CommitDatasetNumMap, dataSetDownloadMap := queryDatasetSize(start_unix, end_unix)
+	SolveIssueCountMap := querySolveIssue(start_unix, end_unix)
+	CreateRepoCountMap, DetailInfoMap, MostDownloadMap := queryUserCreateRepo(start_unix, end_unix)
+
+	CloudBrainTaskMap, CloudBrainTaskItemMap := queryCloudBrainTask(start_unix, end_unix)
+
+	_, CollectedDataset := queryDatasetStars(start_unix, end_unix)
+	_, CreatedDataset := queryRecommedDataSet(start_unix, end_unix)
+
+	bonusMap := getBonusMap()
+	log.Info("truncate all data from table:user_summary_current_year ")
+	statictisSess.Exec("TRUNCATE TABLE user_summary_current_year")
+
+	cond := "type != 1 and is_active=true"
+	count, err := sess.Where(cond).Count(new(User))
+	if err != nil {
+		log.Info("query user error. return.")
+		return
+	}
+	var indexTotal int64
+	indexTotal = 0
+	for {
+		// Page through the users; Find errors are ignored here, so a failed
+		// page simply produces no rows for that slice of users.
+		sess.Select("`user`.*").Table("user").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
+		userList := make([]*User, 0)
+		sess.Find(&userList)
+		for _, userRecord := range userList {
+			var dateRecordAll UserBusinessAnalysisAll
+			dateRecordAll.ID = userRecord.ID
+			dateRecordAll.Email = userRecord.Email
+			dateRecordAll.Phone = userRecord.PhoneNumber
+			dateRecordAll.RegistDate = userRecord.CreatedUnix
+			dateRecordAll.Name = userRecord.Name
+
+			dateRecordAll.CodeMergeCount = getMapValue(dateRecordAll.ID, CodeMergeCountMap)
+			dateRecordAll.CommitCount = getMapValue(dateRecordAll.ID, CommitCountMap)
+			dateRecordAll.IssueCount = getMapValue(dateRecordAll.ID, IssueCountMap)
+			dateRecordAll.CommentCount = getMapValue(dateRecordAll.ID, CommentCountMap)
+
+			// Commit code size is keyed by email, unlike the other maps.
+			if _, ok := CommitCodeSizeMap[dateRecordAll.Email]; !ok {
+				dateRecordAll.CommitCodeSize = 0
+			} else {
+				dateRecordAll.CommitCodeSize = int(CommitCodeSizeMap[dateRecordAll.Email].CommitLines)
+			}
+			//dateRecordAll.CommitCodeSize = getMapValue(dateRecordAll.ID, CommitCodeSizeMap)
+			dateRecordAll.CommitDatasetSize = getMapValue(dateRecordAll.ID, CommitDatasetSizeMap)
+			dateRecordAll.CommitDatasetNum = getMapValue(dateRecordAll.ID, CommitDatasetNumMap)
+			dateRecordAll.SolveIssueCount = getMapValue(dateRecordAll.ID, SolveIssueCountMap)
+			dateRecordAll.CreateRepoCount = getMapValue(dateRecordAll.ID, CreateRepoCountMap)
+
+			dateRecordAll.CloudBrainTaskNum = getMapValue(dateRecordAll.ID, CloudBrainTaskMap)
+			dateRecordAll.GpuDebugJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_GpuDebugJob", CloudBrainTaskItemMap)
+			dateRecordAll.NpuDebugJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_NpuDebugJob", CloudBrainTaskItemMap)
+			dateRecordAll.GpuTrainJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_GpuTrainJob", CloudBrainTaskItemMap)
+			dateRecordAll.NpuTrainJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_NpuTrainJob", CloudBrainTaskItemMap)
+			dateRecordAll.NpuInferenceJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_NpuInferenceJob", CloudBrainTaskItemMap)
+			dateRecordAll.GpuBenchMarkJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_GpuBenchMarkJob", CloudBrainTaskItemMap)
+			dateRecordAll.CloudBrainRunTime = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_CloudBrainRunTime", CloudBrainTaskItemMap)
+
+			// Annual summary data.
+			subTime := time.Now().UTC().Sub(dateRecordAll.RegistDate.AsTime().UTC())
+			mostActiveDay := ""
+			if userInfo, ok := mostActiveMap[dateRecordAll.ID]; ok {
+				mostActiveDay = getMostActiveJson(userInfo)
+			}
+			scoreMap := make(map[string]float64)
+			repoInfo := getRepoDetailInfo(DetailInfoMap, dateRecordAll.ID, MostDownloadMap)
+			dataSetInfo, datasetscore := getDataSetInfo(dateRecordAll.ID, CreatedDataset, dataSetDownloadMap, CommitDatasetNumMap, CollectedDataset)
+			scoreMap["datasetscore"] = datasetscore
+			codeInfo, codescore := getCodeInfo(dateRecordAll)
+			scoreMap["codescore"] = codescore
+			cloudBrainInfo := getCloudBrainInfo(dateRecordAll, CloudBrainTaskItemMap, scoreMap)
+			playARoll := getPlayARoll(bonusMap, dateRecordAll.Name, scoreMap)
+			re := &UserSummaryCurrentYear{
+				ID:             dateRecordAll.ID,
+				Name:           dateRecordAll.Name,
+				Email:          dateRecordAll.Email,
+				Phone:          dateRecordAll.Phone,
+				RegistDate:     dateRecordAll.RegistDate,
+				DateCount:      int(subTime.Hours()) / 24,
+				MostActiveDay:  mostActiveDay,
+				RepoInfo:       repoInfo,
+				DataSetInfo:    dataSetInfo,
+				CodeInfo:       codeInfo,
+				CloudBrainInfo: cloudBrainInfo,
+				PlayARoll:      playARoll,
+			}
+			// NOTE(review): the Insert error is ignored — confirm that is intended.
+			statictisSess.Insert(re)
+		}
+		indexTotal += PAGE_SIZE
+		if indexTotal >= count {
+			break
+		}
+	}
+	log.Info("update user year data finished. ")
+}
+
+// isUserYearData reports whether tableName is the current-year analysis
+// table and the annual-summary generation window is still open (before 2023).
+func isUserYearData(tableName string) bool {
+	if tableName != "user_business_analysis_current_year" {
+		return false
+	}
+	return time.Now().Year() < 2023
+}
+
func getBonusMap() map[string]map[string]int {
bonusMap := make(map[string]map[string]int)
url := setting.RecommentRepoAddr + "bonus/record.txt"
@@ -794,6 +902,7 @@ func getBonusMap() map[string]map[string]int {
record, ok := bonusMap[userName]
if !ok {
record = make(map[string]int)
+ bonusMap[userName] = record
}
record["times"] = getMapKeyStringValue("times", record) + getIntValue(aLine[3])
record["total_bonus"] = getMapKeyStringValue("total_bonus", record) + getIntValue(aLine[4])
@@ -979,7 +1088,7 @@ func insertTable(dateRecords []UserBusinessAnalysisAll, tableName string, static
insertBatchSql := "INSERT INTO public." + tableName +
"(id, count_date, code_merge_count, commit_count, issue_count, comment_count, focus_repo_count, star_repo_count, watched_count, gitea_age_month, commit_code_size, commit_dataset_size, " +
- "commit_model_count, solve_issue_count, encyclopedias_count, regist_date, create_repo_count, login_count, open_i_index, email, name, data_date,cloud_brain_task_num,gpu_debug_job,npu_debug_job,gpu_train_job,npu_train_job,npu_inference_job,gpu_bench_mark_job,cloud_brain_run_time,commit_dataset_num,user_index,user_location,focus_other_user,collect_dataset,collected_dataset,recommend_dataset,collect_image,collected_image,recommend_image,user_index_primitive,phone,invitation_user_num) " +
+ "commit_model_count, solve_issue_count, encyclopedias_count, regist_date, create_repo_count, login_count, open_i_index, email, name, data_date,cloud_brain_task_num,gpu_debug_job,npu_debug_job,gpu_train_job,npu_train_job,npu_inference_job,gpu_bench_mark_job,cloud_brain_run_time,commit_dataset_num,user_index,user_location,focus_other_user,collect_dataset,collected_dataset,recommend_dataset,collect_image,collected_image,recommend_image,user_index_primitive,phone,invitation_user_num,model_convert_count) " +
"VALUES"
for i, record := range dateRecords {
@@ -988,7 +1097,7 @@ func insertTable(dateRecords []UserBusinessAnalysisAll, tableName string, static
", " + fmt.Sprint(record.WatchedCount) + ", " + fmt.Sprint(record.GiteaAgeMonth) + ", " + fmt.Sprint(record.CommitCodeSize) + ", " + fmt.Sprint(record.CommitDatasetSize) +
", " + fmt.Sprint(record.CommitModelCount) + ", " + fmt.Sprint(record.SolveIssueCount) + ", " + fmt.Sprint(record.EncyclopediasCount) + ", " + fmt.Sprint(record.RegistDate) +
", " + fmt.Sprint(record.CreateRepoCount) + ", " + fmt.Sprint(record.LoginCount) + ", " + fmt.Sprint(record.OpenIIndex) + ", '" + record.Email + "', '" + record.Name + "', '" + record.DataDate + "'," + fmt.Sprint(record.CloudBrainTaskNum) + "," + fmt.Sprint(record.GpuDebugJob) + "," + fmt.Sprint(record.NpuDebugJob) + "," + fmt.Sprint(record.GpuTrainJob) + "," + fmt.Sprint(record.NpuTrainJob) + "," + fmt.Sprint(record.NpuInferenceJob) + "," + fmt.Sprint(record.GpuBenchMarkJob) + "," + fmt.Sprint(record.CloudBrainRunTime) + "," + fmt.Sprint(record.CommitDatasetNum) + "," + fmt.Sprint(record.UserIndex) + ",'" + record.UserLocation + "'," +
- fmt.Sprint(record.FocusOtherUser) + "," + fmt.Sprint(record.CollectDataset) + "," + fmt.Sprint(record.CollectedDataset) + "," + fmt.Sprint(record.RecommendDataset) + "," + fmt.Sprint(record.CollectImage) + "," + fmt.Sprint(record.CollectedImage) + "," + fmt.Sprint(record.RecommendImage) + "," + fmt.Sprint(record.UserIndexPrimitive) + ",'" + record.Phone + "'" + "," + fmt.Sprint(record.InvitationUserNum) + ")"
+ fmt.Sprint(record.FocusOtherUser) + "," + fmt.Sprint(record.CollectDataset) + "," + fmt.Sprint(record.CollectedDataset) + "," + fmt.Sprint(record.RecommendDataset) + "," + fmt.Sprint(record.CollectImage) + "," + fmt.Sprint(record.CollectedImage) + "," + fmt.Sprint(record.RecommendImage) + "," + fmt.Sprint(record.UserIndexPrimitive) + ",'" + record.Phone + "'" + "," + fmt.Sprint(record.InvitationUserNum) + "," + fmt.Sprint(record.ModelConvertCount) + ")"
if i < (len(dateRecords) - 1) {
insertBatchSql += ","
}
@@ -1079,6 +1188,7 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time,
OpenIIndexMap := queryUserRepoOpenIIndex(start_unix, end_unix)
CloudBrainTaskMap, CloudBrainTaskItemMap := queryCloudBrainTask(start_unix, end_unix)
AiModelManageMap := queryUserModel(start_unix, end_unix)
+ AiModelConvertMap := queryUserModelConvert(start_unix, end_unix)
CollectDataset, CollectedDataset := queryDatasetStars(start_unix, end_unix)
RecommendDataset, _ := queryRecommedDataSet(start_unix, end_unix)
@@ -1160,7 +1270,7 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time,
dateRecord.GpuBenchMarkJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_GpuBenchMarkJob", CloudBrainTaskItemMap)
dateRecord.CloudBrainRunTime = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_CloudBrainRunTime", CloudBrainTaskItemMap)
dateRecord.CommitModelCount = getMapValue(dateRecord.ID, AiModelManageMap)
-
+ dateRecord.ModelConvertCount = getMapValue(dateRecord.ID, AiModelConvertMap)
dateRecord.CollectDataset = getMapValue(dateRecord.ID, CollectDataset)
dateRecord.CollectedDataset = getMapValue(dateRecord.ID, CollectedDataset)
dateRecord.RecommendDataset = getMapValue(dateRecord.ID, RecommendDataset)
@@ -1349,6 +1459,7 @@ func getUserIndexFromAnalysisAll(dateRecord UserBusinessAnalysisAll, ParaWeight
result += float64(dateRecord.CreateRepoCount) * getParaWeightValue("CreateRepoCount", ParaWeight, 0.05)
result += float64(dateRecord.CloudBrainTaskNum) * getParaWeightValue("CloudBrainTaskNum", ParaWeight, 0.3)
result += float64(dateRecord.CommitModelCount) * getParaWeightValue("CommitModelCount", ParaWeight, 0.2)
+ result += float64(dateRecord.ModelConvertCount) * getParaWeightValue("ModelConvertCount", ParaWeight, 0.2)
result += dateRecord.OpenIIndex * getParaWeightValue("OpenIIndex", ParaWeight, 0.1)
result += float64(dateRecord.CollectDataset) * getParaWeightValue("CollectDataset", ParaWeight, 0.1)
@@ -1374,6 +1485,7 @@ func getUserActivateAll(dateRecord UserBusinessAnalysisAll) int {
result += dateRecord.CreateRepoCount
result += dateRecord.CloudBrainTaskNum
result += dateRecord.CommitModelCount
+ result += dateRecord.ModelConvertCount
result += dateRecord.CommitDatasetNum
result += dateRecord.FocusOtherUser
result += dateRecord.CollectDataset
@@ -1395,6 +1507,7 @@ func getUserActivate(dateRecord UserBusinessAnalysis) int {
result += dateRecord.CreateRepoCount
result += dateRecord.CloudBrainTaskNum
result += dateRecord.CommitModelCount
+ result += dateRecord.ModelConvertCount
result += dateRecord.CommitDatasetNum
result += dateRecord.FocusOtherUser
result += dateRecord.CollectDataset
@@ -1431,6 +1544,7 @@ func getUserIndex(dateRecord UserBusinessAnalysis, ParaWeight map[string]float64
result += float64(dateRecord.CreateRepoCount) * getParaWeightValue("CreateRepoCount", ParaWeight, 0.05)
result += float64(dateRecord.CloudBrainTaskNum) * getParaWeightValue("CloudBrainTaskNum", ParaWeight, 0.3)
result += float64(dateRecord.CommitModelCount) * getParaWeightValue("CommitModelCount", ParaWeight, 0.2)
+ result += float64(dateRecord.ModelConvertCount) * getParaWeightValue("ModelConvertCount", ParaWeight, 0.2)
result += dateRecord.OpenIIndex * getParaWeightValue("OpenIIndex", ParaWeight, 0.1)
result += float64(dateRecord.CollectDataset) * getParaWeightValue("CollectDataset", ParaWeight, 0.1)
@@ -1475,10 +1589,6 @@ func getInt(str string) int {
return int(re)
}
-func CounDataByDate(wikiCountMap map[string]int, startTime time.Time, endTime time.Time) {
- CounDataByDateAndReCount(wikiCountMap, startTime, endTime, false)
-}
-
func querySolveIssue(start_unix int64, end_unix int64) map[int64]int {
sess := x.NewSession()
defer sess.Close()
@@ -2259,6 +2369,38 @@ func queryUserModel(start_unix int64, end_unix int64) map[int64]int {
return resultMap
}
+// queryUserModelConvert counts, per user id, how many AiModelConvert records
+// were created in [start_unix, end_unix]. It pages through the table
+// PAGE_SIZE rows at a time, mirroring the sibling queryUserModel helper.
+// Find errors inside the loop are ignored, matching the existing pattern.
+func queryUserModelConvert(start_unix int64, end_unix int64) map[int64]int {
+	sess := x.NewSession()
+	defer sess.Close()
+	resultMap := make(map[int64]int)
+	cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)
+	count, err := sess.Where(cond).Count(new(AiModelConvert))
+	if err != nil {
+		log.Info("query AiModelConvert error. return.")
+		return resultMap
+	}
+	var indexTotal int64
+	indexTotal = 0
+	for {
+		sess.Select("id,user_id").Table("ai_model_convert").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
+		aiModelList := make([]*AiModelConvert, 0)
+		sess.Find(&aiModelList)
+		log.Info("query AiModelConvert size=" + fmt.Sprint(len(aiModelList)))
+		for _, aiModelRecord := range aiModelList {
+			if _, ok := resultMap[aiModelRecord.UserId]; !ok {
+				resultMap[aiModelRecord.UserId] = 1
+			} else {
+				resultMap[aiModelRecord.UserId] += 1
+			}
+		}
+		indexTotal += PAGE_SIZE
+		if indexTotal >= count {
+			break
+		}
+	}
+	return resultMap
+}
+
func queryCloudBrainTask(start_unix int64, end_unix int64) (map[int64]int, map[string]int) {
sess := x.NewSession()
defer sess.Close()
@@ -2424,3 +2566,9 @@ func GetContentFromPromote(url string) (string, error) {
allLineStr := string(bytes)
return allLineStr, nil
}
+
+// QueryLast30DaysHighestIndexUsers returns the ids of the `size` users with
+// the highest user_index over the last 30 days, highest first.
+func QueryLast30DaysHighestIndexUsers(size int) ([]int64, error) {
+	userIds := make([]int64, 0)
+	err := xStatistic.Table("user_business_analysis_last30_day").Cols("id").OrderBy("user_index desc").Limit(size).Find(&userIds)
+	return userIds, err
+}
diff --git a/models/user_business_struct.go b/models/user_business_struct.go
index 9dcc12342..00c7f6176 100644
--- a/models/user_business_struct.go
+++ b/models/user_business_struct.go
@@ -89,6 +89,7 @@ type UserBusinessAnalysisCurrentYear struct {
Phone string `xorm:"NULL"`
InvitationUserNum int `xorm:"NOT NULL DEFAULT 0"`
+ ModelConvertCount int `xorm:"NOT NULL DEFAULT 0"`
}
type UserBusinessAnalysisLast30Day struct {
@@ -157,6 +158,7 @@ type UserBusinessAnalysisLast30Day struct {
Phone string `xorm:"NULL"`
InvitationUserNum int `xorm:"NOT NULL DEFAULT 0"`
+ ModelConvertCount int `xorm:"NOT NULL DEFAULT 0"`
}
type UserBusinessAnalysisLastMonth struct {
@@ -225,6 +227,7 @@ type UserBusinessAnalysisLastMonth struct {
Phone string `xorm:"NULL"`
InvitationUserNum int `xorm:"NOT NULL DEFAULT 0"`
+ ModelConvertCount int `xorm:"NOT NULL DEFAULT 0"`
}
type UserBusinessAnalysisCurrentMonth struct {
@@ -293,6 +296,7 @@ type UserBusinessAnalysisCurrentMonth struct {
Phone string `xorm:"NULL"`
InvitationUserNum int `xorm:"NOT NULL DEFAULT 0"`
+ ModelConvertCount int `xorm:"NOT NULL DEFAULT 0"`
}
type UserBusinessAnalysisCurrentWeek struct {
@@ -362,6 +366,7 @@ type UserBusinessAnalysisCurrentWeek struct {
Phone string `xorm:"NULL"`
InvitationUserNum int `xorm:"NOT NULL DEFAULT 0"`
+ ModelConvertCount int `xorm:"NOT NULL DEFAULT 0"`
}
type UserBusinessAnalysisYesterday struct {
@@ -431,6 +436,7 @@ type UserBusinessAnalysisYesterday struct {
Phone string `xorm:"NULL"`
InvitationUserNum int `xorm:"NOT NULL DEFAULT 0"`
+ ModelConvertCount int `xorm:"NOT NULL DEFAULT 0"`
}
type UserBusinessAnalysisLastWeek struct {
@@ -500,6 +506,7 @@ type UserBusinessAnalysisLastWeek struct {
Phone string `xorm:"NULL"`
InvitationUserNum int `xorm:"NOT NULL DEFAULT 0"`
+ ModelConvertCount int `xorm:"NOT NULL DEFAULT 0"`
}
type UserAnalysisPara struct {
@@ -616,6 +623,7 @@ type UserBusinessAnalysisAll struct {
Phone string `xorm:"NULL"`
InvitationUserNum int `xorm:"NOT NULL DEFAULT 0"`
+ ModelConvertCount int `xorm:"NOT NULL DEFAULT 0"`
}
type UserBusinessAnalysis struct {
@@ -704,4 +712,5 @@ type UserBusinessAnalysis struct {
Phone string `xorm:"NULL"`
InvitationUserNum int `xorm:"NOT NULL DEFAULT 0"`
+ ModelConvertCount int `xorm:"NOT NULL DEFAULT 0"`
}
diff --git a/models/user_invitation.go b/models/user_invitation.go
index 2d37bcb23..8a10e71a0 100644
--- a/models/user_invitation.go
+++ b/models/user_invitation.go
@@ -13,6 +13,7 @@ type Invitation struct {
SrcUserID int64 `xorm:"NOT NULL DEFAULT 0"`
UserID int64 `xorm:"NOT NULL DEFAULT 0"`
Phone string `xorm:"INDEX"`
+ Email string `xorm:"-"`
Avatar string `xorm:"-"`
Name string `xorm:"-"`
InvitationUserNum int `xorm:"-"`
diff --git a/modules/auth/grampus.go b/modules/auth/grampus.go
index 414a7c25d..f8a238124 100755
--- a/modules/auth/grampus.go
+++ b/modules/auth/grampus.go
@@ -29,3 +29,24 @@ type CreateGrampusTrainJobForm struct {
func (f *CreateGrampusTrainJobForm) Validate(ctx *macaron.Context, errs binding.Errors) binding.Errors {
return validate(errs, ctx.Data, f, ctx.Locale)
}
+
+type CreateGrampusNotebookForm struct {
+ Type int `form:"type"`
+ DisplayJobName string `form:"display_job_name" binding:"Required"`
+ Attachment string `form:"attachment"`
+ ImageID string `form:"image_id" binding:"Required"`
+ Description string `form:"description"`
+ BranchName string `form:"branch_name" binding:"Required"`
+ Image string `form:"image" binding:"Required"`
+ DatasetName string `form:"dataset_name"`
+ ModelName string `form:"model_name"`
+ ModelVersion string `form:"model_version"`
+ CkptName string `form:"ckpt_name"`
+ LabelName string `form:"label_names"`
+ PreTrainModelUrl string `form:"pre_train_model_url"`
+ SpecId int64 `form:"spec_id" binding:"Required"`
+}
+
+func (f *CreateGrampusNotebookForm) Validate(ctx *macaron.Context, errs binding.Errors) binding.Errors {
+ return validate(errs, ctx.Data, f, ctx.Locale)
+}
diff --git a/modules/auth/modelarts.go b/modules/auth/modelarts.go
index 0221c51d8..0061648ce 100755
--- a/modules/auth/modelarts.go
+++ b/modules/auth/modelarts.go
@@ -16,13 +16,18 @@ func (f *CreateModelArtsForm) Validate(ctx *macaron.Context, errs binding.Errors
}
type CreateModelArtsNotebookForm struct {
- DisplayJobName string `form:"display_job_name" binding:"Required"`
- JobName string `form:"job_name" binding:"Required"`
- Attachment string `form:"attachment"`
- Description string `form:"description"`
- Flavor string `form:"flavor" binding:"Required"`
- ImageId string `form:"image_id" binding:"Required"`
- SpecId int64 `form:"spec_id" binding:"Required"`
+ DisplayJobName string `form:"display_job_name" binding:"Required"`
+ JobName string `form:"job_name" binding:"Required"`
+ Attachment string `form:"attachment"`
+ Description string `form:"description"`
+ Flavor string `form:"flavor" binding:"Required"`
+ ImageId string `form:"image_id" binding:"Required"`
+ ModelName string `form:"model_name"`
+ ModelVersion string `form:"model_version"`
+ CkptName string `form:"ckpt_name"`
+ LabelName string `form:"label_names"`
+ PreTrainModelUrl string `form:"pre_train_model_url"`
+ SpecId int64 `form:"spec_id" binding:"Required"`
}
func (f *CreateModelArtsNotebookForm) Validate(ctx *macaron.Context, errs binding.Errors) binding.Errors {
diff --git a/modules/cloudbrain/cloudbrain.go b/modules/cloudbrain/cloudbrain.go
index 6111cf460..c85f4b8cd 100755
--- a/modules/cloudbrain/cloudbrain.go
+++ b/modules/cloudbrain/cloudbrain.go
@@ -5,6 +5,7 @@ import (
"errors"
"os"
"strconv"
+ "strings"
"code.gitea.io/gitea/modules/timeutil"
@@ -145,7 +146,7 @@ func isAdminOrImageCreater(ctx *context.Context, image *models.Image, err error)
func AdminOrOwnerOrJobCreaterRight(ctx *context.Context) {
var id = ctx.Params(":id")
- job, err := GetCloudBrainByIdOrJobId(id)
+ job, err := GetCloudBrainByIdOrJobId(id, "id")
if err != nil {
log.Error("GetCloudbrainByID failed:%v", err.Error())
ctx.NotFound(ctx.Req.URL.RequestURI(), nil)
@@ -161,7 +162,7 @@ func AdminOrOwnerOrJobCreaterRight(ctx *context.Context) {
func AdminOrJobCreaterRight(ctx *context.Context) {
var id = ctx.Params(":id")
- job, err := GetCloudBrainByIdOrJobId(id)
+ job, err := GetCloudBrainByIdOrJobId(id, "id")
if err != nil {
log.Error("GetCloudbrainByID failed:%v", err.Error())
ctx.NotFound(ctx.Req.URL.RequestURI(), nil)
@@ -177,7 +178,7 @@ func AdminOrJobCreaterRight(ctx *context.Context) {
func AdminOrOwnerOrJobCreaterRightForTrain(ctx *context.Context) {
var jobID = ctx.Params(":jobid")
- job, err := GetCloudBrainByIdOrJobId(jobID)
+ job, err := GetCloudBrainByIdOrJobId(jobID, "jobid")
if err != nil {
log.Error("GetCloudbrainByJobID failed:%v", err.Error())
ctx.NotFound(ctx.Req.URL.RequestURI(), nil)
@@ -193,7 +194,7 @@ func AdminOrOwnerOrJobCreaterRightForTrain(ctx *context.Context) {
func AdminOrJobCreaterRightForTrain(ctx *context.Context) {
var jobID = ctx.Params(":jobid")
- job, err := GetCloudBrainByIdOrJobId(jobID)
+ job, err := GetCloudBrainByIdOrJobId(jobID, "jobid")
if err != nil {
log.Error("GetCloudbrainByJobID failed:%v", err.Error())
ctx.NotFound(ctx.Req.URL.RequestURI(), nil)
@@ -490,6 +491,21 @@ func RestartTask(ctx *context.Context, task *models.Cloudbrain, newID *string) e
}
}
+ if task.PreTrainModelUrl != "" { //预训练
+ _, err := models.QueryModelByPath(task.PreTrainModelUrl)
+ if err != nil {
+ log.Warn("The model may be deleted", err)
+ } else {
+ volumes = append(volumes, models.Volume{
+ HostPath: models.StHostPath{
+ Path: setting.Attachment.Minio.RealPath + task.PreTrainModelUrl,
+ MountPath: PretrainModelMountPath,
+ ReadOnly: true,
+ },
+ })
+ }
+ }
+
createTime := timeutil.TimeStampNow()
jobResult, err := CreateJob(jobName, models.CreateJobParams{
JobName: jobName,
@@ -540,10 +556,16 @@ func RestartTask(ctx *context.Context, task *models.Cloudbrain, newID *string) e
GpuQueue: task.GpuQueue,
ResourceSpecId: task.ResourceSpecId,
ComputeResource: task.ComputeResource,
- CreatedUnix: createTime,
- UpdatedUnix: createTime,
- BranchName: task.BranchName,
- Spec: spec,
+
+ CreatedUnix: createTime,
+ UpdatedUnix: createTime,
+ BranchName: task.BranchName,
+ Spec: spec,
+ ModelName: task.ModelName,
+ ModelVersion: task.ModelVersion,
+ LabelName: task.LabelName,
+ PreTrainModelUrl: task.PreTrainModelUrl,
+ CkptName: task.CkptName,
}
err = models.RestartCloudbrain(task, newTask)
@@ -653,18 +675,45 @@ func IsElementExist(s []string, str string) bool {
return false
}
-func GetCloudBrainByIdOrJobId(id string) (*models.Cloudbrain,error) {
+func GetCloudBrainByIdOrJobId(id string, initialQuery string) (*models.Cloudbrain, error) {
_, err := strconv.ParseInt(id, 10, 64)
var job *models.Cloudbrain
if err != nil {
job, err = models.GetCloudbrainByJobID(id)
} else {
- job, err = models.GetCloudbrainByID(id)
- if err!=nil{
+
+ if strings.EqualFold(initialQuery, "id") {
+ job, err = models.GetCloudbrainByID(id)
+ if err != nil {
+ job, err = models.GetCloudbrainByJobID(id)
+ }
+ } else {
job, err = models.GetCloudbrainByJobID(id)
+ if err != nil {
+ job, err = models.GetCloudbrainByID(id)
+ }
}
}
- return job,err
+ return job, err
+}
+
+type GenerateModelArtsNotebookReq struct {
+ JobName string
+ DisplayJobName string
+ Uuid string
+ Description string
+
+ BootFile string
+
+ ImageId string
+ AutoStopDurationMs int64
+
+ Spec *models.Specification
+ ModelName string
+ LabelName string
+ CkptName string
+ ModelVersion string
+ PreTrainModelUrl string
}
diff --git a/modules/convert/cloudbrain.go b/modules/convert/cloudbrain.go
index 1487f468e..599da4800 100644
--- a/modules/convert/cloudbrain.go
+++ b/modules/convert/cloudbrain.go
@@ -104,6 +104,7 @@ func ToSpecification(s *models.Specification) *api.SpecificationShow {
func ToTagger(user *models.User) *api.Tagger {
return &api.Tagger{
+ ID: user.ID,
Name: user.Name,
RelAvatarURL: user.RelAvatarLink(),
Email: user.Email,
diff --git a/modules/cron/tasks_basic.go b/modules/cron/tasks_basic.go
index 985a82cdb..5907a3418 100755
--- a/modules/cron/tasks_basic.go
+++ b/modules/cron/tasks_basic.go
@@ -5,10 +5,14 @@
package cron
import (
- "code.gitea.io/gitea/modules/urfs_client/urchin"
"context"
"time"
+ "code.gitea.io/gitea/modules/setting"
+
+ "code.gitea.io/gitea/modules/urfs_client/urchin"
+ cloudbrainService "code.gitea.io/gitea/services/cloudbrain"
+
"code.gitea.io/gitea/modules/modelarts"
"code.gitea.io/gitea/services/cloudbrain/resource"
"code.gitea.io/gitea/services/reward"
@@ -190,6 +194,17 @@ func registerHandleRepoAndUserStatistic() {
})
}
+func registerHandleClearCloudbrainResult() {
+ RegisterTaskFatal("handle_cloudbrain_one_result_clear", &BaseConfig{
+ Enabled: true,
+ RunAtStart: setting.ClearStrategy.RunAtStart,
+ Schedule: setting.ClearStrategy.Cron,
+ }, func(ctx context.Context, _ *models.User, _ Config) error {
+ cloudbrainService.ClearCloudbrainResultSpace()
+ return nil
+ })
+}
+
func registerHandleSummaryStatistic() {
RegisterTaskFatal("handle_summary_statistic", &BaseConfig{
Enabled: true,
@@ -282,7 +297,7 @@ func registerHandleCloudbrainDurationStatistic() {
RegisterTaskFatal("handle_cloudbrain_duration_statistic", &BaseConfig{
Enabled: true,
RunAtStart: false,
- Schedule: "1 0 * * * ?",
+ Schedule: "1 1 * * * ?",
}, func(ctx context.Context, _ *models.User, _ Config) error {
repo.CloudbrainDurationStatisticHour()
return nil
@@ -306,6 +321,7 @@ func initBasicTasks() {
registerHandleRepoAndUserStatistic()
registerHandleSummaryStatistic()
+ registerHandleClearCloudbrainResult()
registerSyncCloudbrainStatus()
registerHandleOrgStatistic()
@@ -317,6 +333,6 @@ func initBasicTasks() {
registerHandleModelSafetyTask()
- registerHandleScheduleRecord()
+ registerHandleScheduleRecord()
registerHandleCloudbrainDurationStatistic()
}
diff --git a/modules/grampus/grampus.go b/modules/grampus/grampus.go
index 280407240..37e6fc1bf 100755
--- a/modules/grampus/grampus.go
+++ b/modules/grampus/grampus.go
@@ -1,7 +1,8 @@
package grampus
import (
- "encoding/json"
+ "fmt"
+ "strconv"
"strings"
"code.gitea.io/gitea/models"
@@ -26,8 +27,10 @@ const (
CodeArchiveName = "master.zip"
- BucketRemote = "grampus"
- RemoteModelPath = "/output/" + models.ModelSuffix
+ BucketRemote = "grampus"
+ RemoteModelPath = "/output/" + models.ModelSuffix
+ autoStopDurationMs = 4 * 60 * 60 * 1000
+ CommandGpuDebug = "mkdir -p /dataset;%s! [ -x \"$(command -v jupyter)\" ] && pip install jupyterlab==3 -i https://pypi.tuna.tsinghua.edu.cn/simple;jupyter lab --ServerApp.shutdown_no_activity_timeout=%s --TerminalManager.cull_inactive_timeout=%s --TerminalManager.cull_interval=%s --MappingKernelManager.cull_idle_timeout=%s --MappingKernelManager.cull_interval=%s --MappingKernelManager.cull_connected=True --MappingKernelManager.cull_busy=True --no-browser --ip=0.0.0.0 --allow-root --notebook-dir='/code' --port=$OCTOPUS_NOTEBOOK_PORT --LabApp.token='' --LabApp.allow_origin='*' --LabApp.base_url=$OCTOPUS_NOTEBOOK_BASE_URL;"
)
var (
@@ -37,7 +40,7 @@ var (
SpecialPools *models.SpecialPools
- CommandPrepareScriptGpu = ";mkdir -p output;mkdir -p code;mkdir -p dataset;mkdir -p pretrainmodel;echo \"start loading script\";wget -q https://openi.pcl.ac.cn/OpenIOSSG/%s/archive/master.zip;" +
+ CommandPrepareScriptGpu = ";mkdir -p output;mkdir -p code;mkdir -p dataset;mkdir -p pretrainmodel;echo \"start loading script\";wget -q https://git.openi.org.cn/OpenIOSSG/%s/archive/master.zip;" +
"echo \"finish loading script\";unzip -q master.zip;cd %s;chmod 777 downloader_for_obs uploader_for_npu downloader_for_minio uploader_for_gpu;"
)
@@ -81,6 +84,32 @@ type GenerateTrainJobReq struct {
CodeName string
}
+type GenerateNotebookJobReq struct {
+ JobName string
+ Command string
+ ImageUrl string
+ ImageId string
+ DisplayJobName string
+ Uuid string
+ Description string
+ CodeStoragePath string
+ CommitID string
+ BranchName string
+ ComputeResource string
+ ProcessType string
+ DatasetNames string
+ DatasetInfos map[string]models.DatasetInfo
+ ModelName string
+ LabelName string
+ CkptName string
+ ModelVersion string
+ PreTrainModelPath string
+ PreTrainModelUrl string
+ Spec *models.Specification
+ CodeName string
+ ModelPath string //参考启智GPU调试, 挂载/model目录用户的模型可以输出到这个目录
+}
+
func getEndPoint() string {
index := strings.Index(setting.Endpoint, "//")
endpoint := setting.Endpoint[index+2:]
@@ -101,11 +130,154 @@ func getDatasetGrampus(datasetInfos map[string]models.DatasetInfo) []models.Gram
}
return datasetGrampus
}
+func getDatasetGPUGrampus(datasetInfos map[string]models.DatasetInfo) ([]models.GrampusDataset, string) {
+ var datasetGrampus []models.GrampusDataset
+ var command = ""
+ for uuid, datasetInfo := range datasetInfos {
+ datasetGrampus = append(datasetGrampus, models.GrampusDataset{
+ Name: datasetInfo.FullName,
+ Bucket: setting.Attachment.Minio.Bucket,
+ EndPoint: setting.Attachment.Minio.Endpoint,
+ ObjectKey: datasetInfo.DataLocalPath,
+ ReadOnly: true,
+ ContainerPath: "/dataset1/" + datasetInfo.Name,
+ })
+
+ command += "cp /dataset1/'" + datasetInfo.Name + "'/" + uuid + " /dataset/'" + datasetInfo.FullName + "';"
+
+ }
+ return datasetGrampus, command
+}
-func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (jobId string, err error) {
+func GenerateNotebookJob(ctx *context.Context, req *GenerateNotebookJobReq) (jobId string, err error) {
createTime := timeutil.TimeStampNow()
- centerID, centerName := getCentersParamter(ctx, req)
+ var datasetGrampus []models.GrampusDataset
+ var codeGrampus models.GrampusDataset
+ var cpCommand string
+ imageUrl := req.ImageUrl
+ if ProcessorTypeNPU == req.ProcessType {
+ datasetGrampus = getDatasetGrampus(req.DatasetInfos)
+ if len(req.ModelName) != 0 {
+ datasetGrampus = append(datasetGrampus, models.GrampusDataset{
+ Name: req.ModelName,
+ Bucket: setting.Bucket,
+ EndPoint: getEndPoint(),
+ ReadOnly: true,
+ ObjectKey: req.PreTrainModelPath,
+ })
+ }
+
+ codeGrampus = models.GrampusDataset{
+ Name: req.CodeName,
+ Bucket: setting.Bucket,
+ EndPoint: getEndPoint(),
+ ObjectKey: req.CodeStoragePath + cloudbrain.DefaultBranchName + ".zip",
+ ReadOnly: false,
+ }
+ imageUrl = ""
+ req.Command = ""
+ } else {
+ datasetGrampus, cpCommand = getDatasetGPUGrampus(req.DatasetInfos)
+ if len(req.ModelName) != 0 {
+ datasetGrampus = append(datasetGrampus, models.GrampusDataset{
+ Name: req.ModelName,
+ Bucket: setting.Attachment.Minio.Bucket,
+ EndPoint: setting.Attachment.Minio.Endpoint,
+ ObjectKey: req.PreTrainModelPath,
+ ReadOnly: true,
+ ContainerPath: cloudbrain.PretrainModelMountPath,
+ })
+ }
+ codeGrampus = models.GrampusDataset{
+ Name: req.CodeName,
+ Bucket: setting.Attachment.Minio.Bucket,
+ EndPoint: setting.Attachment.Minio.Endpoint,
+ ObjectKey: req.CodeStoragePath + cloudbrain.DefaultBranchName + ".zip",
+ ReadOnly: false,
+ ContainerPath: cloudbrain.CodeMountPath,
+ }
+ req.Command = fmt.Sprintf(CommandGpuDebug, cpCommand, setting.CullIdleTimeout, setting.CullIdleTimeout, setting.CullInterval, setting.CullIdleTimeout, setting.CullInterval)
+ log.Info("debug command:" + req.Command)
+
+ }
+
+ jobResult, err := createNotebookJob(models.CreateGrampusNotebookRequest{
+ Name: req.JobName,
+ Tasks: []models.GrampusNotebookTask{
+ {
+ Name: req.JobName,
+ ResourceSpecId: req.Spec.SourceSpecId,
+ ImageId: req.ImageId,
+ ImageUrl: imageUrl,
+ Datasets: datasetGrampus,
+ Code: codeGrampus,
+ AutoStopDuration: autoStopDurationMs,
+ Capacity: setting.Capacity,
+ Command: req.Command,
+ },
+ },
+ })
+ if err != nil {
+ log.Error("createNotebookJob failed: %v", err.Error())
+ return "", err
+ }
+
+ jobID := jobResult.JobInfo.JobID
+ err = models.CreateCloudbrain(&models.Cloudbrain{
+ Status: TransTrainJobStatus(jobResult.JobInfo.Status),
+ UserID: ctx.User.ID,
+ RepoID: ctx.Repo.Repository.ID,
+ JobID: jobID,
+ JobName: req.JobName,
+ DisplayJobName: req.DisplayJobName,
+ JobType: string(models.JobTypeDebug),
+ Type: models.TypeC2Net,
+ Uuid: req.Uuid,
+ DatasetName: req.DatasetNames,
+ CommitID: req.CommitID,
+ IsLatestVersion: "1",
+ ComputeResource: req.ComputeResource,
+ ImageID: req.ImageId,
+ BranchName: req.BranchName,
+ Description: req.Description,
+ WorkServerNumber: 1,
+ EngineName: req.ImageUrl,
+ CreatedUnix: createTime,
+ UpdatedUnix: createTime,
+ Spec: req.Spec,
+ ModelName: req.ModelName,
+ ModelVersion: req.ModelVersion,
+ LabelName: req.LabelName,
+ PreTrainModelUrl: req.PreTrainModelUrl,
+ CkptName: req.CkptName,
+ })
+
+ if err != nil {
+ log.Error("CreateCloudbrain(%s) failed:%v", req.DisplayJobName, err.Error())
+ return "", err
+ }
+
+ var actionType models.ActionType
+ if req.ComputeResource == models.NPUResource {
+ actionType = models.ActionCreateGrampusNPUDebugTask
+ } else if req.ComputeResource == models.GPUResource {
+ actionType = models.ActionCreateGrampusGPUDebugTask
+ }
+ task, err := models.GetCloudbrainByJobID(jobID)
+ if err != nil {
+ log.Error("GetCloudbrainByJobID failed: %v", err.Error())
+ return "", err
+ }
+
+ stringId := strconv.FormatInt(task.ID, 10)
+ notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, stringId, req.DisplayJobName, actionType)
+
+ return jobID, nil
+}
+
+func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (jobId string, err error) {
+ createTime := timeutil.TimeStampNow()
var datasetGrampus, modelGrampus []models.GrampusDataset
var codeGrampus models.GrampusDataset
@@ -138,8 +310,7 @@ func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (jobId str
ResourceSpecId: req.Spec.SourceSpecId,
ImageId: req.ImageId,
ImageUrl: req.ImageUrl,
- CenterID: centerID,
- CenterName: centerName,
+ CenterID: req.Spec.GetAvailableCenterIds(ctx.User.ID),
ReplicaNum: 1,
Datasets: datasetGrampus,
Models: modelGrampus,
@@ -272,11 +443,6 @@ func TransTrainJobStatus(status string) string {
return strings.ToUpper(status)
}
-func InitSpecialPool() {
- if SpecialPools == nil && setting.Grampus.SpecialPools != "" {
- json.Unmarshal([]byte(setting.Grampus.SpecialPools), &SpecialPools)
- }
-}
func GetNpuModelRemoteObsUrl(jobName string) string {
return "s3:///" + BucketRemote + "/" + GetNpuModelObjectKey(jobName)
diff --git a/modules/grampus/resty.go b/modules/grampus/resty.go
index a9e1aed5c..a0d5384e2 100755
--- a/modules/grampus/resty.go
+++ b/modules/grampus/resty.go
@@ -26,6 +26,7 @@ const (
urlGetResourceSpecs = urlOpenApiV1 + "resourcespec"
urlGetAiCenter = urlOpenApiV1 + "sharescreen/aicenter"
urlGetImages = urlOpenApiV1 + "image"
+ urlNotebookJob = urlOpenApiV1 + "notebook"
errorIllegalToken = 1005
)
@@ -87,6 +88,39 @@ func getToken() error {
return nil
}
+func createNotebookJob(req models.CreateGrampusNotebookRequest) (*models.GrampusNotebookResponse, error) {
+ checkSetting()
+ client := getRestyClient()
+ var result models.GrampusNotebookResponse
+
+ retry := 0
+
+sendjob:
+ _, err := client.R().
+ SetHeader("Content-Type", "application/json").
+ SetAuthToken(TOKEN).
+ SetBody(req).
+ SetResult(&result).
+ Post(HOST + urlNotebookJob)
+
+ if err != nil {
+ return nil, fmt.Errorf("resty CreateNotebookJob: %s", err)
+ }
+
+ if result.ErrorCode == errorIllegalToken && retry < 1 {
+ retry++
+ _ = getToken()
+ goto sendjob
+ }
+
+ if result.ErrorCode != 0 {
+ log.Error("CreateNotebookJob failed(%d): %s", result.ErrorCode, result.ErrorMsg)
+ return &result, fmt.Errorf("CreateNotebookJob failed(%d): %s", result.ErrorCode, result.ErrorMsg)
+ }
+
+ return &result, nil
+}
+
func createJob(req models.CreateGrampusJobRequest) (*models.CreateGrampusJobResponse, error) {
checkSetting()
client := getRestyClient()
@@ -120,6 +154,38 @@ sendjob:
return &result, nil
}
+func GetNotebookJob(jobID string) (*models.GrampusNotebookResponse, error) {
+ checkSetting()
+ client := getRestyClient()
+ var result models.GrampusNotebookResponse
+
+ retry := 0
+
+sendjob:
+ _, err := client.R().
+ SetAuthToken(TOKEN).
+ SetResult(&result).
+ Get(HOST + urlNotebookJob + "/" + jobID)
+
+ if err != nil {
+ return nil, fmt.Errorf("resty GetNotebookJob: %v", err)
+ }
+
+ if result.ErrorCode == errorIllegalToken && retry < 1 {
+ retry++
+ log.Info("retry get token")
+ _ = getToken()
+ goto sendjob
+ }
+
+ if result.ErrorCode != 0 {
+ log.Error("GetNotebookJob failed(%d): %s", result.ErrorCode, result.ErrorMsg)
+ return nil, fmt.Errorf("GetNotebookJob failed(%d): %s", result.ErrorCode, result.ErrorMsg)
+ }
+
+ return &result, nil
+}
+
func GetJob(jobID string) (*models.GetGrampusJobResponse, error) {
checkSetting()
client := getRestyClient()
@@ -184,18 +250,23 @@ sendjob:
return &result, nil
}
-func GetImages(processorType string) (*models.GetGrampusImagesResult, error) {
+func GetImages(processorType string, jobType string) (*models.GetGrampusImagesResult, error) {
checkSetting()
client := getRestyClient()
var result models.GetGrampusImagesResult
retry := 0
+ queryType := "TrainJob"
+ if jobType == string(models.JobTypeDebug) {
+ queryType = "Notebook"
+ }
+
sendjob:
_, err := client.R().
SetAuthToken(TOKEN).
SetResult(&result).
- Get(HOST + urlGetImages + "?processorType=" + processorType)
+ Get(HOST + urlGetImages + "?processorType=" + processorType + "&trainType=" + queryType)
if err != nil {
return nil, fmt.Errorf("resty GetImages: %v", err)
@@ -271,19 +342,26 @@ func GetGrampusMetrics(jobID string) (models.GetTrainJobMetricStatisticResult, e
return result, nil
}
-func StopJob(jobID string) (*models.GrampusStopJobResponse, error) {
+func StopJob(jobID string, jobType ...string) (*models.GrampusStopJobResponse, error) {
checkSetting()
client := getRestyClient()
var result models.GrampusStopJobResponse
retry := 0
+ url := urlTrainJob
+ if len(jobType) > 0 {
+ if jobType[0] == string(models.JobTypeDebug) {
+ url = urlNotebookJob
+ }
+ }
+
sendjob:
_, err := client.R().
//SetHeader("Content-Type", "application/json").
SetAuthToken(TOKEN).
SetResult(&result).
- Post(HOST + urlTrainJob + "/" + jobID + "/stop")
+ Post(HOST + url + "/" + jobID + "/stop")
if err != nil {
return &result, fmt.Errorf("resty StopTrainJob: %v", err)
@@ -335,3 +413,33 @@ sendjob:
return &result, nil
}
+
+func RestartNotebookJob(jobID string) (*models.GrampusNotebookRestartResponse, error) {
+	checkSetting()
+	client := getRestyClient()
+	// Value-typed result (like createNotebookJob/GetNotebookJob): resty leaves the
+	// result untouched on non-2xx responses, so a nil pointer here would panic below.
+	var restartResponse models.GrampusNotebookRestartResponse
+	retry := 0
+
+sendjob:
+	res, err := client.R().
+		SetAuthToken(TOKEN).
+		SetResult(&restartResponse).
+		Post(HOST + urlNotebookJob + "/" + jobID + "/start")
+
+	if err != nil {
+		return nil, fmt.Errorf("resty grampus restart note book job: %v", err)
+	}
+	if restartResponse.ErrorCode == errorIllegalToken && retry < 1 {
+		retry++
+		log.Info("retry get token")
+		_ = getToken()
+		goto sendjob
+	}
+
+	if res.StatusCode() != http.StatusOK {
+		// err is always nil here (checked above); report the HTTP response instead.
+		log.Error("resty grampus restart note book job failed(%d): %s", res.StatusCode(), res.String())
+		return nil, fmt.Errorf("resty grampus restart note book job failed: %s", res.String())
+	}
+
+	return &restartResponse, nil
+}
diff --git a/modules/modelarts/modelarts.go b/modules/modelarts/modelarts.go
index b59be307b..dcad1eb00 100755
--- a/modules/modelarts/modelarts.go
+++ b/modules/modelarts/modelarts.go
@@ -20,34 +20,16 @@ import (
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/notification"
"code.gitea.io/gitea/modules/setting"
- "code.gitea.io/gitea/modules/storage"
"code.gitea.io/gitea/modules/timeutil"
)
const (
//notebook
+
storageTypeOBS = "obs"
autoStopDuration = 4 * 60 * 60
AutoStopDurationMs = 4 * 60 * 60 * 1000
- MORDELART_USER_IMAGE_ENGINE_ID = -1
- DataSetMountPath = "/home/ma-user/work"
- NotebookEnv = "Python3"
- NotebookType = "Ascend"
- FlavorInfo = "Ascend: 1*Ascend 910 CPU: 24 核 96GiB (modelarts.kat1.xlarge)"
-
- //train-job
- // ResourcePools = "{\"resource_pool\":[{\"id\":\"pool1328035d\", \"value\":\"专属资源池\"}]}"
- // Engines = "{\"engine\":[{\"id\":1, \"value\":\"Ascend-Powered-Engine\"}]}"
- // EngineVersions = "{\"version\":[{\"id\":118,\"value\":\"MindSpore-1.0.0-c75-python3.7-euleros2.8-aarch64\"}," +
- // "{\"id\":119,\"value\":\"MindSpore-1.1.1-c76-python3.7-euleros2.8-aarch64\"}," +
- // "{\"id\":120,\"value\":\"MindSpore-1.1.1-c76-tr5-python3.7-euleros2.8-aarch64\"}," +
- // "{\"id\":117,\"value\":\"TF-1.15-c75-python3.7-euleros2.8-aarch64\"}" +
- // "]}"
- // TrainJobFlavorInfo = "{\"flavor\":[{\"code\":\"modelarts.bm.910.arm.public.2\",\"value\":\"Ascend : 2 * Ascend 910 CPU:48 核 512GiB\"}," +
- // "{\"code\":\"modelarts.bm.910.arm.public.8\",\"value\":\"Ascend : 8 * Ascend 910 CPU:192 核 2048GiB\"}," +
- // "{\"code\":\"modelarts.bm.910.arm.public.4\",\"value\":\"Ascend : 4 * Ascend 910 CPU:96 核 1024GiB\"}," +
- // "{\"code\":\"modelarts.bm.910.arm.public.1\",\"value\":\"Ascend : 1 * Ascend 910 CPU:24 核 256GiB\"}" +
- // "]}"
+
CodePath = "/code/"
OutputPath = "/output/"
ResultPath = "/result/"
@@ -190,14 +172,6 @@ type OrgMultiNode struct {
Node []int `json:"node"`
}
-// type Parameter struct {
-// Label string `json:"label"`
-// Value string `json:"value"`
-// }
-
-// type Parameters struct {
-// Parameter []Parameter `json:"parameter"`
-// }
type Parameters struct {
Parameter []struct {
@@ -206,98 +180,23 @@ type Parameters struct {
} `json:"parameter"`
}
-func GenerateTask(ctx *context.Context, jobName, uuid, description, flavor string) error {
- var dataActualPath string
- if uuid != "" {
- dataActualPath = setting.Bucket + "/" + setting.BasePath + path.Join(uuid[0:1], uuid[1:2]) + "/" + uuid + "/"
- } else {
- userPath := setting.UserBasePath + ctx.User.Name + "/"
- isExist, err := storage.ObsHasObject(userPath)
- if err != nil {
- log.Error("ObsHasObject failed:%v", err.Error(), ctx.Data["MsgID"])
- return err
- }
-
- if !isExist {
- if err = storage.ObsCreateObject(userPath); err != nil {
- log.Error("ObsCreateObject failed:%v", err.Error(), ctx.Data["MsgID"])
- return err
- }
- }
-
- dataActualPath = setting.Bucket + "/" + userPath
- }
-
- if poolInfos == nil {
- json.Unmarshal([]byte(setting.PoolInfos), &poolInfos)
- }
- createTime := timeutil.TimeStampNow()
- jobResult, err := CreateJob(models.CreateNotebookParams{
- JobName: jobName,
- Description: description,
- ProfileID: setting.ProfileID,
- Flavor: flavor,
- Pool: models.Pool{
- ID: poolInfos.PoolInfo[0].PoolId,
- Name: poolInfos.PoolInfo[0].PoolName,
- Type: poolInfos.PoolInfo[0].PoolType,
- },
- Spec: models.Spec{
- Storage: models.Storage{
- Type: storageTypeOBS,
- Location: models.Location{
- Path: dataActualPath,
- },
- },
- AutoStop: models.AutoStop{
- Enable: true,
- Duration: autoStopDuration,
- },
- },
- })
- if err != nil {
- log.Error("CreateJob failed: %v", err.Error())
- return err
- }
- err = models.CreateCloudbrain(&models.Cloudbrain{
-
- Status: string(models.JobWaiting),
- UserID: ctx.User.ID,
- RepoID: ctx.Repo.Repository.ID,
- JobID: jobResult.ID,
- JobName: jobName,
- JobType: string(models.JobTypeDebug),
- Type: models.TypeCloudBrainTwo,
- Uuid: uuid,
- ComputeResource: models.NPUResource,
- CreatedUnix: createTime,
- UpdatedUnix: createTime,
- })
-
- if err != nil {
- return err
- }
- notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, jobResult.ID, jobName, models.ActionCreateDebugNPUTask)
- return nil
-}
-
-func GenerateNotebook2(ctx *context.Context, displayJobName, jobName, uuid, description, imageId string, spec *models.Specification, bootFile string,autoStopDurationInMs int64) (string, error) {
+func GenerateNotebook2(ctx *context.Context, req cloudbrain.GenerateModelArtsNotebookReq) (string, error) {
if poolInfos == nil {
json.Unmarshal([]byte(setting.PoolInfos), &poolInfos)
}
- imageName, err := GetNotebookImageName(imageId)
+ imageName, err := GetNotebookImageName(req.ImageId)
if err != nil {
log.Error("GetNotebookImageName failed: %v", err.Error())
return "", err
}
createTime := timeutil.TimeStampNow()
jobResult, err := createNotebook2(models.CreateNotebook2Params{
- JobName: jobName,
- Description: description,
- Flavor: spec.SourceSpecId,
- Duration: autoStopDurationInMs,
- ImageID: imageId,
+ JobName: req.JobName,
+ Description: req.Description,
+ Flavor: req.Spec.SourceSpecId,
+ Duration: req.AutoStopDurationMs,
+ ImageID: req.ImageId,
PoolID: poolInfos.PoolInfo[0].PoolId,
Feature: models.NotebookFeature,
Volume: models.VolumeReq{
@@ -310,13 +209,13 @@ func GenerateNotebook2(ctx *context.Context, displayJobName, jobName, uuid, desc
if err != nil {
log.Error("createNotebook2 failed: %v", err.Error())
if strings.HasPrefix(err.Error(), UnknownErrorPrefix) {
- log.Info("(%s)unknown error, set temp status", displayJobName)
+ log.Info("(%s)unknown error, set temp status", req.DisplayJobName)
errTemp := models.InsertCloudbrainTemp(&models.CloudbrainTemp{
JobID: models.TempJobId,
VersionID: models.TempVersionId,
Status: models.TempJobStatus,
Type: models.TypeCloudBrainTwo,
- JobName: jobName,
+ JobName: req.JobName,
JobType: string(models.JobTypeDebug),
})
if errTemp != nil {
@@ -327,23 +226,28 @@ func GenerateNotebook2(ctx *context.Context, displayJobName, jobName, uuid, desc
return "", err
}
task := &models.Cloudbrain{
- Status: jobResult.Status,
- UserID: ctx.User.ID,
- RepoID: ctx.Repo.Repository.ID,
- JobID: jobResult.ID,
- JobName: jobName,
- FlavorCode: spec.SourceSpecId,
- DisplayJobName: displayJobName,
- JobType: string(models.JobTypeDebug),
- Type: models.TypeCloudBrainTwo,
- Uuid: uuid,
- ComputeResource: models.NPUResource,
- Image: imageName,
- BootFile: bootFile,
- Description: description,
- CreatedUnix: createTime,
- UpdatedUnix: createTime,
- Spec: spec,
+ Status: jobResult.Status,
+ UserID: ctx.User.ID,
+ RepoID: ctx.Repo.Repository.ID,
+ JobID: jobResult.ID,
+ JobName: req.JobName,
+ FlavorCode: req.Spec.SourceSpecId,
+ DisplayJobName: req.DisplayJobName,
+ JobType: string(models.JobTypeDebug),
+ Type: models.TypeCloudBrainTwo,
+ Uuid: req.Uuid,
+ ComputeResource: models.NPUResource,
+ Image: imageName,
+ BootFile: req.BootFile,
+ Description: req.Description,
+ CreatedUnix: createTime,
+ UpdatedUnix: createTime,
+ Spec: req.Spec,
+ ModelName: req.ModelName,
+ ModelVersion: req.ModelVersion,
+ LabelName: req.LabelName,
+ PreTrainModelUrl: req.PreTrainModelUrl,
+ CkptName: req.CkptName,
}
err = models.CreateCloudbrain(task)
@@ -352,7 +256,7 @@ func GenerateNotebook2(ctx *context.Context, displayJobName, jobName, uuid, desc
}
stringId := strconv.FormatInt(task.ID, 10)
- notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, stringId, displayJobName, models.ActionCreateDebugNPUTask)
+ notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, stringId, req.DisplayJobName, models.ActionCreateDebugNPUTask)
return jobResult.ID, nil
}
@@ -379,6 +283,9 @@ func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (jobId str
Parameter: req.Parameters,
UserImageUrl: req.UserImageUrl,
UserCommand: req.UserCommand,
+ ShareAddr: setting.ModelArtsShareAddr,
+ MountPath: setting.ModelArtsMountPath,
+ NasType: setting.ModelArtsNasType,
},
})
} else {
@@ -399,6 +306,9 @@ func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (jobId str
Code: req.Spec.SourceSpecId,
},
Parameter: req.Parameters,
+ ShareAddr: setting.ModelArtsShareAddr,
+ MountPath: setting.ModelArtsMountPath,
+ NasType: setting.ModelArtsNasType,
},
})
}
@@ -517,6 +427,9 @@ func GenerateTrainJobVersion(ctx *context.Context, req *GenerateTrainJobReq, job
PreVersionId: req.PreVersionId,
UserImageUrl: req.UserImageUrl,
UserCommand: req.UserCommand,
+ ShareAddr: setting.ModelArtsShareAddr,
+ MountPath: setting.ModelArtsMountPath,
+ NasType: setting.ModelArtsNasType,
},
}, jobId)
} else {
@@ -536,6 +449,9 @@ func GenerateTrainJobVersion(ctx *context.Context, req *GenerateTrainJobReq, job
},
Parameter: req.Parameters,
PreVersionId: req.PreVersionId,
+ ShareAddr: setting.ModelArtsShareAddr,
+ MountPath: setting.ModelArtsMountPath,
+ NasType: setting.ModelArtsNasType,
},
}, jobId)
}
@@ -972,14 +888,14 @@ func getJupyterBaseUrl(url string) string {
}
func getCookiesAndCsrf(jupyterUrl string) ([]*http.Cookie, string) {
- log.Info("jupyter url:"+jupyterUrl)
+ log.Info("jupyter url:" + jupyterUrl)
var cookies []*http.Cookie
const retryTimes = 10
for i := 0; i < retryTimes; i++ {
res, err := http.Get(jupyterUrl)
if err != nil {
- log.Error("browser jupyterUrl failed.",err)
- if i==retryTimes-1{
+ log.Error("browser jupyterUrl failed.", err)
+ if i == retryTimes-1 {
return cookies, ""
}
diff --git a/modules/modelarts/resty.go b/modules/modelarts/resty.go
index 3ccba9011..a8981cf9a 100755
--- a/modules/modelarts/resty.go
+++ b/modules/modelarts/resty.go
@@ -497,7 +497,7 @@ sendjob:
}
req, _ := json.Marshal(createJobParams)
- log.Info("%s", req)
+ log.Info("postapi json: %s", req)
if res.StatusCode() == http.StatusUnauthorized && retry < 1 {
retry++
@@ -543,6 +543,8 @@ func createTrainJob(createJobParams models.CreateTrainJobParams) (*models.Create
var result models.CreateTrainJobResult
retry := 0
+ req, _ := json.Marshal(createJobParams)
+ log.Info("postapi json: %s", req)
sendjob:
res, err := client.R().
diff --git a/modules/modelarts_cd/modelarts.go b/modules/modelarts_cd/modelarts.go
index 93032fa89..bdc42002a 100755
--- a/modules/modelarts_cd/modelarts.go
+++ b/modules/modelarts_cd/modelarts.go
@@ -5,6 +5,8 @@ import (
"strconv"
"strings"
+ "code.gitea.io/gitea/modules/cloudbrain"
+
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/log"
@@ -88,19 +90,19 @@ type Parameters struct {
} `json:"parameter"`
}
-func GenerateNotebook(ctx *context.Context, displayJobName, jobName, uuid, description, imageId string, spec *models.Specification, bootFile string,autoStopDurationInMs int64) (string, error) {
- imageName, err := GetNotebookImageName(imageId)
+func GenerateNotebook(ctx *context.Context, req cloudbrain.GenerateModelArtsNotebookReq) (string, error) {
+ imageName, err := GetNotebookImageName(req.ImageId)
if err != nil {
log.Error("GetNotebookImageName failed: %v", err.Error())
return "", err
}
createTime := timeutil.TimeStampNow()
jobResult, err := createNotebook(models.CreateNotebookWithoutPoolParams{
- JobName: jobName,
- Description: description,
- Flavor: spec.SourceSpecId,
- Duration: autoStopDurationInMs,
- ImageID: imageId,
+ JobName: req.JobName,
+ Description: req.Description,
+ Flavor: req.Spec.SourceSpecId,
+ Duration: req.AutoStopDurationMs,
+ ImageID: req.ImageId,
Feature: models.NotebookFeature,
Volume: models.VolumeReq{
Capacity: setting.Capacity,
@@ -112,13 +114,13 @@ func GenerateNotebook(ctx *context.Context, displayJobName, jobName, uuid, descr
if err != nil {
log.Error("createNotebook failed: %v", err.Error())
if strings.HasPrefix(err.Error(), UnknownErrorPrefix) {
- log.Info("(%s)unknown error, set temp status", displayJobName)
+ log.Info("(%s)unknown error, set temp status", req.DisplayJobName)
errTemp := models.InsertCloudbrainTemp(&models.CloudbrainTemp{
JobID: models.TempJobId,
VersionID: models.TempVersionId,
Status: models.TempJobStatus,
Type: models.TypeCDCenter,
- JobName: jobName,
+ JobName: req.JobName,
JobType: string(models.JobTypeDebug),
})
if errTemp != nil {
@@ -129,23 +131,28 @@ func GenerateNotebook(ctx *context.Context, displayJobName, jobName, uuid, descr
return "", err
}
task := &models.Cloudbrain{
- Status: jobResult.Status,
- UserID: ctx.User.ID,
- RepoID: ctx.Repo.Repository.ID,
- JobID: jobResult.ID,
- JobName: jobName,
- FlavorCode: spec.SourceSpecId,
- DisplayJobName: displayJobName,
- JobType: string(models.JobTypeDebug),
- Type: models.TypeCDCenter,
- Uuid: uuid,
- ComputeResource: models.NPUResource,
- Image: imageName,
- Description: description,
- CreatedUnix: createTime,
- UpdatedUnix: createTime,
- Spec: spec,
- BootFile: bootFile,
+ Status: jobResult.Status,
+ UserID: ctx.User.ID,
+ RepoID: ctx.Repo.Repository.ID,
+ JobID: jobResult.ID,
+ JobName: req.JobName,
+ FlavorCode: req.Spec.SourceSpecId,
+ DisplayJobName: req.DisplayJobName,
+ JobType: string(models.JobTypeDebug),
+ Type: models.TypeCDCenter,
+ Uuid: req.Uuid,
+ ComputeResource: models.NPUResource,
+ Image: imageName,
+ Description: req.Description,
+ CreatedUnix: createTime,
+ UpdatedUnix: createTime,
+ Spec: req.Spec,
+ BootFile: req.BootFile,
+ ModelName: req.ModelName,
+ ModelVersion: req.ModelVersion,
+ LabelName: req.LabelName,
+ PreTrainModelUrl: req.PreTrainModelUrl,
+ CkptName: req.CkptName,
}
err = models.CreateCloudbrain(task)
@@ -154,7 +161,7 @@ func GenerateNotebook(ctx *context.Context, displayJobName, jobName, uuid, descr
}
stringId := strconv.FormatInt(task.ID, 10)
- notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, stringId, displayJobName, models.ActionCreateDebugNPUTask)
+ notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, stringId, req.DisplayJobName, models.ActionCreateDebugNPUTask)
return jobResult.ID, nil
}
diff --git a/modules/redis/redis_key/repo_redis_key.go b/modules/redis/redis_key/repo_redis_key.go
new file mode 100644
index 000000000..b2b7ccd0a
--- /dev/null
+++ b/modules/redis/redis_key/repo_redis_key.go
@@ -0,0 +1,9 @@
+package redis_key
+
+import "fmt"
+
+const REPO_PREFIX = "repo"
+
+func RepoTopNContributors(repoId int64, N int) string {
+ return KeyJoin(REPO_PREFIX, fmt.Sprint(repoId), fmt.Sprint(N), "top_n_contributor")
+}
diff --git a/modules/setting/setting.go b/modules/setting/setting.go
index e6254489f..2963d6a1e 100755
--- a/modules/setting/setting.go
+++ b/modules/setting/setting.go
@@ -520,6 +520,7 @@ var (
CullInterval string
DebugAttachSize int
+
//benchmark config
IsBenchmarkEnabled bool
BenchmarkOwner string
@@ -585,6 +586,9 @@ var (
TrainJobFLAVORINFOS string
ModelArtsSpecialPools string
ModelArtsMultiNode string
+ ModelArtsShareAddr string
+ ModelArtsMountPath string
+ ModelArtsNasType string
//kanban
IsCloudbrainTimingEnabled bool
@@ -614,6 +618,16 @@ var (
UsageRateBeginTime string
}{}
+ ClearStrategy= struct {
+ Enabled bool
+ ResultSaveDays int
+ BatchSize int
+ DebugJobSize int
+ TrashSaveDays int
+ Cron string
+ RunAtStart bool
+ }{}
+
C2NetInfos *C2NetSqInfos
CenterInfos *AiCenterInfos
C2NetMapInfo map[string]*C2NetSequenceInfo
@@ -667,6 +681,10 @@ var (
CloudbrainStoppedTitle string
CloudbrainStoppedRemark string
+ //repo square config
+ IncubationSourceOrgName string
+ PaperRepoTopicName string
+
//nginx proxy
PROXYURL string
RadarMap = struct {
@@ -1544,6 +1562,9 @@ func NewContext() {
TrainJobFLAVORINFOS = sec.Key("TrainJob_FLAVOR_INFOS").MustString("")
ModelArtsSpecialPools = sec.Key("SPECIAL_POOL").MustString("")
ModelArtsMultiNode = sec.Key("MULTI_NODE").MustString("")
+ ModelArtsShareAddr = sec.Key("ModelArts_Share_Addr").MustString("192.168.0.30:/")
+ ModelArtsMountPath = sec.Key("ModelArts_Mount_Path").MustString("/cache/sfs")
+ ModelArtsNasType = sec.Key("ModelArts_Nas_Type").MustString("nfs")
sec = Cfg.Section("elk")
ElkUrl = sec.Key("ELKURL").MustString("")
@@ -1576,6 +1597,10 @@ func NewContext() {
CloudbrainStoppedTitle = sec.Key("CLOUDBRAIN_STOPPED_TITLE").MustString("您好,您申请的算力资源已结束使用,任务已完成运行,状态为%s,请您关注运行结果")
CloudbrainStoppedRemark = sec.Key("CLOUDBRAIN_STOPPED_REMARK").MustString("感谢您的耐心等待。")
+ sec = Cfg.Section("repo-square")
+ IncubationSourceOrgName = sec.Key("INCUBATION_ORG_NAME").MustString("OpenI")
+ PaperRepoTopicName = sec.Key("PAPER_REPO_TOPIC_NAME").MustString("openi-paper")
+
sec = Cfg.Section("point")
CloudBrainPaySwitch = sec.Key("CLOUDBRAIN_PAY_SWITCH").MustBool(false)
CloudBrainPayDelay = sec.Key("CLOUDBRAIN_PAY_DELAY").MustDuration(30 * time.Minute)
@@ -1621,6 +1646,7 @@ func NewContext() {
getModelConvertConfig()
getModelSafetyConfig()
getModelAppConfig()
+ getClearStrategy()
}
func getModelSafetyConfig() {
@@ -1681,6 +1707,18 @@ func getModelartsCDConfig() {
getNotebookFlavorInfos()
}
+func getClearStrategy(){
+
+ sec := Cfg.Section("clear_strategy")
+ ClearStrategy.Enabled=sec.Key("ENABLED").MustBool(false)
+ ClearStrategy.ResultSaveDays=sec.Key("RESULT_SAVE_DAYS").MustInt(30)
+ ClearStrategy.BatchSize=sec.Key("BATCH_SIZE").MustInt(500)
+ ClearStrategy.DebugJobSize=sec.Key("DEBUG_BATCH_SIZE").MustInt(100)
+ ClearStrategy.TrashSaveDays=sec.Key("TRASH_SAVE_DAYS").MustInt(90)
+ ClearStrategy.Cron=sec.Key("CRON").MustString("* 0,30 2-8 * * ?")
+ ClearStrategy.RunAtStart=sec.Key("RUN_AT_START").MustBool(false)
+}
+
func getGrampusConfig() {
sec := Cfg.Section("grampus")
diff --git a/modules/structs/pipeline.go b/modules/structs/pipeline.go
new file mode 100644
index 000000000..fd26d1b51
--- /dev/null
+++ b/modules/structs/pipeline.go
@@ -0,0 +1,23 @@
+package structs
+
+type Pipeline struct {
+ ID int64 `json:"id"`
+ Name string `json:"name"`
+ Status string `json:"status"`
+}
+type NodeInfo struct {
+ Name string `json:"name"`
+ Status string `json:"status"`
+ Code string `json:"code"`
+ Message string `json:"message"`
+}
+
+type PipelineNotification struct {
+ Type int `json:"type"`
+ Username string `json:"username"`
+ Reponame string `json:"reponame"`
+ Pipeline Pipeline `json:"pipeline"`
+ PipelineRunId string `json:"pipeline_run_id"`
+ Node NodeInfo `json:"node"`
+ OccurTime int64 `json:"occur_time"`
+}
diff --git a/modules/structs/tagger.go b/modules/structs/tagger.go
index 8933c8c5c..c32ad8040 100644
--- a/modules/structs/tagger.go
+++ b/modules/structs/tagger.go
@@ -1,6 +1,7 @@
package structs
type Tagger struct {
+ ID int64 `json:"id"`
Name string `json:"name"`
Email string `json:"email"`
RelAvatarURL string `json:"relAvatarURL"`
diff --git a/modules/templates/helper.go b/modules/templates/helper.go
index 89e470b87..2a9413bde 100755
--- a/modules/templates/helper.go
+++ b/modules/templates/helper.go
@@ -47,6 +47,7 @@ const (
REF_TYPE_BRANCH = "branch"
REF_TYPE_TAG = "tag"
REF_TYPE_PATTERN = "(refs/heads/|refs/tags/)"
+ DURATION_STR_ZERO = "00:00:00"
)
// Used from static.go && dynamic.go
@@ -112,6 +113,7 @@ func NewFuncMap() []template.FuncMap {
"AttachmentStatus": dataset.GetStatusText,
"IsShowDataSetOfCurrentRepo": dataset.IsShowDataSetOfCurrentRepo,
"TimeSinceUnixShort": timeutil.TimeSinceUnixShort,
+ "ConvertDurationToStr": ConvertDurationToStr,
"RawTimeSince": timeutil.RawTimeSince,
"FileSize": base.FileSize,
"PrettyNumber": base.PrettyNumber,
@@ -368,6 +370,7 @@ func NewTextFuncMap() []texttmpl.FuncMap {
"TimeSinceUnix": timeutil.TimeSinceUnix,
"TimeSinceUnix1": timeutil.TimeSinceUnix1,
"TimeSinceUnixShort": timeutil.TimeSinceUnixShort,
+ "ConvertDurationToStr": ConvertDurationToStr,
"RawTimeSince": timeutil.RawTimeSince,
"AttachmentResourceType": dataset.GetResourceType,
"AttachmentStatus": dataset.GetStatusText,
@@ -807,3 +810,9 @@ func MB2GB(size int) string {
}
return s
}
+func ConvertDurationToStr(duration int64) string {
+ if duration <= 0 {
+ return DURATION_STR_ZERO
+ }
+ return util.AddZero(duration/3600) + ":" + util.AddZero(duration%3600/60) + ":" + util.AddZero(duration%60)
+}
diff --git a/options/locale/locale_en-US.ini b/options/locale/locale_en-US.ini
index 647bdb1ad..035b323e0 100755
--- a/options/locale/locale_en-US.ini
+++ b/options/locale/locale_en-US.ini
@@ -852,6 +852,7 @@ description = Description
description_format_err=Description's length can be up to %s characters long.
create_dataset = Create Dataset
download_url=Download Url
+download_model_url=Download Url
download_oper=Operation
download_copy=Copy URL
create_dataset_fail=Failed to create dataset.
@@ -1060,6 +1061,7 @@ model_rename=Duplicate model name, please modify model name.
notebook_file_not_exist=Notebook file does not exist.
notebook_select_wrong=Please select a Notebook(.ipynb) file first.
notebook_file_no_right=You have no right to access the Notebook(.ipynb) file.
+debug_again_fail=Failed to restart the debug task, please try again later.
date=Date
repo_add=Project Increment
@@ -1305,6 +1307,11 @@ model.manage.select.engine=Select model engine
model.manage.modelfile=Model file
model.manage.modellabel=Model label
model.manage.modeldesc=Model description
+model.manage.modelaccess=Model Access
+model.manage.modelaccess.public=Public
+model.manage.modelaccess.private=Private
+model.manage.modelaccess.setpublic=Set Public
+model.manage.modelaccess.setprivate=Set Private
model.manage.baseinfo=Base Information
modelconvert.notcreate=No model conversion task has been created.
modelconvert.importfirst1=Please import the
@@ -1337,9 +1344,12 @@ modelconvert.inputshapeerror=Format input error, please input such as: 1,1,32,32
modelconvert.manage.create_error1=A model transformation task with the same name already exists.
modelconvert.manage.create_error2=Only one running model transformation task can be created.
-modelconvert.manage.model_not_exist=The model does not exist.
+modelconvert.manage.model_not_exist=The model in the task does not exist or has been deleted.
modelconvert.manage.no_operate_right=You have no right to do the operation.
+debug.manage.model_not_exist=The model in the task does not exist or has been deleted; please create a new debug job.
+debug.manage.dataset_not_exist=Some datasets in the task do not exist or have been deleted; please create a new debug job.
+
grampus.train_job.ai_center = AI Center
grampus.dataset_path_rule = The code is storaged in /cache/code;the dataset is storaged in /cache/dataset;and please put your model into /cache/output, then you can download it online。
grampus.gpu_dataset_path_rule = The code is storaged in /tmp/code;the dataset is storaged in /tmp/dataset;and please put your model into /tmp/output, then you can download it online。
@@ -3115,6 +3125,8 @@ reject_pull_request = `suggested changes for %s#%[2]s`
upload_dataset=`upload dataset %s`
task_gpudebugjob=`created CPU/GPU type debugging task %s`
task_npudebugjob=`created NPU type debugging task %s`
+task_c2net_gpudebugjob=`created CPU/GPU type debugging task %s`
+task_c2net_npudebugjob=`created NPU type debugging task %s`
task_nputrainjob=`created NPU training task %s`
task_inferencejob=`created reasoning task %s`
task_benchmark=`created profiling task %s`
@@ -3234,6 +3246,7 @@ dataset = Dataset
resource_specification = Resource specification
dataset_storage_path = Dataset storage path
model_storage_path = Model storage path
+output_storage_path = Output storage path
code_storage_path = Code storage path
benchmark_path = Benchmark script path
snn4imagenet_path = Snn4imagenet script path
@@ -3246,6 +3259,7 @@ specification = specification
select_specification = select specification
description = description
wrong_specification=You cannot use this specification, please choose another item.
+result_cleared=The files of this task have been cleared, so it cannot be restarted; please create a new debug task instead.
resource_use=Resource Occupancy
job_name_rule = Please enter letters, numbers, _ and - up to 64 characters and cannot end with a dash (-).
@@ -3288,8 +3302,11 @@ load_code_failed=Fail to load code, please check if the right branch is selected
error.dataset_select = dataset select error:the count exceed the limit or has same name
new_train_gpu_tooltips = The code is storaged in %s, the dataset is storaged in %s, the pre-trained model is storaged in the run parameter %s, and please put your model into %s then you can download it online
+new_debug_gpu_tooltips = The code is stored in %s, the dataset is stored in %s, the pre-trained model is stored in %s, and please put your model into %s so that you can download it online
+new_debug_gpu_tooltips1 = The code is stored in %s, the dataset is stored in %s, the pre-trained model is stored in %s.
new_train_npu_tooltips = The code is storaged in %s, the pre-trained model is storaged in the run parameter %s, and please put your model into %s then you can download it online
new_infer_gpu_tooltips = The dataset is stored in %s, the model file is stored in %s, please store the inference output in %s for subsequent downloads.
+code_obs_address = Code OBS address
[points]
points = points
diff --git a/options/locale/locale_zh-CN.ini b/options/locale/locale_zh-CN.ini
index 3cfdd913b..6d68df832 100755
--- a/options/locale/locale_zh-CN.ini
+++ b/options/locale/locale_zh-CN.ini
@@ -863,6 +863,7 @@ reference_dataset_fail=关联数据集失败,请稍后再试。
cancel_reference_dataset_fail=取消关联数据集失败,请稍后再试。
download_url=数据集下载地址
+download_model_url=模型文件下载地址
download_copy=复制链接
download_oper=操作
show_dataset=数据集
@@ -1059,6 +1060,7 @@ model_rename=模型名称重复,请修改模型名称
notebook_file_not_exist=Notebook文件不存在。
notebook_select_wrong=请先选择Notebook(.ipynb)文件。
notebook_file_no_right=您没有这个Notebook文件的读权限。
+debug_again_fail=再次调试失败,请稍后再试。
date=日期
repo_add=新增项目
@@ -1318,6 +1320,11 @@ model.manage.select.engine=选择模型框架
model.manage.modelfile=模型文件
model.manage.modellabel=模型标签
model.manage.modeldesc=模型描述
+model.manage.modelaccess=模型权限
+model.manage.modelaccess.public=公开
+model.manage.modelaccess.private=私有
+model.manage.modelaccess.setpublic=设为公开
+model.manage.modelaccess.setprivate=设为私有
model.manage.baseinfo=基本信息
modelconvert.notcreate=未创建过模型转换任务
modelconvert.importfirst1=请您先导入
@@ -1351,9 +1358,13 @@ modelconvert.modelfileempty=请选择模型文件。
modelconvert.manage.create_error1=相同的名称模型转换任务已经存在。
modelconvert.manage.create_error2=只能创建一个正在运行的模型转换任务。
-modelconvert.manage.model_not_exist=选择的模型不存在。
+modelconvert.manage.model_not_exist=任务中选择的模型不存在或者已被删除。
modelconvert.manage.no_operate_right=您没有操作权限。
+
+debug.manage.model_not_exist=任务中选择的模型不存在或者已被删除,请新建调试任务。
+debug.manage.dataset_not_exist=任务中选择的部分数据集不存在或者已被删除,请新建调试任务。
+
grampus.train_job.ai_center=智算中心
grampus.dataset_path_rule = 训练脚本存储在/cache/code中,数据集存储在/cache/dataset中,训练输出请存储在/cache/output中以供后续下载。
grampus.gpu_dataset_path_rule = 训练脚本存储在/tmp/code中,数据集存储在/tmp/dataset中,训练输出请存储在/tmp/output中以供后续下载。
@@ -3132,6 +3143,8 @@ reject_pull_request=`建议变更 %s#%[2]s`
upload_dataset=`上传了数据集文件 %s`
task_gpudebugjob=`创建了CPU/GPU类型调试任务 %s`
task_npudebugjob=`创建了NPU类型调试任务 %s`
+task_c2net_gpudebugjob=`创建了CPU/GPU类型调试任务 %s`
+task_c2net_npudebugjob=`创建了NPU类型调试任务 %s`
task_nputrainjob=`创建了NPU类型训练任务 %s`
task_inferencejob=`创建了推理任务 %s`
task_benchmark=`创建了评测任务 %s`
@@ -3252,6 +3265,7 @@ resource_specification = 资源规格
dataset_storage_path = 数据集存放路径
model_storage_path = 模型存放路径
code_storage_path = 代码存放路径
+output_storage_path = 输出存放路径
benchmark_path = benchmark脚本存放路径
snn4imagenet_path = snn4imagenet脚本存放路径
brainscore_path = brainscore脚本存放路径
@@ -3266,6 +3280,8 @@ card_duration = 运行卡时
card_type = 卡类型
wrong_specification=您目前不能使用这个资源规格,请选择其他资源规格。
+result_cleared=本任务的文件已被清理,无法再次调试,请新建调试任务。
+
job_name_rule = 请输入字母、数字、_和-,最长64个字符,且不能以中划线(-)结尾。
train_dataset_path_rule = 数据集位置存储在运行参数 data_url 中,预训练模型存放在运行参数 ckpt_url 中,训练输出路径存储在运行参数 train_url 中。
infer_dataset_path_rule = 数据集位置存储在运行参数 data_url 中,推理输出路径存储在运行参数 result_url 中。
@@ -3307,8 +3323,11 @@ load_code_failed=代码加载失败,请确认选择了正确的分支。
error.debug_datasetsize = 数据集大小超过限制('%d'GB)
error.dataset_select = 数据集选择错误:数量超过限制或者有同名数据集
new_train_gpu_tooltips = 训练脚本存储在 %s 中,数据集存储在 %s 中,预训练模型存放在运行参数 %s 中,训练输出请存储在 %s 中以供后续下载。
+new_debug_gpu_tooltips = 项目代码存储在 %s 中,数据集存储在 %s 中,选择的模型存储在 %s 中,调试输出请存储在 %s 中以供后续下载。
+new_debug_gpu_tooltips1 = 项目代码存储在 %s 中,数据集存储在 %s 中,选择的模型存储在 %s 中。
new_train_npu_tooltips = 训练脚本存储在 %s 中,预训练模型存放在运行参数 %s 中,训练输出请存储在 %s 中以供后续下载。
new_infer_gpu_tooltips = 数据集存储在 %s 中,模型文件存储在 %s 中,推理输出请存储在 %s 中以供后续下载。
+code_obs_address = 代码obs地址
[points]
points = 积分
diff --git a/package.json b/package.json
index 7748f3de3..fa2c5327b 100644
--- a/package.json
+++ b/package.json
@@ -17,11 +17,12 @@
"core-js": "3.6.5",
"css-loader": "3.5.3",
"cssnano": "4.1.10",
+ "dayjs": "1.10.7",
"domino": "2.1.5",
"dropzone": "5.7.2",
"echarts": "3.8.5",
"element-ui": "2.15.5",
- "esdk-obs-browserjs": "3.20.7",
+ "esdk-obs-browserjs": "3.22.3",
"esdk-obs-nodejs": "3.20.11",
"fast-glob": "3.2.2",
"file-loader": "6.0.0",
diff --git a/public/home/home.js b/public/home/home.js
index aeb51b184..fe843161e 100755
--- a/public/home/home.js
+++ b/public/home/home.js
@@ -247,7 +247,7 @@ document.onreadystatechange = function () {
html += recordPrefix + actionName;
html += " " + record.RefName + ""
}
- else if(record.OpType == "25" || record.OpType == "29"){
+ else if(record.OpType == "25" || record.OpType == "29" || record.OpType == "39" || record.OpType == "40"){
html += recordPrefix + actionName;
html += " " + record.RefName + ""
}
@@ -294,7 +294,10 @@ function getTaskLink(record){
re = re + "/cloudbrain/train-job/" + record.Content;
}else if(record.OpType == 32 || record.OpType == 33){
re = re + "/grampus/train-job/" + record.Content;
+ }else if(record.OpType == 39 || record.OpType == 40){
+ re = re + "/grampus/notebook/" + record.Content;
}
+
re = encodeURI(re);
return re;
}
@@ -450,7 +453,9 @@ var actionNameZH={
"33":"创建了CPU/GPU类型训练任务",
"35":"创建的数据集 {dataset} 被设置为推荐数据集",
"36":"提交了镜像 {image}",
- "37":"提交的镜像 {image} 被设置为推荐镜像",
+ "37": "提交的镜像 {image} 被设置为推荐镜像",
+ "39":"创建了CPU/GPU类型调试任务",
+ "40":"创建了NPU类型调试任务",
};
var actionNameEN={
@@ -481,7 +486,9 @@ var actionNameEN={
"33":" created CPU/GPU type training task",
"35":" created dataset {dataset} was set as recommended dataset",
"36":"committed image {image}",
- "37":"committed image {image} was set as recommended image",
+ "37": "committed image {image} was set as recommended image",
+ "39":" created CPU/GPU type debugging task ",
+ "40":" created NPU type debugging task ",
};
var repoAndOrgZH={
@@ -622,20 +629,12 @@ function displayRepo(json){
for (var i = 0, iLen = repos.length; i < iLen; i++) {
if (i >= 4) break;
var repo = repos[i];
- // ${repo["NumStars"]}${repo["NumForks"]}
html += `
-
- ${repo["Avatar"] ? `

` : `
![]()
`}
+
+ ${repo["Avatar"] ? `

` : `
![]()
`}
${repo["Description"]}
- `;
- // if (repo["Topics"] != null) {
- // for(var j = 0; j < repo["Topics"].length; j++){
- // var topic = repo["Topics"][j];
- // var url = "/explore/repos?q=" + (topic) + "&topic="
- // html += `
${topic}`;
- // }
- // }
+
`;
html += `
`;
diff --git a/routers/admin/resources.go b/routers/admin/resources.go
index feea7b69b..8db958ef5 100644
--- a/routers/admin/resources.go
+++ b/routers/admin/resources.go
@@ -307,3 +307,37 @@ func RefreshHistorySpec(ctx *context.Context) {
r["total"] = total
ctx.JSON(http.StatusOK, response.SuccessWithData(r))
}
+
+func RefreshReposHistoryCnt(ctx *context.Context) {
+ scope := ctx.Query("scope")
+ list := ctx.Query("list")
+
+ var scopeAll = false
+ if scope == "all" {
+ scopeAll = true
+ }
+ var ids = make([]int64, 0)
+ if list != "" {
+ strs := strings.Split(list, "|")
+ for _, s := range strs {
+ i, err := strconv.ParseInt(s, 10, 64)
+ if err != nil {
+ ctx.JSON(http.StatusOK, response.ServerError(err.Error()))
+ return
+ }
+ ids = append(ids, i)
+ }
+
+ }
+
+ total, success, err := resource.RefreshHistorySpec(scopeAll, ids)
+ if err != nil {
+ log.Error("RefreshHistorySpec error. %v", err)
+ ctx.JSON(http.StatusOK, response.ServerError(err.Error()))
+ return
+ }
+ r := make(map[string]interface{}, 0)
+ r["success"] = success
+ r["total"] = total
+ ctx.JSON(http.StatusOK, response.SuccessWithData(r))
+}
diff --git a/routers/api/v1/api.go b/routers/api/v1/api.go
index 8958c55a4..4936c2362 100755
--- a/routers/api/v1/api.go
+++ b/routers/api/v1/api.go
@@ -544,6 +544,10 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Post("/complete_multipart", repo.CompleteMultipart)
}, reqToken())
+ m.Group("/pipeline", func() {
+ m.Post("/notification", bind(api.PipelineNotification{}), notify.PipelineNotify)
+
+ }, reqToken())
// Notifications
m.Group("/notifications", func() {
@@ -610,6 +614,7 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Get("/query_invitation_yesterday", operationReq, repo_ext.QueryInvitationYesterday)
m.Get("/query_invitation_all", operationReq, repo_ext.QueryInvitationAll)
m.Get("/query_invitation_userdefine", operationReq, repo_ext.QueryUserDefineInvitationPage)
+ m.Get("/query_user_annual_report", repo_ext.QueryUserAnnualReport)
m.Get("/download_invitation_detail", operationReq, repo_ext.DownloadInvitationDetail)
@@ -738,7 +743,7 @@ func RegisterRoutes(m *macaron.Macaron) {
}, reqToken(), repoAssignment())
m.Group("/file_notebook", func() {
- m.Get("", reqToken(), repo.GetFileNoteBookInfo)
+ m.Get("", repo.GetFileNoteBookInfo)
m.Post("/create", reqToken(), reqWeChat(), bind(api.CreateFileNotebookJobOption{}), repo.CreateFileNoteBook)
})
@@ -758,6 +763,7 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Group("/:username/:reponame", func() {
m.Get("/right", reqToken(), repo.GetRight)
m.Get("/tagger", reqToken(), repo.ListTagger)
+ m.Get("/cloudBrainJobId", repo.GetCloudBrainJobId)
m.Combo("").Get(reqAnyRepoReader(), repo.Get).
Delete(reqToken(), reqOwner(), repo.Delete).
Patch(reqToken(), reqAdmin(), bind(api.EditRepoOption{}), context.RepoRef(), repo.Edit)
@@ -994,6 +1000,7 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Get("/detail", reqToken(), reqRepoReader(models.UnitTypeCloudBrain), repo.CloudBrainShow)
m.Get("/model_list", repo.CloudBrainModelList)
m.Post("/stop_version", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo_ext.CloudBrainStop)
+ m.Put("/stop", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo.GeneralCloudBrainJobStop)
})
})
m.Group("/inference-job", func() {
@@ -1014,12 +1021,15 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Delete("/delete_model", repo.DeleteModel)
m.Get("/downloadall", repo.DownloadModel)
m.Get("/query_model_byId", repo.QueryModelById)
+ m.Get("/query_model_byName", repo.QueryModelByName)
m.Get("/query_model_for_predict", repo.QueryModelListForPredict)
m.Get("/query_modelfile_for_predict", repo.QueryModelFileForPredict)
m.Get("/query_train_model", repo.QueryTrainModelList)
m.Post("/create_model_convert", repo.CreateModelConvert)
+ m.Post("/convert_stop", repo.StopModelConvert)
m.Get("/show_model_convert_page", repo.ShowModelConvertPage)
m.Get("/query_model_convert_byId", repo.QueryModelConvertById)
+ m.Get("/query_model_convert_byName", repo.QueryModelConvertByName)
m.Get("/:id", repo.GetCloudbrainModelConvertTask)
m.Get("/:id/log", repo.CloudbrainForModelConvertGetLog)
@@ -1052,6 +1062,9 @@ func RegisterRoutes(m *macaron.Macaron) {
})
}, reqRepoReader(models.UnitTypeCloudBrain))
m.Group("/grampus", func() {
+ m.Group("/notebook", func() {
+ m.Get("/:id", repo_ext.GetGrampusNotebook)
+ })
m.Group("/train-job", func() {
m.Group("/:jobid", func() {
m.Get("", repo.GetModelArtsTrainJobVersion)
diff --git a/routers/api/v1/notify/pipeline.go b/routers/api/v1/notify/pipeline.go
new file mode 100644
index 000000000..021af20dc
--- /dev/null
+++ b/routers/api/v1/notify/pipeline.go
@@ -0,0 +1,15 @@
+package notify
+
+import (
+ "net/http"
+
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/modules/context"
+ api "code.gitea.io/gitea/modules/structs"
+)
+
+func PipelineNotify(ctx *context.APIContext, form api.PipelineNotification) {
+
+ ctx.JSON(http.StatusOK, models.BaseOKMessageApi)
+
+}
diff --git a/routers/api/v1/repo/cloudbrain.go b/routers/api/v1/repo/cloudbrain.go
index cd8340c41..805443788 100755
--- a/routers/api/v1/repo/cloudbrain.go
+++ b/routers/api/v1/repo/cloudbrain.go
@@ -9,6 +9,7 @@ import (
"bufio"
"encoding/json"
"io"
+ "io/ioutil"
"net/http"
"os"
"path"
@@ -17,6 +18,8 @@ import (
"strings"
"time"
+ "code.gitea.io/gitea/modules/grampus"
+
cloudbrainService "code.gitea.io/gitea/services/cloudbrain"
"code.gitea.io/gitea/modules/convert"
@@ -80,6 +83,30 @@ func CloudBrainShow(ctx *context.APIContext) {
ctx.JSON(http.StatusOK, models.BaseMessageWithDataApi{Code: 0, Message: "", Data: convert.ToCloudBrain(task)})
}
+func GeneralCloudBrainJobStop(ctx *context.APIContext) {
+ task := ctx.Cloudbrain
+ if task.IsTerminal() {
+ ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("cloudbrain.Already_stopped"))
+ return
+ }
+ var err error
+
+ if ctx.Cloudbrain.Type == models.TypeCloudBrainOne {
+ err = cloudbrain.StopJob(task.JobID)
+ } else if ctx.Cloudbrain.Type == models.TypeCloudBrainTwo {
+ _, err = modelarts.StopTrainJob(task.JobID, strconv.FormatInt(task.VersionID, 10))
+ } else {
+ _, err = grampus.StopJob(task.JobID)
+ }
+
+ if err != nil {
+ log.Warn("cloud brain stopped failed.", err)
+ ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("cloudbrain.Stopped_failed"))
+ return
+ }
+
+ ctx.JSON(http.StatusOK, models.BaseOKMessageApi)
+}
func CreateFileNoteBook(ctx *context.APIContext, option api.CreateFileNotebookJobOption) {
cloudbrainTask.FileNotebookCreate(ctx.Context, option)
}
@@ -211,7 +238,7 @@ func GetCloudbrainTask(ctx *context.APIContext) {
ID := ctx.Params(":id")
- job, err := cloudbrain.GetCloudBrainByIdOrJobId(ID)
+ job, err := cloudbrain.GetCloudBrainByIdOrJobId(ID, "id")
if err != nil {
ctx.NotFound(err)
@@ -621,6 +648,19 @@ func CloudbrainDownloadLogFile(ctx *context.Context) {
}
}
+ existStr := ""
+ if job.JobType == string(models.JobTypeTrain) || job.JobType == string(models.JobTypeInference) {
+ if job.Type == models.TypeCloudBrainOne {
+ result, err := cloudbrain.GetJob(job.JobID)
+ if err == nil && result != nil {
+ jobRes, _ := models.ConvertToJobResultPayload(result.Payload)
+ taskRoles := jobRes.TaskRoles
+ taskRes, _ := models.ConvertToTaskPod(taskRoles[cloudbrain.SubTaskName].(map[string]interface{}))
+ existStr = taskRes.TaskStatuses[0].ExitDiagnostics
+ }
+ }
+ }
+
logDir := "/model"
if job.JobType == string(models.JobTypeInference) || job.JobType == string(models.JobTypeModelSafety) {
logDir = cloudbrain.ResultPath
@@ -638,17 +678,30 @@ func CloudbrainDownloadLogFile(ctx *context.Context) {
}
}
if fileName != "" {
- prefix := "/" + setting.CBCodePathPrefix + job.JobName + logDir
- url, err := storage.Attachments.PresignedGetURL(prefix+"/"+fileName, fileName)
+ prefix := "/" + setting.CBCodePathPrefix + job.JobName + "/model"
+ filePath := setting.Attachment.Minio.RealPath + setting.Attachment.Minio.Bucket + prefix + "/" + fileName
+ // Read the file contents into a byte slice
+ data, err := ioutil.ReadFile(filePath)
if err != nil {
- log.Error("Get minio get SignedUrl failed: %v", err.Error(), ctx.Data["msgID"])
+ ctx.ServerError("ReadFile", err)
+ return
+ }
+
+ // Set the appropriate response headers
+ ctx.Resp.Header().Set("Content-Type", "application/octet-stream")
+ ctx.Resp.Header().Set("Content-Disposition", "attachment; filename="+fileName)
+
+ // Write the file contents to the response
+ if _, err := ctx.Resp.Write(data); err != nil {
+ ctx.ServerError("Write", err)
+ return
+ }
+ if _, err := ctx.Resp.Write([]byte(existStr)); err != nil {
+ log.Error("Write failed: %v", err.Error(), ctx.Data["msgID"])
return
}
- log.Info("fileName=" + fileName)
- http.Redirect(ctx.Resp, ctx.Req.Request, url, http.StatusTemporaryRedirect)
} else {
log.Info("fileName is null.")
-
}
}
@@ -734,8 +787,28 @@ func CloudbrainGetLog(ctx *context.APIContext) {
content = result["Content"].(string)
}
- if ctx.Data["existStr"] != nil && result["Lines"].(int) < 50 {
- content = content + ctx.Data["existStr"].(string)
+ if (job.JobType == string(models.JobTypeTrain) || job.JobType == string(models.JobTypeInference)) && job.Type == models.TypeCloudBrainOne && job.Status == string(models.JobFailed) {
+ if ctx.Data["existStr"] != nil {
+ if baseLine == "" && order == "desc" && result["Lines"].(int) == 0 {
+ result["Lines"] = 1
+ result["EndLine"] = 1
+ content = content + ctx.Data["existStr"].(string)
+ }
+
+ if result["Lines"].(int) == 0 && result["StartLine"] == result["EndLine"] && result["StartLine"].(int) != 0 {
+ content = content + ctx.Data["existStr"].(string)
+ result["Lines"] = 1
+ result["StartLine"] = result["StartLine"].(int) - 1
+ }
+ if result["Lines"].(int) == 1 && result["StartLine"] == result["EndLine"] {
+ result["Lines"] = 0
+ result["StartLine"] = result["StartLine"].(int) + 1
+ }
+ }
+ } else {
+ if ctx.Data["existStr"] != nil && result["Lines"].(int) < 50 {
+ content = content + ctx.Data["existStr"].(string)
+ }
}
logFileName := result["FileName"]
diff --git a/routers/api/v1/repo/cloudbrain_dashboard.go b/routers/api/v1/repo/cloudbrain_dashboard.go
index 7fe5d603c..bb04038b9 100755
--- a/routers/api/v1/repo/cloudbrain_dashboard.go
+++ b/routers/api/v1/repo/cloudbrain_dashboard.go
@@ -103,86 +103,62 @@ func GetAllCloudbrainsOverview(ctx *context.Context) {
})
}
func GetOverviewDuration(ctx *context.Context) {
- recordCloudbrain, err := models.GetRecordBeginTime()
- if err != nil {
- log.Error("Can not get recordCloudbrain", err)
- ctx.Error(http.StatusBadRequest, ctx.Tr("repo.record_begintime_get_err"))
- return
- }
- recordBeginTime := recordCloudbrain[0].Cloudbrain.CreatedUnix
- now := time.Now()
- endTime := now
- var workServerNumber int64
- var cardNum int64
-
- durationAllSum := int64(0)
- cardDuSum := int64(0)
+ durationSum := 0
+ cardDurationSum := 0
- cloudBrainOneCardDuSum := int64(0)
- cloudBrainTwoCardDuSum := int64(0)
- c2NetCardDuSum := int64(0)
- cDNetCardDuSum := int64(0)
+ cloudBrainOneCardDuSum := 0
+ cloudBrainTwoCardDuSum := 0
+ c2NetCardDuSum := 0
+ cDNetCardDuSum := 0
- cloudBrainOneDuration := int64(0)
- cloudBrainTwoDuration := int64(0)
- c2NetDuration := int64(0)
- cDCenterDuration := int64(0)
+ cloudBrainOneDuration := 0
+ cloudBrainTwoDuration := 0
+ c2NetDuration := 0
+ cDCenterDuration := 0
- cloudbrains, _, err := models.CloudbrainAllKanBan(&models.CloudbrainsOptions{
- Type: models.TypeCloudBrainAll,
- BeginTimeUnix: int64(recordBeginTime),
- EndTimeUnix: endTime.Unix(),
- })
+ cloudbrainTypeDuration, err := models.GetCloudbrainTypeCardDuration()
if err != nil {
- ctx.ServerError("Get cloudbrains failed:", err)
+ log.Error("GetCloudbrainTypeCardDuration err!", err)
return
}
- models.LoadSpecs4CloudbrainInfo(cloudbrains)
-
- for _, cloudbrain := range cloudbrains {
- cloudbrain = cloudbrainService.UpdateCloudbrainAiCenter(cloudbrain)
- if cloudbrain.Cloudbrain.Spec != nil {
- cardNum = int64(cloudbrain.Cloudbrain.Spec.AccCardsNum)
- } else {
- cardNum = 1
+ for _, result := range cloudbrainTypeDuration {
+ if result.Type == models.TypeCloudBrainOne {
+ cloudBrainOneDuration = result.DurationSum
+ cloudBrainOneCardDuSum = result.CardDurationSum
}
- if cloudbrain.Cloudbrain.WorkServerNumber >= 1 {
- workServerNumber = int64(cloudbrain.Cloudbrain.WorkServerNumber)
- } else {
- workServerNumber = 1
+ if result.Type == models.TypeCloudBrainTwo {
+ cloudBrainTwoDuration = result.DurationSum
+ cloudBrainTwoCardDuSum = result.CardDurationSum
}
- duration := models.ConvertStrToDuration(cloudbrain.TrainJobDuration)
- CardDuration := workServerNumber * int64(cardNum) * duration
-
- if cloudbrain.Cloudbrain.Type == models.TypeCloudBrainOne {
- cloudBrainOneDuration += duration
- cloudBrainOneCardDuSum += CardDuration
- } else if cloudbrain.Cloudbrain.Type == models.TypeCloudBrainTwo {
- cloudBrainTwoDuration += duration
- cloudBrainTwoCardDuSum += CardDuration
- } else if cloudbrain.Cloudbrain.Type == models.TypeC2Net {
- c2NetDuration += duration
- c2NetCardDuSum += CardDuration
- } else if cloudbrain.Cloudbrain.Type == models.TypeCDCenter {
- cDCenterDuration += duration
- cDNetCardDuSum += CardDuration
+ if result.Type == models.TypeC2Net {
+ c2NetDuration = result.DurationSum
+ c2NetCardDuSum = result.CardDurationSum
+ }
+ if result.Type == models.TypeCDCenter {
+ cDCenterDuration = result.DurationSum
+ cDNetCardDuSum = result.CardDurationSum
}
-
- durationAllSum += duration
- cardDuSum += CardDuration
}
+ cloudbrainAllDuration, err := models.GetCloudbrainAllCardDuration()
+ if err != nil {
+ log.Error("GetCloudbrainAllCardDuration err!", err)
+ return
+ }
+ durationSum = cloudbrainAllDuration.DurationSum
+ cardDurationSum = cloudbrainAllDuration.CardDurationSum
+
ctx.JSON(http.StatusOK, map[string]interface{}{
"cloudBrainOneCardDuSum": cloudBrainOneCardDuSum,
"cloudBrainTwoCardDuSum": cloudBrainTwoCardDuSum,
"c2NetCardDuSum": c2NetCardDuSum,
"cDNetCardDuSum": cDNetCardDuSum,
- "cardDuSum": cardDuSum,
+ "cardDuSum": cardDurationSum,
"cloudBrainOneDuration": cloudBrainOneDuration,
"cloudBrainTwoDuration": cloudBrainTwoDuration,
"c2NetDuration": c2NetDuration,
"cDCenterDuration": cDCenterDuration,
- "durationSum": durationAllSum,
+ "durationSum": durationSum,
})
}
@@ -968,6 +944,8 @@ func GetWaittingTop(ctx *context.Context) {
taskDetail.RepoID = ciTasks[i].RepoID
if ciTasks[i].Repo != nil {
taskDetail.RepoName = ciTasks[i].Repo.OwnerName + "/" + ciTasks[i].Repo.Name
+ } else {
+ taskDetail.RepoName = ""
}
WaitTimeInt := time.Now().Unix() - ciTasks[i].Cloudbrain.CreatedUnix.AsTime().Unix()
taskDetail.WaitTime = models.ConvertDurationToStr(WaitTimeInt)
@@ -975,6 +953,13 @@ func GetWaittingTop(ctx *context.Context) {
if WaitTimeInt < 0 {
taskDetail.WaitTime = "00:00:00"
}
+
+ taskDetail.ID = ciTasks[i].Cloudbrain.ID
+ taskDetail.ComputeResource = ciTasks[i].Cloudbrain.ComputeResource
+ taskDetail.JobType = ciTasks[i].Cloudbrain.JobType
+ taskDetail.JobID = ciTasks[i].Cloudbrain.JobID
+ taskDetail.Type = ciTasks[i].Cloudbrain.Type
+
tasks = append(tasks, taskDetail)
}
ctx.JSON(http.StatusOK, map[string]interface{}{
@@ -1001,6 +986,12 @@ func GetRunningTop(ctx *context.Context) {
taskDetail.RepoName = ciTasks[i].Repo.OwnerName + "/" + ciTasks[i].Repo.Name
}
+ taskDetail.ID = ciTasks[i].Cloudbrain.ID
+ taskDetail.ComputeResource = ciTasks[i].Cloudbrain.ComputeResource
+ taskDetail.JobType = ciTasks[i].Cloudbrain.JobType
+ taskDetail.JobID = ciTasks[i].Cloudbrain.JobID
+ taskDetail.Type = ciTasks[i].Cloudbrain.Type
+
tasks = append(tasks, taskDetail)
}
ctx.JSON(http.StatusOK, map[string]interface{}{
diff --git a/routers/api/v1/repo/images.go b/routers/api/v1/repo/images.go
index f0cb62980..e09ca260a 100644
--- a/routers/api/v1/repo/images.go
+++ b/routers/api/v1/repo/images.go
@@ -88,7 +88,7 @@ func getModelArtsImages(ctx *context.APIContext) {
}
func getC2netNpuImages(ctx *context.APIContext) {
- images, err := grampus.GetImages(grampus.ProcessorTypeNPU)
+ images, err := grampus.GetImages(grampus.ProcessorTypeNPU, string(models.JobTypeTrain))
var npuImageInfos []NPUImageINFO
if err != nil {
log.Error("GetImages failed:", err.Error())
diff --git a/routers/api/v1/repo/mlops.go b/routers/api/v1/repo/mlops.go
index 43969330d..322edc3e5 100644
--- a/routers/api/v1/repo/mlops.go
+++ b/routers/api/v1/repo/mlops.go
@@ -69,3 +69,17 @@ func GetRight(ctx *context.APIContext) {
})
}
+
+func GetCloudBrainJobId(ctx *context.APIContext) {
+ cloudbrains, err := models.GetCloudbrainsByDisplayJobName(ctx.Repo.Repository.ID, ctx.Query("jobType"), ctx.Query("name"))
+ if err != nil {
+ log.Warn("get cloudbrain by display name failed", err)
+ ctx.JSON(http.StatusOK, map[string]string{"jobId": ""})
+ return
+ }
+ if len(cloudbrains) > 0 {
+ ctx.JSON(http.StatusOK, map[string]string{"jobId": cloudbrains[0].JobID})
+ return
+ }
+ ctx.JSON(http.StatusOK, map[string]string{"jobId": ""})
+}
diff --git a/routers/api/v1/repo/modelarts.go b/routers/api/v1/repo/modelarts.go
index 127ddd835..a0abab38b 100755
--- a/routers/api/v1/repo/modelarts.go
+++ b/routers/api/v1/repo/modelarts.go
@@ -39,7 +39,7 @@ func GetModelArtsNotebook2(ctx *context.APIContext) {
ID := ctx.Params(":id")
- job,err := cloudbrain.GetCloudBrainByIdOrJobId(ID)
+ job, err := cloudbrain.GetCloudBrainByIdOrJobId(ID, "id")
if err != nil {
ctx.NotFound(err)
diff --git a/routers/api/v1/repo/modelmanage.go b/routers/api/v1/repo/modelmanage.go
index 15260790d..3b0aed0d5 100644
--- a/routers/api/v1/repo/modelmanage.go
+++ b/routers/api/v1/repo/modelmanage.go
@@ -43,8 +43,14 @@ func QueryModelById(ctx *context.APIContext) {
routerRepo.QueryModelById(ctx.Context)
}
+func QueryModelByName(ctx *context.APIContext) {
+ log.Info("QueryModelByName by api.")
+ routerRepo.ShowSingleModel(ctx.Context)
+}
+
func QueryModelListForPredict(ctx *context.APIContext) {
log.Info("QueryModelListForPredict by api.")
+ ctx.Context.SetParams("isOnlyThisRepo", "true")
routerRepo.QueryModelListForPredict(ctx.Context)
}
@@ -88,6 +94,11 @@ func CreateModelConvert(ctx *context.APIContext) {
routerRepo.SaveModelConvert(ctx.Context)
}
+func StopModelConvert(ctx *context.APIContext) {
+ log.Info("StopModelConvert by api.")
+ routerRepo.StopModelConvertApi(ctx.Context)
+}
+
func ShowModelConvertPage(ctx *context.APIContext) {
log.Info("ShowModelConvertPage by api.")
modelResult, count, err := routerRepo.GetModelConvertPageData(ctx.Context)
@@ -113,3 +124,12 @@ func QueryModelConvertById(ctx *context.APIContext) {
ctx.JSON(http.StatusOK, nil)
}
}
+
+func QueryModelConvertByName(ctx *context.APIContext) {
+ modelResult, err := routerRepo.GetModelConvertByName(ctx.Context)
+ if err == nil {
+ ctx.JSON(http.StatusOK, modelResult)
+ } else {
+ ctx.JSON(http.StatusOK, nil)
+ }
+}
diff --git a/routers/api/v1/repo/topic.go b/routers/api/v1/repo/topic.go
index f4ff7a329..d2522c9ce 100644
--- a/routers/api/v1/repo/topic.go
+++ b/routers/api/v1/repo/topic.go
@@ -177,13 +177,25 @@ func AddTopic(ctx *context.APIContext) {
return
}
- _, err = models.AddTopic(ctx.Repo.Repository.ID, topicName)
+ topic, err := models.AddTopic(ctx.Repo.Repository.ID, topicName)
if err != nil {
log.Error("AddTopic failed: %v", err)
ctx.InternalServerError(err)
return
}
-
+ found := false
+ topicNames := make([]string, len(topics))
+ for i, t := range topics {
+ topicNames[i] = t.Name
+ if strings.EqualFold(topic.Name, t.Name) {
+ found = true
+ break
+ }
+ }
+ if !found && topic.Name != "" {
+ topicNames = append(topicNames, topic.Name)
+ }
+ models.UpdateRepoTopics(ctx.Repo.Repository.ID, topicNames)
ctx.Status(http.StatusNoContent)
}
diff --git a/routers/home.go b/routers/home.go
index 092b30fe3..40a41bd68 100755
--- a/routers/home.go
+++ b/routers/home.go
@@ -7,6 +7,7 @@ package routers
import (
"bytes"
+ "code.gitea.io/gitea/routers/response"
"encoding/json"
"net/http"
"strconv"
@@ -43,6 +44,8 @@ const (
tplHomeTerm base.TplName = "terms"
tplHomePrivacy base.TplName = "privacy"
tplResoruceDesc base.TplName = "resource_desc"
+ tplRepoSquare base.TplName = "explore/repos/square"
+ tplRepoSearch base.TplName = "explore/repos/search"
)
// Home render home page
@@ -296,6 +299,109 @@ func ExploreRepos(ctx *context.Context) {
})
}
+func GetRepoSquarePage(ctx *context.Context) {
+ ctx.Data["SquareBanners"] = repository.GetBanners()
+ ctx.Data["SquareTopics"] = repository.GetTopics()
+ ctx.Data["SquareRecommendRepos"] = repository.GetRecommendRepos()
+
+ repos, _ := repository.GetPreferredRepos()
+ ctx.Data["SquarePreferredRepos"] = repos
+ ctx.HTML(200, tplRepoSquare)
+}
+func GetRepoSearchPage(ctx *context.Context) {
+ ctx.Data["SquareTopics"] = repository.GetTopics()
+ ctx.HTML(200, tplRepoSearch)
+}
+
+func RepoSquare(ctx *context.Context) {
+ var result []*models.Repository4Card
+ var err error
+ switch ctx.Query("type") {
+ case "preferred":
+ result, err = repository.GetPreferredRepos()
+ case "incubation":
+ result, err = repository.GetIncubationRepos()
+ case "hot-paper":
+ result, err = repository.GetHotPaperRepos()
+ default:
+ result, err = repository.GetPreferredRepos()
+ }
+ if err != nil {
+ ctx.JSON(http.StatusOK, response.ResponseError(err))
+ return
+ }
+ resultMap := make(map[string]interface{}, 0)
+ resultMap["Repos"] = result
+ ctx.JSON(http.StatusOK, response.SuccessWithData(resultMap))
+}
+
+func ActiveUser(ctx *context.Context) {
+ var err error
+ var currentUserId int64
+ if ctx.User != nil {
+ currentUserId = ctx.User.ID
+ }
+ result, err := repository.GetActiveUser4Square(currentUserId)
+ if err != nil {
+ log.Error("ActiveUser err. %v", err)
+ ctx.JSON(http.StatusOK, response.Success())
+ return
+ }
+ resultMap := make(map[string]interface{}, 0)
+ resultMap["Users"] = result
+ ctx.JSON(http.StatusOK, response.SuccessWithData(resultMap))
+}
+func ActiveOrg(ctx *context.Context) {
+ result, err := repository.GetActiveOrgs()
+ if err != nil {
+ log.Error("ActiveOrg err. %v", err)
+ ctx.JSON(http.StatusOK, response.Success())
+ return
+ }
+ resultMap := make(map[string]interface{}, 0)
+ resultMap["Orgs"] = result
+ ctx.JSON(http.StatusOK, response.SuccessWithData(resultMap))
+}
+
+func RepoFind(ctx *context.Context) {
+ keyword := strings.Trim(ctx.Query("q"), " ")
+ topic := strings.Trim(ctx.Query("topic"), " ")
+ sort := strings.Trim(ctx.Query("sort"), " ")
+ page := ctx.QueryInt("page")
+ pageSize := ctx.QueryInt("pageSize")
+ if pageSize == 0 {
+ pageSize = 15
+ }
+ if pageSize > 100 {
+ ctx.JSON(http.StatusOK, response.ServerError("pageSize illegal"))
+ return
+ }
+ if page <= 0 {
+ page = 1
+ }
+
+ var ownerID int64
+ if ctx.User != nil && !ctx.User.IsAdmin {
+ ownerID = ctx.User.ID
+ }
+
+ result, err := repository.FindRepos(repository.FindReposOptions{
+ ListOptions: models.ListOptions{Page: page, PageSize: pageSize},
+ Actor: ctx.User,
+ Sort: sort,
+ Keyword: keyword,
+ Topic: topic,
+ Private: ctx.User != nil,
+ OwnerID: ownerID,
+ })
+ if err != nil {
+ log.Error("RepoFind error. %v", err)
+ ctx.JSON(http.StatusOK, response.ResponseError(err))
+ return
+ }
+ ctx.JSON(http.StatusOK, response.SuccessWithData(result))
+}
+
func ExploreDatasets(ctx *context.Context) {
ctx.Data["Title"] = ctx.Tr("explore")
ctx.Data["PageIsExplore"] = true
diff --git a/routers/private/internal.go b/routers/private/internal.go
index 14b0f05de..0b8ae600a 100755
--- a/routers/private/internal.go
+++ b/routers/private/internal.go
@@ -6,6 +6,7 @@
package private
import (
+ "code.gitea.io/gitea/services/repository"
"strings"
"code.gitea.io/gitea/routers/admin"
@@ -55,7 +56,9 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Post("/task/history_handle/duration", repo.HandleTaskWithNoDuration)
m.Post("/task/history_handle/aicenter", repo.HandleTaskWithAiCenter)
m.Post("/resources/specification/handle_historical_task", admin.RefreshHistorySpec)
+ m.Post("/repos/cnt_stat/handle_historical_task", admin.RefreshHistorySpec)
m.Post("/duration_statisctic/history_handle", repo.CloudbrainUpdateHistoryData)
+ m.Post("/square/repo/stat/refresh", repository.RefreshRepoStatData)
}, CheckInternalToken)
}
diff --git a/routers/repo/ai_model_convert.go b/routers/repo/ai_model_convert.go
index 4ba414bff..36664458e 100644
--- a/routers/repo/ai_model_convert.go
+++ b/routers/repo/ai_model_convert.go
@@ -573,13 +573,10 @@ func deleteCloudBrainTask(task *models.AiModelConvert) {
}
}
-func StopModelConvert(ctx *context.Context) {
- id := ctx.Params(":id")
- log.Info("stop model convert start.id=" + id)
+func stopModelConvert(id string) error {
job, err := models.QueryModelConvertById(id)
if err != nil {
- ctx.ServerError("Not found task.", err)
- return
+ return err
}
if job.IsGpuTrainTask() {
err = cloudbrain.StopJob(job.CloudBrainTaskId)
@@ -600,6 +597,35 @@ func StopModelConvert(ctx *context.Context) {
err = models.UpdateModelConvert(job)
if err != nil {
log.Error("UpdateModelConvert failed:", err)
+ return err
+ }
+ return nil
+}
+
+func StopModelConvertApi(ctx *context.Context) {
+ id := ctx.Query("id")
+ log.Info("stop model convert start.id=" + id)
+ err := stopModelConvert(id)
+ if err == nil {
+ ctx.JSON(200, map[string]string{
+ "code": "0",
+ "msg": "succeed",
+ })
+ } else {
+ ctx.JSON(200, map[string]string{
+ "code": "1",
+ "msg": err.Error(),
+ })
+ }
+}
+
+func StopModelConvert(ctx *context.Context) {
+ id := ctx.Params(":id")
+ log.Info("stop model convert start.id=" + id)
+ err := stopModelConvert(id)
+ if err != nil {
+ ctx.ServerError("Not found task.", err)
+ return
}
ctx.Redirect(setting.AppSubURL + ctx.Repo.RepoLink + "/modelmanage/convert_model")
}
@@ -620,7 +646,7 @@ func ShowModelConvertInfo(ctx *context.Context) {
return
}
ctx.Data["Name"] = job.Name
- ctx.Data["canDownload"] = isOper(ctx, job.UserId)
+ ctx.Data["canDownload"] = isOperModifyOrDelete(ctx, job.UserId)
user, err := models.GetUserByID(job.UserId)
if err == nil {
job.UserName = user.Name
@@ -732,6 +758,11 @@ func GetModelConvertById(ctx *context.Context) (*models.AiModelConvert, error) {
return models.QueryModelConvertById(id)
}
+func GetModelConvertByName(ctx *context.Context) ([]*models.AiModelConvert, error) {
+ name := ctx.Query("name")
+ return models.QueryModelConvertByName(name, ctx.Repo.Repository.ID)
+}
+
func GetModelConvertPageData(ctx *context.Context) ([]*models.AiModelConvert, int64, error) {
page := ctx.QueryInt("page")
if page <= 0 {
@@ -755,7 +786,7 @@ func GetModelConvertPageData(ctx *context.Context) ([]*models.AiModelConvert, in
}
userIds := make([]int64, len(modelResult))
for i, model := range modelResult {
- model.IsCanOper = isOper(ctx, model.UserId)
+ model.IsCanOper = isOperModifyOrDelete(ctx, model.UserId)
model.IsCanDelete = isCanDelete(ctx, model.UserId)
userIds[i] = model.UserId
}
diff --git a/routers/repo/ai_model_manage.go b/routers/repo/ai_model_manage.go
index 7eedb9bc4..592194371 100644
--- a/routers/repo/ai_model_manage.go
+++ b/routers/repo/ai_model_manage.go
@@ -2,6 +2,7 @@ package repo
import (
"archive/zip"
+ "code.gitea.io/gitea/services/repository"
"encoding/json"
"errors"
"fmt"
@@ -93,7 +94,7 @@ func saveModelByParameters(jobId string, versionName string, name string, versio
log.Info("accuracyJson=" + string(accuracyJson))
aiTask.ContainerIp = ""
aiTaskJson, _ := json.Marshal(aiTask)
-
+ isPrivate := ctx.QueryBool("isPrivate")
model := &models.AiModelManage{
ID: id,
Version: version,
@@ -114,6 +115,7 @@ func saveModelByParameters(jobId string, versionName string, name string, versio
TrainTaskInfo: string(aiTaskJson),
Accuracy: string(accuracyJson),
Status: STATUS_COPY_MODEL,
+ IsPrivate: isPrivate,
}
err = models.SaveModelToDb(model)
@@ -169,10 +171,17 @@ func updateStatus(id string, modelSize int64, status int, modelPath string, stat
if len(statusDesc) > 400 {
statusDesc = statusDesc[0:400]
}
+ m, _ := models.QueryModelById(id)
err := models.ModifyModelStatus(id, modelSize, status, modelPath, statusDesc)
if err != nil {
log.Info("update status error." + err.Error())
}
+ if m != nil {
+ if modelSize > 0 && m.Size == 0 {
+ go repository.ResetRepoModelNum(m.RepoId)
+ }
+ }
+
}
func SaveNewNameModel(ctx *context.Context) {
@@ -216,6 +225,7 @@ func SaveLocalModel(ctx *context.Context) {
description := ctx.Query("description")
engine := ctx.QueryInt("engine")
taskType := ctx.QueryInt("type")
+ isPrivate := ctx.QueryBool("isPrivate")
modelActualPath := ""
if taskType == models.TypeCloudBrainOne {
destKeyNamePrefix := Model_prefix + models.AttachmentRelativePath(id) + "/"
@@ -262,6 +272,7 @@ func SaveLocalModel(ctx *context.Context) {
TrainTaskInfo: "",
Accuracy: "",
Status: STATUS_FINISHED,
+ IsPrivate: isPrivate,
}
err := models.SaveModelToDb(model)
@@ -305,13 +316,14 @@ func getSize(files []storage.FileInfo) int64 {
func UpdateModelSize(modeluuid string) {
model, err := models.QueryModelById(modeluuid)
if err == nil {
+ var size int64
if model.Type == models.TypeCloudBrainOne {
if strings.HasPrefix(model.Path, setting.Attachment.Minio.Bucket+"/"+Model_prefix) {
files, err := storage.GetAllObjectByBucketAndPrefixMinio(setting.Attachment.Minio.Bucket, model.Path[len(setting.Attachment.Minio.Bucket)+1:])
if err != nil {
log.Info("Failed to query model size from minio. id=" + modeluuid)
}
- size := getSize(files)
+ size = getSize(files)
models.ModifyModelSize(modeluuid, size)
}
} else if model.Type == models.TypeCloudBrainTwo {
@@ -320,10 +332,13 @@ func UpdateModelSize(modeluuid string) {
if err != nil {
log.Info("Failed to query model size from obs. id=" + modeluuid)
}
- size := getSize(files)
+ size = getSize(files)
models.ModifyModelSize(modeluuid, size)
}
}
+ if model.Size == 0 && size > 0 {
+ go repository.ResetRepoModelNum(model.RepoId)
+ }
} else {
log.Info("not found model,uuid=" + modeluuid)
}
@@ -438,13 +453,14 @@ func DeleteModelFile(ctx *context.Context) {
fileName := ctx.Query("fileName")
model, err := models.QueryModelById(id)
if err == nil {
+ var totalSize int64
if model.ModelType == MODEL_LOCAL_TYPE {
if model.Type == models.TypeCloudBrainOne {
bucketName := setting.Attachment.Minio.Bucket
objectName := model.Path[len(bucketName)+1:] + fileName
log.Info("delete bucket=" + bucketName + " path=" + objectName)
if strings.HasPrefix(model.Path, bucketName+"/"+Model_prefix) {
- totalSize := storage.MinioGetFilesSize(bucketName, []string{objectName})
+ totalSize = storage.MinioGetFilesSize(bucketName, []string{objectName})
err := storage.Attachments.DeleteDir(objectName)
if err != nil {
log.Info("Failed to delete model. id=" + id)
@@ -464,7 +480,7 @@ func DeleteModelFile(ctx *context.Context) {
objectName := model.Path[len(setting.Bucket)+1:] + fileName
log.Info("delete bucket=" + setting.Bucket + " path=" + objectName)
if strings.HasPrefix(model.Path, bucketName+"/"+Model_prefix) {
- totalSize := storage.ObsGetFilesSize(bucketName, []string{objectName})
+ totalSize = storage.ObsGetFilesSize(bucketName, []string{objectName})
err := storage.ObsRemoveObject(bucketName, objectName)
if err != nil {
log.Info("Failed to delete model. id=" + id)
@@ -481,6 +497,9 @@ func DeleteModelFile(ctx *context.Context) {
}
}
}
+ if (model.Size - totalSize) <= 0 {
+ go repository.ResetRepoModelNum(model.RepoId)
+ }
}
ctx.JSON(200, map[string]string{
"code": "0",
@@ -549,25 +568,14 @@ func deleteModelByID(ctx *context.Context, id string) error {
}
}
}
+ if model.Size > 0 {
+ go repository.ResetRepoModelNum(model.RepoId)
+ }
}
}
return err
}
-func QueryModelByParameters(repoId int64, page int) ([]*models.AiModelManage, int64, error) {
-
- return models.QueryModel(&models.AiModelQueryOptions{
- ListOptions: models.ListOptions{
- Page: page,
- PageSize: setting.UI.IssuePagingNum,
- },
- RepoID: repoId,
- Type: -1,
- New: MODEL_LATEST,
- Status: -1,
- })
-}
-
func DownloadMultiModelFile(ctx *context.Context) {
log.Info("DownloadMultiModelFile start.")
id := ctx.Query("id")
@@ -578,7 +586,7 @@ func DownloadMultiModelFile(ctx *context.Context) {
ctx.ServerError("no such model:", err)
return
}
- if !isOper(ctx, task.UserId) {
+ if !isCanDownload(ctx, task) {
ctx.NotFound(ctx.Req.URL.RequestURI(), nil)
return
}
@@ -806,7 +814,7 @@ func DownloadSingleModelFile(ctx *context.Context) {
ctx.ServerError("no such model:", err)
return
}
- if !isOper(ctx, task.UserId) {
+ if !isCanDownload(ctx, task) {
ctx.NotFound(ctx.Req.URL.RequestURI(), nil)
return
}
@@ -874,8 +882,9 @@ func QueryModelById(ctx *context.Context) {
id := ctx.Query("id")
model, err := models.QueryModelById(id)
if err == nil {
- model.IsCanOper = isOper(ctx, model.UserId)
+ model.IsCanOper = isOperModifyOrDelete(ctx, model.UserId)
model.IsCanDelete = isCanDelete(ctx, model.UserId)
+ model.IsCanDownload = isCanDownload(ctx, model)
removeIpInfo(model)
ctx.JSON(http.StatusOK, model)
} else {
@@ -891,7 +900,8 @@ func ShowSingleModel(ctx *context.Context) {
userIds := make([]int64, len(models))
for i, model := range models {
- model.IsCanOper = isOper(ctx, model.UserId)
+ model.IsCanOper = isOperModifyOrDelete(ctx, model.UserId)
+ model.IsCanDownload = isCanDownload(ctx, model)
model.IsCanDelete = isCanDelete(ctx, model.UserId)
userIds[i] = model.UserId
}
@@ -941,7 +951,8 @@ func ShowOneVersionOtherModel(ctx *context.Context) {
userIds := make([]int64, len(aimodels))
for i, model := range aimodels {
- model.IsCanOper = isOper(ctx, model.UserId)
+ model.IsCanOper = isOperModifyOrDelete(ctx, model.UserId)
+ model.IsCanDownload = isCanDownload(ctx, model)
model.IsCanDelete = isCanDelete(ctx, model.UserId)
userIds[i] = model.UserId
}
@@ -964,6 +975,7 @@ func ShowOneVersionOtherModel(ctx *context.Context) {
}
func SetModelCount(ctx *context.Context) {
+ isQueryPrivate := isQueryPrivateModel(ctx)
repoId := ctx.Repo.Repository.ID
Type := -1
_, count, _ := models.QueryModel(&models.AiModelQueryOptions{
@@ -971,10 +983,12 @@ func SetModelCount(ctx *context.Context) {
Page: 1,
PageSize: 2,
},
- RepoID: repoId,
- Type: Type,
- New: MODEL_LATEST,
- Status: -1,
+ RepoID: repoId,
+ Type: Type,
+ New: MODEL_LATEST,
+ IsOnlyThisRepo: true,
+ Status: -1,
+ IsQueryPrivate: isQueryPrivate,
})
ctx.Data["MODEL_COUNT"] = count
}
@@ -1001,27 +1015,87 @@ func isQueryRight(ctx *context.Context) bool {
}
}
+func isCanDownload(ctx *context.Context, task *models.AiModelManage) bool {
+ if ctx.User == nil {
+ return false
+ }
+ isCollaborator, err := ctx.Repo.Repository.IsCollaborator(ctx.User.ID)
+ if err != nil {
+ log.Info("query error.")
+ }
+ isTeamMember, err := ctx.Repo.Repository.IsInRepoTeam(ctx.User.ID)
+ if err != nil {
+ log.Info("query IsInRepoTeam error." + err.Error())
+ }
+ if ctx.User.IsAdmin || ctx.User.ID == task.UserId || isCollaborator || isTeamMember {
+ return true
+ }
+ if ctx.Repo.IsOwner() {
+ return true
+ }
+ if !task.IsPrivate {
+ return true
+ }
+ return false
+}
+
+func isQueryPrivateModel(ctx *context.Context) bool {
+ if ctx.User == nil {
+ return false
+ }
+ isCollaborator, err := ctx.Repo.Repository.IsCollaborator(ctx.User.ID)
+ if err != nil {
+ log.Info("query IsCollaborator error." + err.Error())
+ }
+ isTeamMember, err := ctx.Repo.Repository.IsInRepoTeam(ctx.User.ID)
+ if err != nil {
+ log.Info("query IsInRepoTeam error." + err.Error())
+ }
+ if ctx.User.IsAdmin || isCollaborator || isTeamMember {
+ return true
+ }
+ if ctx.Repo.IsOwner() {
+ return true
+ }
+ return false
+}
+
func isCanDelete(ctx *context.Context, modelUserId int64) bool {
if ctx.User == nil {
return false
}
- if ctx.User.IsAdmin || ctx.User.ID == modelUserId {
+ if ctx.User.ID == modelUserId {
+ return true
+ }
+ return isAdminRight(ctx)
+}
+
+func isAdminRight(ctx *context.Context) bool {
+ if ctx.User.IsAdmin {
return true
}
if ctx.Repo.IsOwner() {
return true
}
+ permission, err := models.GetUserRepoPermission(ctx.Repo.Repository, ctx.User)
+ if err != nil {
+ log.Error("GetUserRepoPermission failed:%v", err.Error())
+ return false
+ }
+ if permission.AccessMode >= models.AccessModeAdmin {
+ return true
+ }
return false
}
-func isOper(ctx *context.Context, modelUserId int64) bool {
+func isOperModifyOrDelete(ctx *context.Context, modelUserId int64) bool {
if ctx.User == nil {
return false
}
if ctx.User.IsAdmin || ctx.User.ID == modelUserId {
return true
}
- return false
+ return isAdminRight(ctx)
}
func ShowModelPageInfo(ctx *context.Context) {
@@ -1038,6 +1112,7 @@ func ShowModelPageInfo(ctx *context.Context) {
if pageSize <= 0 {
pageSize = setting.UI.IssuePagingNum
}
+ isQueryPrivate := isQueryPrivateModel(ctx)
repoId := ctx.Repo.Repository.ID
Type := -1
modelResult, count, err := models.QueryModel(&models.AiModelQueryOptions{
@@ -1045,10 +1120,12 @@ func ShowModelPageInfo(ctx *context.Context) {
Page: page,
PageSize: pageSize,
},
- RepoID: repoId,
- Type: Type,
- New: MODEL_LATEST,
- Status: -1,
+ RepoID: repoId,
+ Type: Type,
+ New: MODEL_LATEST,
+ IsOnlyThisRepo: true,
+ Status: -1,
+ IsQueryPrivate: isQueryPrivate,
})
if err != nil {
ctx.ServerError("Cloudbrain", err)
@@ -1057,8 +1134,9 @@ func ShowModelPageInfo(ctx *context.Context) {
userIds := make([]int64, len(modelResult))
for i, model := range modelResult {
- model.IsCanOper = isOper(ctx, model.UserId)
+ model.IsCanOper = isOperModifyOrDelete(ctx, model.UserId)
model.IsCanDelete = isCanDelete(ctx, model.UserId)
+ model.IsCanDownload = isCanDownload(ctx, model)
userIds[i] = model.UserId
}
@@ -1089,6 +1167,37 @@ func ModifyModel(id string, description string) error {
return err
}
+func ModifyModelPrivate(ctx *context.Context) {
+ id := ctx.Query("id")
+ isPrivate := ctx.QueryBool("isPrivate")
+ re := map[string]string{
+ "code": "-1",
+ }
+ task, err := models.QueryModelById(id)
+ if err != nil || task == nil {
+ re["msg"] = err.Error()
+ log.Error("no such model!", err.Error())
+ ctx.JSON(200, re)
+ return
+ }
+ if !isOperModifyOrDelete(ctx, task.UserId) {
+ re["msg"] = "No right to operation."
+ ctx.JSON(200, re)
+ return
+ }
+ err = models.ModifyModelPrivate(id, isPrivate)
+ if err == nil {
+ re["code"] = "0"
+ ctx.JSON(200, re)
+ log.Info("modify success.")
+ } else {
+ re["msg"] = err.Error()
+ ctx.JSON(200, re)
+ log.Info("Failed to modify.id=" + id + " isprivate=" + fmt.Sprint(isPrivate) + " error:" + err.Error())
+ }
+
+}
+
func ModifyModelInfo(ctx *context.Context) {
log.Info("modify model start.")
id := ctx.Query("id")
@@ -1102,7 +1211,7 @@ func ModifyModelInfo(ctx *context.Context) {
ctx.JSON(200, re)
return
}
- if !isOper(ctx, task.UserId) {
+ if !isOperModifyOrDelete(ctx, task.UserId) {
re["msg"] = "No right to operation."
ctx.JSON(200, re)
return
@@ -1112,6 +1221,7 @@ func ModifyModelInfo(ctx *context.Context) {
label := ctx.Query("label")
description := ctx.Query("description")
engine := ctx.QueryInt("engine")
+ isPrivate := ctx.QueryBool("isPrivate")
aimodels := models.QueryModelByName(name, task.RepoId)
if aimodels != nil && len(aimodels) > 0 {
if len(aimodels) == 1 {
@@ -1126,14 +1236,14 @@ func ModifyModelInfo(ctx *context.Context) {
return
}
}
- err = models.ModifyLocalModel(id, name, label, description, engine)
+ err = models.ModifyLocalModel(id, name, label, description, engine, isPrivate)
} else {
label := ctx.Query("label")
description := ctx.Query("description")
engine := task.Engine
name := task.Name
- err = models.ModifyLocalModel(id, name, label, description, int(engine))
+ err = models.ModifyLocalModel(id, name, label, description, int(engine), task.IsPrivate)
}
if err != nil {
@@ -1148,15 +1258,27 @@ func ModifyModelInfo(ctx *context.Context) {
func QueryModelListForPredict(ctx *context.Context) {
repoId := ctx.Repo.Repository.ID
+ page := ctx.QueryInt("page")
+ if page <= 0 {
+ page = -1
+ }
+ pageSize := ctx.QueryInt("pageSize")
+ if pageSize <= 0 {
+ pageSize = -1
+ }
+ isQueryPrivate := isQueryPrivateModel(ctx)
+ //IsOnlyThisRepo := ctx.QueryBool("isOnlyThisRepo")
modelResult, count, err := models.QueryModel(&models.AiModelQueryOptions{
ListOptions: models.ListOptions{
- Page: -1,
- PageSize: -1,
+ Page: page,
+ PageSize: pageSize,
},
- RepoID: repoId,
- Type: ctx.QueryInt("type"),
- New: -1,
- Status: 0,
+ RepoID: repoId,
+ Type: ctx.QueryInt("type"),
+ New: -1,
+ Status: 0,
+ IsOnlyThisRepo: true,
+ IsQueryPrivate: isQueryPrivate,
})
if err != nil {
ctx.ServerError("Cloudbrain", err)
@@ -1168,7 +1290,9 @@ func QueryModelListForPredict(ctx *context.Context) {
nameMap := make(map[string][]*models.AiModelManage)
for _, model := range modelResult {
- removeIpInfo(model)
+ model.TrainTaskInfo = ""
+ model.Accuracy = ""
+ //removeIpInfo(model)
if _, value := nameMap[model.Name]; !value {
models := make([]*models.AiModelManage, 0)
models = append(models, model)
@@ -1195,19 +1319,25 @@ func QueryModelFileForPredict(ctx *context.Context) {
func QueryModelFileByID(id string) []storage.FileInfo {
model, err := models.QueryModelById(id)
- if err == nil {
- if model.Type == models.TypeCloudBrainTwo {
- prefix := model.Path[len(setting.Bucket)+1:]
- fileinfos, _ := storage.GetAllObjectByBucketAndPrefix(setting.Bucket, prefix)
- return fileinfos
- } else if model.Type == models.TypeCloudBrainOne {
- prefix := model.Path[len(setting.Attachment.Minio.Bucket)+1:]
- fileinfos, _ := storage.GetAllObjectByBucketAndPrefixMinio(setting.Attachment.Minio.Bucket, prefix)
- return fileinfos
- }
- } else {
+ if err != nil {
log.Error("no such model!", err.Error())
+ return nil
}
+ return QueryModelFileByModel(model)
+}
+
+func QueryModelFileByModel(model *models.AiModelManage) []storage.FileInfo {
+
+ if model.Type == models.TypeCloudBrainTwo {
+ prefix := model.Path[len(setting.Bucket)+1:]
+ fileinfos, _ := storage.GetAllObjectByBucketAndPrefix(setting.Bucket, prefix)
+ return fileinfos
+ } else if model.Type == models.TypeCloudBrainOne {
+ prefix := model.Path[len(setting.Attachment.Minio.Bucket)+1:]
+ fileinfos, _ := storage.GetAllObjectByBucketAndPrefixMinio(setting.Attachment.Minio.Bucket, prefix)
+ return fileinfos
+ }
+
return nil
}
diff --git a/routers/repo/aisafety.go b/routers/repo/aisafety.go
index 6176fcda5..55f25dba6 100644
--- a/routers/repo/aisafety.go
+++ b/routers/repo/aisafety.go
@@ -847,6 +847,9 @@ func createForGPU(ctx *context.Context, jobName string) error {
codePath := setting.JobPath + jobName + cloudbrain.CodeMountPath
os.RemoveAll(codePath)
+ gitRepo, _ := git.OpenRepository(repo.RepoPath())
+ commitID, _ := gitRepo.GetBranchCommitID(cloudbrain.DefaultBranchName)
+
if err := downloadCode(repo, codePath, cloudbrain.DefaultBranchName); err != nil {
log.Error("downloadCode failed, %v", err, ctx.Data["MsgID"])
return errors.New("system error")
@@ -891,7 +894,7 @@ func createForGPU(ctx *context.Context, jobName string) error {
BranchName: cloudbrain.DefaultBranchName,
BootFile: BootFile,
Params: Params,
- CommitID: "",
+ CommitID: commitID,
ModelName: modelName,
ModelVersion: modelVersion,
CkptName: CkptName,
diff --git a/routers/repo/attachment.go b/routers/repo/attachment.go
index 240e78acc..e1de71345 100755
--- a/routers/repo/attachment.go
+++ b/routers/repo/attachment.go
@@ -29,6 +29,7 @@ import (
"code.gitea.io/gitea/modules/storage"
"code.gitea.io/gitea/modules/upload"
"code.gitea.io/gitea/modules/worker"
+ repo_service "code.gitea.io/gitea/services/repository"
gouuid "github.com/satori/go.uuid"
)
@@ -180,6 +181,7 @@ func DeleteAttachment(ctx *context.Context) {
ctx.Error(500, fmt.Sprintf("DeleteAttachment: %v", err))
return
}
+ go repo_service.DecreaseRepoDatasetNum(attach.DatasetID)
attachjson, _ := json.Marshal(attach)
labelmsg.SendDeleteAttachToLabelSys(string(attachjson))
@@ -894,6 +896,7 @@ func CompleteMultipart(ctx *context.Context) {
return
}
attachment.UpdateDatasetUpdateUnix()
+ go repo_service.IncreaseRepoDatasetNum(dataset.ID)
repository, _ := models.GetRepositoryByID(dataset.RepoID)
notification.NotifyOtherTask(ctx.User, repository, fmt.Sprint(repository.IsPrivate, attachment.IsPrivate), attachment.Name, models.ActionUploadAttachment)
if attachment.DatasetID != 0 {
diff --git a/routers/repo/cloudbrain.go b/routers/repo/cloudbrain.go
index 3bf3c9dbd..6f1995380 100755
--- a/routers/repo/cloudbrain.go
+++ b/routers/repo/cloudbrain.go
@@ -81,6 +81,7 @@ var (
const BENCHMARK_TYPE_CODE = "repo.cloudbrain.benchmark.types"
const CLONE_FILE_PREFIX = "file:///"
+const README = "README"
var benchmarkTypesMap = make(map[string]*models.BenchmarkTypes, 0)
@@ -385,6 +386,13 @@ func cloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) {
}
if form.ModelName != "" { //使用预训练模型训练
+ _, err := models.QueryModelByPath(form.PreTrainModelUrl)
+ if err != nil {
+ log.Error("Can not find model", err)
+ cloudBrainNewDataPrepare(ctx, jobType)
+ ctx.RenderWithErr(ctx.Tr("repo.modelconvert.manage.model_not_exist"), tpl, &form)
+ return
+ }
req.ModelName = form.ModelName
req.LabelName = form.LabelName
req.CkptName = form.CkptName
@@ -423,8 +431,13 @@ func loadCodeAndMakeModelPath(repo *models.Repository, codePath string, branchNa
return "cloudbrain.load_code_failed"
}
+ return initModelPath(jobName, resultPath)
+
+}
+
+func initModelPath(jobName string, resultPath string) string {
modelPath := setting.JobPath + jobName + resultPath + "/"
- err = mkModelPath(modelPath)
+ err := mkModelPath(modelPath)
if err != nil {
return "cloudbrain.load_code_failed"
}
@@ -682,6 +695,13 @@ func CloudBrainRestart(ctx *context.Context) {
break
}
+ if _, err := os.Stat(getOldJobPath(task)); err != nil {
+ log.Error("Can not find job minio path", err)
+ resultCode = "-1"
+ errorMsg = ctx.Tr("cloudbrain.result_cleared")
+ break
+ }
+
count, err := cloudbrainTask.GetNotFinalStatusTaskCount(ctx.User.ID, models.TypeCloudBrainOne, string(models.JobTypeDebug))
if err != nil {
log.Error("GetCloudbrainCountByUserID failed:%v", err, ctx.Data["MsgID"])
@@ -696,6 +716,17 @@ func CloudBrainRestart(ctx *context.Context) {
break
}
}
+ if !HasModelFile(task) {
+ resultCode = "-1"
+ errorMsg = ctx.Tr("repo.debug.manage.model_not_exist")
+ break
+ }
+
+ if hasDatasetDeleted(task) {
+ resultCode = "-1"
+ errorMsg = ctx.Tr("repo.debug.manage.dataset_not_exist")
+ break
+ }
err = cloudbrain.RestartTask(ctx, task, &ID)
if err != nil {
@@ -716,6 +747,44 @@ func CloudBrainRestart(ctx *context.Context) {
})
}
+func hasDatasetDeleted(task *models.Cloudbrain) bool {
+ if task.Uuid == "" {
+ return false
+ }
+ uuids := strings.Split(task.Uuid, ";")
+ attachs, _ := models.GetAttachmentsByUUIDs(uuids)
+ return len(attachs) < len(uuids)
+}
+
+func HasModelFile(task *models.Cloudbrain) bool {
+ if task.PreTrainModelUrl == "" {
+ return true
+ }
+
+ model, err := models.QueryModelByPath(task.PreTrainModelUrl)
+ if err != nil {
+ log.Error("Can not find model", err)
+ return false
+ }
+
+ fileInfos := QueryModelFileByModel(model)
+ isFind := false
+ if fileInfos != nil {
+ for _, fileInfo := range fileInfos {
+ if fileInfo.FileName == task.CkptName {
+ isFind = true
+ break
+ }
+ }
+
+ }
+ return isFind
+}
+
+func getOldJobPath(task *models.Cloudbrain) string {
+ return setting.Attachment.Minio.RealPath + setting.Attachment.Minio.Bucket + "/" + setting.CBCodePathPrefix + task.JobName
+}
+
func CloudBrainBenchMarkShow(ctx *context.Context) {
cloudBrainShow(ctx, tplCloudBrainBenchmarkShow, models.JobTypeBenchmark)
}
@@ -1758,7 +1827,7 @@ func mkPathAndReadMeFile(path string, text string) error {
return err
}
- fileName := path + "README"
+ fileName := path + README
f, err := os.OpenFile(fileName, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, os.ModePerm)
if err != nil {
log.Error("OpenFile failed", err.Error())
@@ -1816,6 +1885,7 @@ func SyncCloudbrainStatus() {
if task.JobType == string(models.JobTypeModelSafety) {
continue
}
+
if task.Type == models.TypeCloudBrainOne {
task, err = cloudbrainTask.SyncCloudBrainOneStatus(task)
@@ -1824,32 +1894,7 @@ func SyncCloudbrainStatus() {
continue
}
- if task.Status != string(models.JobWaiting) {
- if task.Duration >= setting.MaxDuration && task.JobType == string(models.JobTypeDebug) {
- log.Info("begin to stop job(%s), because of the duration", task.DisplayJobName)
- err = cloudbrain.StopJob(task.JobID)
- if err != nil {
- log.Error("StopJob(%s) failed:%v", task.DisplayJobName, err)
- continue
- }
- oldStatus := task.Status
- task.Status = string(models.JobStopped)
- if task.EndTime == 0 {
- task.EndTime = timeutil.TimeStampNow()
- }
- task.ComputeAndSetDuration()
- if oldStatus != task.Status {
- notification.NotifyChangeCloudbrainStatus(task, oldStatus)
- }
- err = models.UpdateJob(task)
- if err != nil {
- log.Error("UpdateJob(%s) failed:%v", task.DisplayJobName, err)
- continue
- }
- }
-
- }
- } else if task.Type == models.TypeCloudBrainTwo {
+ } else if task.Type == models.TypeCloudBrainTwo || task.Type == models.TypeCDCenter {
if task.JobType == string(models.JobTypeDebug) {
err := modelarts.HandleNotebookInfo(task)
if err != nil {
@@ -1866,48 +1911,77 @@ func SyncCloudbrainStatus() {
log.Error("task.JobType(%s) is error:%s", task.DisplayJobName, task.JobType)
}
} else if task.Type == models.TypeC2Net {
- result, err := grampus.GetJob(task.JobID)
- if err != nil {
- log.Error("GetTrainJob(%s) failed:%v", task.DisplayJobName, err)
- continue
- }
-
- if result != nil {
- if len(result.JobInfo.Tasks[0].CenterID) == 1 && len(result.JobInfo.Tasks[0].CenterName) == 1 {
- task.AiCenter = result.JobInfo.Tasks[0].CenterID[0] + "+" + result.JobInfo.Tasks[0].CenterName[0]
+ if task.JobType == string(models.JobTypeDebug) {
+ cloudbrainTask.SyncGrampusNotebookStatus(task)
+ } else {
+ result, err := grampus.GetJob(task.JobID)
+ if err != nil {
+ log.Error("GetTrainJob(%s) failed:%v", task.DisplayJobName, err)
+ continue
}
- oldStatus := task.Status
- task.Status = grampus.TransTrainJobStatus(result.JobInfo.Status)
- task.Duration = result.JobInfo.RunSec
- if task.Duration < 0 {
- task.Duration = 0
- }
- task.TrainJobDuration = models.ConvertDurationToStr(task.Duration)
+ if result != nil {
+ if len(result.JobInfo.Tasks[0].CenterID) == 1 && len(result.JobInfo.Tasks[0].CenterName) == 1 {
+ task.AiCenter = result.JobInfo.Tasks[0].CenterID[0] + "+" + result.JobInfo.Tasks[0].CenterName[0]
+ }
+ oldStatus := task.Status
+ task.Status = grampus.TransTrainJobStatus(result.JobInfo.Status)
+ task.Duration = result.JobInfo.RunSec
+
+ if task.Duration < 0 {
+ task.Duration = 0
+ }
+ task.TrainJobDuration = models.ConvertDurationToStr(task.Duration)
- if task.StartTime == 0 && result.JobInfo.StartedAt > 0 {
- task.StartTime = timeutil.TimeStamp(result.JobInfo.StartedAt)
+ if task.StartTime == 0 && result.JobInfo.StartedAt > 0 {
+ task.StartTime = timeutil.TimeStamp(result.JobInfo.StartedAt)
+ }
+ if task.EndTime == 0 && models.IsTrainJobTerminal(task.Status) && task.StartTime > 0 {
+ task.EndTime = task.StartTime.Add(task.Duration)
+ }
+ task.CorrectCreateUnix()
+ if oldStatus != task.Status {
+ notification.NotifyChangeCloudbrainStatus(task, oldStatus)
+ if models.IsTrainJobTerminal(task.Status) && task.ComputeResource == models.NPUResource {
+ if len(result.JobInfo.Tasks[0].CenterID) == 1 {
+ urchin.GetBackNpuModel(task.ID, grampus.GetRemoteEndPoint(result.JobInfo.Tasks[0].CenterID[0]), grampus.BucketRemote, grampus.GetNpuModelObjectKey(task.JobName), grampus.GetCenterProxy(setting.Grampus.LocalCenterID))
+ }
+ }
+ }
+ err = models.UpdateJob(task)
+ if err != nil {
+ log.Error("UpdateJob(%s) failed:%v", task.JobName, err)
+ continue
+ }
+ }
+ }
+ } else {
+ log.Error("task.Type(%s) is error:%d", task.JobName, task.Type)
+ }
+ if task.Status != string(models.JobWaiting) {
+ if task.Duration >= setting.MaxDuration && task.JobType == string(models.JobTypeDebug) {
+ log.Info("begin to stop job(%s), because of the duration", task.DisplayJobName)
+ err = cloudbrainTask.StopDebugJob(task)
+ if err != nil {
+ log.Error("StopJob(%s) failed:%v", task.DisplayJobName, err)
+ continue
}
- if task.EndTime == 0 && models.IsTrainJobTerminal(task.Status) && task.StartTime > 0 {
- task.EndTime = task.StartTime.Add(task.Duration)
+ oldStatus := task.Status
+ task.Status = string(models.JobStopped)
+ if task.EndTime == 0 {
+ task.EndTime = timeutil.TimeStampNow()
}
- task.CorrectCreateUnix()
+ task.ComputeAndSetDuration()
if oldStatus != task.Status {
notification.NotifyChangeCloudbrainStatus(task, oldStatus)
- if models.IsTrainJobTerminal(task.Status) && task.ComputeResource == models.NPUResource {
- if len(result.JobInfo.Tasks[0].CenterID) == 1 {
- urchin.GetBackNpuModel(task.ID, grampus.GetRemoteEndPoint(result.JobInfo.Tasks[0].CenterID[0]), grampus.BucketRemote, grampus.GetNpuModelObjectKey(task.JobName), grampus.GetCenterProxy(setting.Grampus.LocalCenterID))
- }
- }
}
err = models.UpdateJob(task)
if err != nil {
- log.Error("UpdateJob(%s) failed:%v", task.JobName, err)
+ log.Error("UpdateJob(%s) failed:%v", task.DisplayJobName, err)
continue
}
}
- } else {
- log.Error("task.Type(%s) is error:%d", task.JobName, task.Type)
+
}
}
diff --git a/routers/repo/cloudbrain_statistic.go b/routers/repo/cloudbrain_statistic.go
index 6ff377491..43c1ab1a4 100644
--- a/routers/repo/cloudbrain_statistic.go
+++ b/routers/repo/cloudbrain_statistic.go
@@ -29,17 +29,16 @@ func CloudbrainDurationStatisticHour() {
statisticTime = currentTime
}
- err = models.DeleteCloudbrainDurationStatistic(timeutil.TimeStamp(statisticTime.Add(-1*time.Hour).Unix()), timeutil.TimeStamp(currentTime.Unix()))
+ err = models.DeleteCloudbrainDurationStatistic(timeutil.TimeStamp(statisticTime.Unix()), timeutil.TimeStamp(currentTime.Unix()))
if err != nil {
log.Error("DeleteCloudbrainDurationStatistic failed", err)
}
-
+ statisticTime = statisticTime.Add(+1 * time.Hour)
for statisticTime.Before(currentTime) || statisticTime.Equal(currentTime) {
countEach := summaryDurationStat(statisticTime)
count += countEach
statisticTime = statisticTime.Add(+1 * time.Hour)
}
- log.Info("summaryDurationStat count: %v", count)
}
}
func UpdateDurationStatisticHistoryData(beginTime time.Time, endTime time.Time) int64 {
@@ -65,15 +64,18 @@ func summaryDurationStat(statisticTime time.Time) int64 {
ciTasks, err := models.GetCloudbrainByTime(beginTime, endTime)
if err != nil {
- log.Info("GetCloudbrainByTime err: %v", err)
+ log.Error("GetCloudbrainByTime err: %v", err)
return 0
}
- models.LoadSpecs4CloudbrainInfo(ciTasks)
- cloudBrainCenterCodeAndCardTypeInfo, cloudbrainMap := getcloudBrainCenterCodeAndCardTypeInfo(ciTasks, beginTime, endTime)
+ err = models.LoadSpecs4CloudbrainInfo(ciTasks)
+ if err != nil {
+ log.Error("LoadSpecs4CloudbrainInfo err: %v", err)
+ }
+ cloudBrainCenterCodeAndCardTypeInfo := getcloudBrainCenterCodeAndCardTypeInfo(ciTasks, int(beginTime), int(endTime))
resourceQueues, err := models.GetCanUseCardInfo()
if err != nil {
- log.Info("GetCanUseCardInfo err: %v", err)
+ log.Error("GetCanUseCardInfo err: %v", err)
return 0
}
@@ -85,56 +87,45 @@ func summaryDurationStat(statisticTime time.Time) int64 {
cardsTotalDurationMap[resourceQueue.Cluster+"/"+resourceQueue.AiCenterCode+"/"+resourceQueue.AccCardType] += resourceQueue.CardsTotalNum * 1 * 60 * 60
}
}
-
- for centerCode, CardTypes := range cloudBrainCenterCodeAndCardTypeInfo {
- for cardType, cardDuration := range CardTypes {
- cloudbrainTable := cloudbrainMap[centerCode+"/"+cardType]
- if cloudbrainTable != nil {
- if _, ok := cardsTotalDurationMap[cloudbrainTable.Cluster+"/"+centerCode+"/"+cardType]; !ok {
- cardsTotalDurationMap[cloudbrainTable.Cluster+"/"+centerCode+"/"+cardType] = 0
- }
- cloudbrainDurationStat := models.CloudbrainDurationStatistic{
- DateTimeUnix: dateTimeUnix,
- DayTime: dayTime,
- HourTime: hourTime,
- Cluster: cloudbrainTable.Cluster,
- AiCenterName: GetAiCenterNameByCode(centerCode, "zh-CN"),
- AiCenterCode: centerCode,
- AccCardType: cardType,
- CardsUseDuration: cardDuration,
- CardsTotalDuration: cardsTotalDurationMap[cloudbrainTable.Cluster+"/"+centerCode+"/"+cardType],
- CreatedUnix: timeutil.TimeStampNow(),
- }
- if _, err = models.InsertCloudbrainDurationStatistic(&cloudbrainDurationStat); err != nil {
- log.Error("Insert cloudbrainDurationStat failed: %v", err.Error())
- }
- count++
- delete(cardsTotalDurationMap, cloudbrainTable.Cluster+"/"+centerCode+"/"+cardType)
- }
- }
- }
-
for key, cardsTotalDuration := range cardsTotalDurationMap {
- cloudbrainDurationStat := models.CloudbrainDurationStatistic{
- DateTimeUnix: dateTimeUnix,
- DayTime: dayTime,
- HourTime: hourTime,
- Cluster: strings.Split(key, "/")[0],
- AiCenterName: GetAiCenterNameByCode(strings.Split(key, "/")[1], "zh-CN"),
- AiCenterCode: strings.Split(key, "/")[1],
- AccCardType: strings.Split(key, "/")[2],
- CardsUseDuration: 0,
- CardsTotalDuration: cardsTotalDuration,
- CardsTotalNum: cardsTotalDuration / 1 / 60 / 60,
- CreatedUnix: timeutil.TimeStampNow(),
- }
- if _, err = models.InsertCloudbrainDurationStatistic(&cloudbrainDurationStat); err != nil {
- log.Error("Insert cloudbrainDurationStat failed: %v", err.Error())
+ if _, ok := cloudBrainCenterCodeAndCardTypeInfo[strings.Split(key, "/")[0]+"/"+strings.Split(key, "/")[1]][strings.Split(key, "/")[2]]; ok {
+ cloudbrainDurationStat := models.CloudbrainDurationStatistic{
+ DateTimeUnix: dateTimeUnix,
+ DayTime: dayTime,
+ HourTime: hourTime,
+ Cluster: strings.Split(key, "/")[0],
+ AiCenterName: GetAiCenterNameByCode(strings.Split(key, "/")[1], "zh-CN"),
+ AiCenterCode: strings.Split(key, "/")[1],
+ AccCardType: strings.Split(key, "/")[2],
+ CardsUseDuration: cloudBrainCenterCodeAndCardTypeInfo[strings.Split(key, "/")[0]+"/"+strings.Split(key, "/")[1]][strings.Split(key, "/")[2]],
+ CardsTotalDuration: cardsTotalDuration,
+ CardsTotalNum: cardsTotalDuration / 1 / 60 / 60,
+ CreatedUnix: timeutil.TimeStampNow(),
+ }
+ if _, err = models.InsertCloudbrainDurationStatistic(&cloudbrainDurationStat); err != nil {
+ log.Error("Insert cloudbrainDurationStat failed: %v", err.Error())
+ }
+ count++
+ } else {
+ cloudbrainDurationStat := models.CloudbrainDurationStatistic{
+ DateTimeUnix: dateTimeUnix,
+ DayTime: dayTime,
+ HourTime: hourTime,
+ Cluster: strings.Split(key, "/")[0],
+ AiCenterName: GetAiCenterNameByCode(strings.Split(key, "/")[1], "zh-CN"),
+ AiCenterCode: strings.Split(key, "/")[1],
+ AccCardType: strings.Split(key, "/")[2],
+ CardsUseDuration: 0,
+ CardsTotalDuration: cardsTotalDuration,
+ CardsTotalNum: cardsTotalDuration / 1 / 60 / 60,
+ CreatedUnix: timeutil.TimeStampNow(),
+ }
+ if _, err = models.InsertCloudbrainDurationStatistic(&cloudbrainDurationStat); err != nil {
+ log.Error("Insert cloudbrainDurationStat failed: %v", err.Error())
+ }
+ count++
}
- count++
}
-
- log.Info("finish summary cloudbrainDurationStat")
return count
}
@@ -153,33 +144,21 @@ func GetAiCenterNameByCode(centerCode string, language string) string {
return aiCenterName
}
-func getcloudBrainCenterCodeAndCardTypeInfo(ciTasks []*models.CloudbrainInfo, beginTime int64, endTime int64) (map[string]map[string]int, map[string]*models.Cloudbrain) {
+func getcloudBrainCenterCodeAndCardTypeInfo(ciTasks []*models.CloudbrainInfo, hourBeginTime int, hourEndTime int) map[string]map[string]int {
var WorkServerNumber int
var AccCardsNum int
- cloudbrainMap := make(map[string]*models.Cloudbrain)
cloudBrainCenterCodeAndCardType := make(map[string]map[string]int)
for _, cloudbrain := range ciTasks {
- if cloudbrain.Cloudbrain.StartTime == 0 {
- cloudbrain.Cloudbrain.StartTime = cloudbrain.Cloudbrain.CreatedUnix
- }
- if cloudbrain.Cloudbrain.EndTime == 0 {
- cloudbrain.Cloudbrain.EndTime = timeutil.TimeStamp(time.Now().Unix())
- }
- cloudbrain = cloudbrainService.UpdateCloudbrainAiCenter(cloudbrain)
- if cloudbrain.Cloudbrain.Spec != nil {
- if _, ok := cloudbrainMap[cloudbrain.Cloudbrain.AiCenter+"/"+cloudbrain.Cloudbrain.Spec.AccCardType]; !ok {
- if cloudbrain.Cloudbrain.Spec != nil {
- cloudbrainMap[cloudbrain.Cloudbrain.AiCenter+"/"+cloudbrain.Cloudbrain.Spec.AccCardType] = &cloudbrain.Cloudbrain
- }
- }
- }
-
cloudbrain = cloudbrainService.UpdateCloudbrainAiCenter(cloudbrain)
if cloudbrain.Cloudbrain.StartTime == 0 {
cloudbrain.Cloudbrain.StartTime = cloudbrain.Cloudbrain.CreatedUnix
}
if cloudbrain.Cloudbrain.EndTime == 0 {
- cloudbrain.Cloudbrain.EndTime = cloudbrain.Cloudbrain.UpdatedUnix
+ if cloudbrain.Cloudbrain.Status == string(models.JobRunning) {
+ cloudbrain.Cloudbrain.EndTime = timeutil.TimeStamp(time.Now().Unix())
+ } else {
+ cloudbrain.Cloudbrain.EndTime = cloudbrain.Cloudbrain.StartTime + timeutil.TimeStamp(cloudbrain.Cloudbrain.Duration)
+ }
}
if cloudbrain.Cloudbrain.WorkServerNumber >= 1 {
WorkServerNumber = cloudbrain.Cloudbrain.WorkServerNumber
@@ -191,55 +170,36 @@ func getcloudBrainCenterCodeAndCardTypeInfo(ciTasks []*models.CloudbrainInfo, be
} else {
AccCardsNum = cloudbrain.Cloudbrain.Spec.AccCardsNum
}
- if _, ok := cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter]; !ok {
- cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter] = make(map[string]int)
+ if _, ok := cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Cluster+"/"+cloudbrain.Cloudbrain.AiCenter]; !ok {
+ cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Cluster+"/"+cloudbrain.Cloudbrain.AiCenter] = make(map[string]int)
}
+ taskStartTime := int(cloudbrain.Cloudbrain.StartTime)
+ taskEndTime := int(cloudbrain.Cloudbrain.EndTime)
if cloudbrain.Cloudbrain.Spec != nil {
- if cloudbrain.Cloudbrain.Status == string(models.ModelArtsRunning) && cloudbrain.Cloudbrain.DeletedAt.IsZero() {
- if _, ok := cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType]; !ok {
- if int64(cloudbrain.Cloudbrain.StartTime) < beginTime {
- cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (int(endTime) - int(beginTime))
- } else if beginTime <= int64(cloudbrain.Cloudbrain.StartTime) && int64(cloudbrain.Cloudbrain.StartTime) < endTime {
- cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (int(endTime) - int(cloudbrain.Cloudbrain.StartTime))
- } else if int64(cloudbrain.Cloudbrain.StartTime) >= endTime {
- cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] = 0
- }
- } else {
- if int64(cloudbrain.Cloudbrain.StartTime) < beginTime {
- cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (int(endTime) - int(beginTime))
- } else if beginTime <= int64(cloudbrain.Cloudbrain.StartTime) && int64(cloudbrain.Cloudbrain.StartTime) < endTime {
- cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (int(endTime) - int(cloudbrain.Cloudbrain.StartTime))
- } else if int64(cloudbrain.Cloudbrain.StartTime) >= endTime {
- cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] += 0
- }
+ if _, ok := cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Cluster+"/"+cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType]; !ok {
+ if taskStartTime < hourBeginTime && taskEndTime >= hourBeginTime && taskEndTime <= hourEndTime {
+ cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Cluster+"/"+cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (taskEndTime - hourBeginTime)
+ } else if taskStartTime < hourBeginTime && taskEndTime > hourEndTime {
+ cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Cluster+"/"+cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (hourEndTime - hourBeginTime)
+ } else if taskStartTime >= hourBeginTime && taskStartTime <= hourEndTime && taskEndTime >= hourBeginTime && taskEndTime <= hourEndTime {
+ cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Cluster+"/"+cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (taskEndTime - taskStartTime)
+ } else if taskStartTime >= hourBeginTime && taskStartTime <= hourEndTime && taskEndTime > hourEndTime {
+ cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Cluster+"/"+cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (hourEndTime - taskStartTime)
}
} else {
- if _, ok := cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType]; !ok {
- if int64(cloudbrain.Cloudbrain.StartTime) <= beginTime && int64(cloudbrain.Cloudbrain.EndTime) <= endTime {
- cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (int(cloudbrain.Cloudbrain.EndTime) - int(beginTime))
- } else if int64(cloudbrain.Cloudbrain.StartTime) <= beginTime && int64(cloudbrain.Cloudbrain.EndTime) > endTime {
- cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (int(endTime) - int(beginTime))
- } else if beginTime <= int64(cloudbrain.Cloudbrain.StartTime) && int64(cloudbrain.Cloudbrain.StartTime) <= endTime && int64(cloudbrain.Cloudbrain.EndTime) <= endTime {
- cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (int(cloudbrain.Cloudbrain.EndTime) - int(cloudbrain.Cloudbrain.StartTime))
- } else if beginTime <= int64(cloudbrain.Cloudbrain.StartTime) && int64(cloudbrain.Cloudbrain.StartTime) <= endTime && int64(cloudbrain.Cloudbrain.EndTime) > endTime {
- cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] = AccCardsNum * WorkServerNumber * (int(endTime) - int(cloudbrain.Cloudbrain.StartTime))
- }
- } else {
- if int64(cloudbrain.Cloudbrain.StartTime) <= beginTime && int64(cloudbrain.Cloudbrain.EndTime) <= endTime {
- cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (int(cloudbrain.Cloudbrain.EndTime) - int(beginTime))
- } else if int64(cloudbrain.Cloudbrain.StartTime) <= beginTime && int64(cloudbrain.Cloudbrain.EndTime) > endTime {
- cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (int(endTime) - int(beginTime))
- } else if beginTime <= int64(cloudbrain.Cloudbrain.StartTime) && int64(cloudbrain.Cloudbrain.StartTime) <= endTime && int64(cloudbrain.Cloudbrain.EndTime) <= endTime {
- cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (int(cloudbrain.Cloudbrain.EndTime) - int(cloudbrain.Cloudbrain.StartTime))
- } else if beginTime <= int64(cloudbrain.Cloudbrain.StartTime) && int64(cloudbrain.Cloudbrain.StartTime) <= endTime && int64(cloudbrain.Cloudbrain.EndTime) > endTime {
- cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (int(endTime) - int(cloudbrain.Cloudbrain.StartTime))
- }
+ if taskStartTime < hourBeginTime && taskEndTime >= hourBeginTime && taskEndTime <= hourEndTime {
+ cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Cluster+"/"+cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (taskEndTime - hourBeginTime)
+ } else if taskStartTime < hourBeginTime && taskEndTime > hourEndTime {
+ cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Cluster+"/"+cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (hourEndTime - hourBeginTime)
+ } else if taskStartTime >= hourBeginTime && taskStartTime <= hourEndTime && taskEndTime >= hourBeginTime && taskEndTime <= hourEndTime {
+ cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Cluster+"/"+cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (taskEndTime - taskStartTime)
+ } else if taskStartTime >= hourBeginTime && taskStartTime <= hourEndTime && taskEndTime > hourEndTime {
+ cloudBrainCenterCodeAndCardType[cloudbrain.Cloudbrain.Cluster+"/"+cloudbrain.Cloudbrain.AiCenter][cloudbrain.Cloudbrain.Spec.AccCardType] += AccCardsNum * WorkServerNumber * (hourEndTime - taskStartTime)
}
}
}
}
-
- return cloudBrainCenterCodeAndCardType, cloudbrainMap
+ return cloudBrainCenterCodeAndCardType
}
func CloudbrainUpdateHistoryData(ctx *context.Context) {
diff --git a/routers/repo/grampus.go b/routers/repo/grampus.go
index 8f3182758..14db1a50d 100755
--- a/routers/repo/grampus.go
+++ b/routers/repo/grampus.go
@@ -44,14 +44,37 @@ import (
const (
tplGrampusTrainJobShow base.TplName = "repo/grampus/trainjob/show"
+ tplGrampusNotebookShow base.TplName = "repo/grampus/notebook/show"
//GPU
+ tplGrampusNotebookGPUNew base.TplName = "repo/grampus/notebook/gpu/new"
tplGrampusTrainJobGPUNew base.TplName = "repo/grampus/trainjob/gpu/new"
//NPU
+ tplGrampusNotebookNPUNew base.TplName = "repo/grampus/notebook/npu/new"
tplGrampusTrainJobNPUNew base.TplName = "repo/grampus/trainjob/npu/new"
)
+func GrampusNotebookNew(ctx *context.Context) {
+ ctx.Data["IsCreate"] = true
+ notebookType := ctx.QueryInt("type")
+ processType := grampus.ProcessorTypeGPU
+ if notebookType == 1 {
+ processType = grampus.ProcessorTypeNPU
+ }
+ err := grampusNotebookNewDataPrepare(ctx, processType)
+ if err != nil {
+ ctx.ServerError("get new notebook-job info failed", err)
+ return
+ }
+ if processType == grampus.ProcessorTypeGPU {
+ ctx.HTML(http.StatusOK, tplGrampusNotebookGPUNew)
+ } else {
+ ctx.HTML(http.StatusOK, tplGrampusNotebookNPUNew)
+ }
+
+}
+
func GrampusTrainJobGPUNew(ctx *context.Context) {
ctx.Data["IsCreate"] = true
err := grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU)
@@ -72,57 +95,262 @@ func GrampusTrainJobNPUNew(ctx *context.Context) {
}
ctx.HTML(200, tplGrampusTrainJobNPUNew)
}
+func GrampusNotebookCreate(ctx *context.Context, form auth.CreateGrampusNotebookForm) {
+ ctx.Data["IsCreate"] = true
+ displayJobName := form.DisplayJobName
+ jobName := util.ConvertDisplayJobNameToJobName(displayJobName)
+ uuid := form.Attachment
+ description := form.Description
+ repo := ctx.Repo.Repository
+ branchName := form.BranchName
+ image := strings.TrimSpace(form.Image)
-func grampusTrainJobNewDataPrepare(ctx *context.Context, processType string) error {
+ codeStoragePath := setting.CBCodePathPrefix + jobName + cloudbrain.CodeMountPath + "/"
+
+ tpl := tplGrampusNotebookGPUNew
+ processType := grampus.ProcessorTypeGPU
+ computeSource := models.GPUResource
+ computeSourceSimple := models.GPU
+ if form.Type == 1 {
+ tpl = tplGrampusNotebookNPUNew
+ processType = grampus.ProcessorTypeNPU
+ computeSource = models.NPUResource
+ computeSourceSimple = models.NPU
+ codeStoragePath = grampus.JobPath + jobName + modelarts.CodePath
+ }
+
+ lock := redis_lock.NewDistributeLock(redis_key.CloudbrainBindingJobNameKey(fmt.Sprint(repo.ID), string(models.JobTypeDebug), displayJobName))
+ defer lock.UnLock()
+ isOk, err := lock.Lock(models.CloudbrainKeyDuration)
+ if !isOk {
+ log.Error("lock processed failed:%v", err, ctx.Data["MsgID"])
+ grampusNotebookNewDataPrepare(ctx, processType)
+ ctx.RenderWithErr(ctx.Tr("repo.cloudbrain_samejob_err"), tpl, &form)
+ return
+ }
+
+ if !jobNamePattern.MatchString(displayJobName) {
+ grampusNotebookNewDataPrepare(ctx, processType)
+ ctx.RenderWithErr(ctx.Tr("repo.cloudbrain_jobname_err"), tpl, &form)
+ return
+ }
+
+ //check count limit
+ count, err := cloudbrainTask.GetNotFinalStatusTaskCount(ctx.User.ID, models.TypeC2Net, string(models.JobTypeDebug), computeSource)
+ if err != nil {
+ log.Error("GetGrampusCountByUserID failed:%v", err, ctx.Data["MsgID"])
+ grampusNotebookNewDataPrepare(ctx, processType)
+ ctx.RenderWithErr("system error", tpl, &form)
+ return
+ } else {
+ if count >= 1 {
+ log.Error("the user already has running or waiting task", ctx.Data["MsgID"])
+ grampusNotebookNewDataPrepare(ctx, processType)
+ ctx.RenderWithErr("you have already a running or waiting task, can not create more", tpl, &form)
+ return
+ }
+ }
+
+ //check whether the task name in the project is duplicated
+ tasks, err := models.GetCloudbrainsByDisplayJobName(repo.ID, string(models.JobTypeDebug), displayJobName)
+ if err == nil {
+ if len(tasks) != 0 {
+ log.Error("the job name did already exist", ctx.Data["MsgID"])
+ grampusNotebookNewDataPrepare(ctx, processType)
+ ctx.RenderWithErr("the job name did already exist", tpl, &form)
+ return
+ }
+ } else {
+ if !models.IsErrJobNotExist(err) {
+ log.Error("system error, %v", err, ctx.Data["MsgID"])
+ grampusNotebookNewDataPrepare(ctx, processType)
+ ctx.RenderWithErr("system error", tpl, &form)
+ return
+ }
+ }
+
+ //check specification
+ spec, err := resource.GetAndCheckSpec(ctx.User.ID, form.SpecId, models.FindSpecsOptions{
+ JobType: models.JobTypeDebug,
+ ComputeResource: computeSourceSimple,
+ Cluster: models.C2NetCluster,
+ })
+ if err != nil || spec == nil {
+ grampusNotebookNewDataPrepare(ctx, processType)
+ ctx.RenderWithErr("Resource specification not available", tpl, &form)
+ return
+ }
+
+ if !account.IsPointBalanceEnough(ctx.User.ID, spec.UnitPrice) {
+ log.Error("point balance is not enough,userId=%d specId=%d", ctx.User.ID, spec.ID)
+ grampusNotebookNewDataPrepare(ctx, processType)
+ ctx.RenderWithErr(ctx.Tr("points.insufficient_points_balance"), tpl, &form)
+ return
+ }
+
+ var datasetInfos map[string]models.DatasetInfo
+ var datasetNames string
+ //var
+ if uuid != "" {
+ datasetInfos, datasetNames, err = models.GetDatasetInfo(uuid, computeSourceSimple)
+ if err != nil {
+ log.Error("GetDatasetInfo failed: %v", err, ctx.Data["MsgID"])
+ grampusNotebookNewDataPrepare(ctx, processType)
+ ctx.RenderWithErr(ctx.Tr("cloudbrain.error.dataset_select"), tpl, &form)
+ return
+ }
+ }
+
+ //prepare code and out path
+ codeLocalPath := setting.JobPath + jobName + cloudbrain.CodeMountPath + "/"
+ _, err = ioutil.ReadDir(codeLocalPath)
+ if err == nil {
+ os.RemoveAll(codeLocalPath)
+ }
+
+ if err := downloadZipCode(ctx, codeLocalPath, branchName); err != nil {
+ log.Error("downloadZipCode failed, server timed out: %s (%v)", repo.FullName(), err)
+ grampusNotebookNewDataPrepare(ctx, processType)
+ ctx.RenderWithErr(ctx.Tr("cloudbrain.load_code_failed"), tpl, &form)
+ return
+ }
+
+ if processType == grampus.ProcessorTypeGPU {
+ if err := uploadCodeToMinio(codeLocalPath+"/", jobName, cloudbrain.CodeMountPath+"/"); err != nil {
+ log.Error("Failed to uploadCodeToMinio: %s (%v)", repo.FullName(), err, ctx.Data["MsgID"])
+ grampusNotebookNewDataPrepare(ctx, processType)
+ ctx.RenderWithErr(ctx.Tr("cloudbrain.load_code_failed"), tpl, &form)
+ return
+ }
+
+ } else {
+
+ if err := uploadCodeToObs(codeLocalPath, jobName, ""); err != nil {
+ log.Error("Failed to uploadCodeToObs: %s (%v)", repo.FullName(), err)
+ grampusNotebookNewDataPrepare(ctx, processType)
+ ctx.RenderWithErr(ctx.Tr("cloudbrain.load_code_failed"), tpl, &form)
+ return
+ }
+ }
+
+ commitID, _ := ctx.Repo.GitRepo.GetBranchCommitID(branchName)
+
+ req := &grampus.GenerateNotebookJobReq{
+ JobName: jobName,
+ DisplayJobName: displayJobName,
+ ComputeResource: computeSource,
+ ProcessType: processType,
+ ImageUrl: image,
+ ImageId: form.ImageID,
+ Description: description,
+ Uuid: uuid,
+ CommitID: commitID,
+ BranchName: branchName,
+ DatasetNames: datasetNames,
+ DatasetInfos: datasetInfos,
+ Spec: spec,
+ CodeStoragePath: codeStoragePath,
+ CodeName: strings.ToLower(repo.Name),
+ }
+
+ if form.ModelName != "" { //使用预训练模型训练
+
+ _, err := models.QueryModelByPath(form.PreTrainModelUrl)
+ if err != nil {
+ log.Error("Can not find model", err)
+ grampusNotebookNewDataPrepare(ctx, processType)
+ ctx.RenderWithErr(ctx.Tr("repo.modelconvert.manage.model_not_exist"), tpl, &form)
+ return
+ }
+ req.ModelName = form.ModelName
+ req.LabelName = form.LabelName
+ req.CkptName = form.CkptName
+ req.ModelVersion = form.ModelVersion
+ req.PreTrainModelUrl = form.PreTrainModelUrl
+ req.PreTrainModelPath = getPreTrainModelPath(form.PreTrainModelUrl, form.CkptName)
+
+ }
+
+ _, err = grampus.GenerateNotebookJob(ctx, req)
+ if err != nil {
+ log.Error("GenerateNotebookJob failed:%v", err.Error(), ctx.Data["MsgID"])
+ grampusTrainJobNewDataPrepare(ctx, processType)
+ ctx.RenderWithErr(err.Error(), tpl, &form)
+ return
+ }
+ ctx.Redirect(setting.AppSubURL + ctx.Repo.RepoLink + "/debugjob?debugListType=all")
+}
+func grampusNotebookNewDataPrepare(ctx *context.Context, processType string) error {
ctx.Data["PageIsCloudBrain"] = true
var displayJobName = cloudbrainService.GetDisplayJobName(ctx.User.Name)
ctx.Data["display_job_name"] = displayJobName
//get valid images
- images, err := grampus.GetImages(processType)
+ if processType == grampus.ProcessorTypeNPU {
+ images, err := grampus.GetImages(processType, string(models.JobTypeDebug))
+ if err != nil {
+ log.Error("GetImages failed:", err.Error())
+ } else {
+ ctx.Data["images"] = images.Infos
+ }
+ }
+ //prepare available specs
+ computeResourceSimple := models.GPU
+ datasetType := models.TypeCloudBrainOne
+ computeResource := models.GPUResource
+ if processType == grampus.ProcessorTypeNPU {
+ computeResourceSimple = models.NPU
+ datasetType = models.TypeCloudBrainTwo
+ computeResource = models.NPUResource
+ }
+
+ prepareGrampusSpecs(ctx, computeResourceSimple, models.JobTypeDebug)
+
+ //get branches
+ branches, _, err := ctx.Repo.GitRepo.GetBranches(0, 0)
if err != nil {
- log.Error("GetImages failed:", err.Error())
+ log.Error("GetBranches error:", err.Error())
} else {
- ctx.Data["images"] = images.Infos
+ ctx.Data["branches"] = branches
}
- grampus.InitSpecialPool()
+ ctx.Data["branchName"] = ctx.Repo.BranchName
- ctx.Data["GPUEnabled"] = true
- ctx.Data["NPUEnabled"] = true
- includeCenters := make(map[string]struct{})
- excludeCenters := make(map[string]struct{})
- if grampus.SpecialPools != nil {
- for _, pool := range grampus.SpecialPools.Pools {
- if pool.IsExclusive {
- if !IsUserInOrgPool(ctx.User.ID, pool) {
- ctx.Data[pool.Type+"Enabled"] = false
- }
- } else {
- if strings.Contains(strings.ToLower(processType), strings.ToLower(pool.Type)) {
- if IsUserInOrgPool(ctx.User.ID, pool) {
- for _, center := range pool.Pool {
- includeCenters[center.Queue] = struct{}{}
- }
- } else {
- for _, center := range pool.Pool {
- excludeCenters[center.Queue] = struct{}{}
- }
+ ctx.Data["datasetType"] = datasetType
+ waitCount := cloudbrain.GetWaitingCloudbrainCount(models.TypeC2Net, computeResource, models.JobTypeDebug)
+ ctx.Data["WaitCount"] = waitCount
+ NotStopTaskCount, _ := cloudbrainTask.GetNotFinalStatusTaskCount(ctx.User.ID, models.TypeC2Net, string(models.JobTypeDebug), computeResource)
+ ctx.Data["NotStopTaskCount"] = NotStopTaskCount
- }
+ ctx.Data["code_path"] = cloudbrain.CodeMountPath
+ ctx.Data["dataset_path"] = cloudbrain.DataSetMountPath
+ ctx.Data["model_path"] = cloudbrain.ModelMountPath
- }
+ return nil
+}
- }
+func grampusTrainJobNewDataPrepare(ctx *context.Context, processType string) error {
+ ctx.Data["PageIsCloudBrain"] = true
+
+ var displayJobName = cloudbrainService.GetDisplayJobName(ctx.User.Name)
+ ctx.Data["display_job_name"] = displayJobName
+
+ //get valid images
+ if processType == grampus.ProcessorTypeNPU {
+ images, err := grampus.GetImages(processType, string(models.JobTypeTrain))
+ if err != nil {
+ log.Error("GetImages failed:", err.Error())
+ } else {
+ ctx.Data["images"] = images.Infos
}
}
//prepare available specs
if processType == grampus.ProcessorTypeNPU {
- prepareGrampusTrainSpecs(ctx, models.NPU)
+ prepareGrampusSpecs(ctx, models.NPU)
} else if processType == grampus.ProcessorTypeGPU {
- prepareGrampusTrainSpecs(ctx, models.GPU)
+ prepareGrampusSpecs(ctx, models.GPU)
}
//get branches
@@ -201,55 +429,19 @@ func GrampusTrainJobVersionNew(ctx *context.Context) {
}
}
-func prepareGrampusTrainSpecs(ctx *context.Context, computeResource string) {
+func prepareGrampusSpecs(ctx *context.Context, computeResource string, jobType ...models.JobType) {
+ tempJobType := models.JobTypeTrain
+ if len(jobType) > 0 {
+ tempJobType = jobType[0]
+ }
noteBookSpecs, _ := resource.FindAvailableSpecs(ctx.User.ID, models.FindSpecsOptions{
- JobType: models.JobTypeTrain,
+ JobType: tempJobType,
ComputeResource: computeResource,
Cluster: models.C2NetCluster,
})
ctx.Data["Specs"] = noteBookSpecs
}
-func getFilterSpecBySpecialPool(specs *models.GetGrampusResourceSpecsResult, includeCenters map[string]struct{}, excludeCenters map[string]struct{}) []models.GrampusSpec {
- if len(includeCenters) == 0 && len(excludeCenters) == 0 {
- return specs.Infos
- }
- var grampusSpecs []models.GrampusSpec
- for _, info := range specs.Infos {
- if isInIncludeCenters(info, includeCenters) || (len(excludeCenters) != 0 && isNotAllInExcludeCenters(info, excludeCenters)) {
- grampusSpecs = append(grampusSpecs, info)
- }
-
- }
- return grampusSpecs
-}
-
-func isInIncludeCenters(grampusSpec models.GrampusSpec, centers map[string]struct{}) bool {
- for _, center := range grampusSpec.Centers {
- if _, ok := centers[center.ID]; ok {
- return true
- }
- }
- return false
-}
-func isNotAllInExcludeCenters(grampusSpec models.GrampusSpec, centers map[string]struct{}) bool {
- for _, center := range grampusSpec.Centers {
- if _, ok := centers[center.ID]; !ok {
- return true
- }
- }
- return false
-}
-
-func IsUserInOrgPool(userId int64, pool *models.SpecialPool) bool {
- org, _ := models.GetOrgByName(pool.Org)
- if org != nil {
- isOrgMember, _ := models.IsOrganizationMember(org.ID, userId)
- return isOrgMember
- }
- return false
-}
-
func grampusParamCheckCreateTrainJob(form auth.CreateGrampusTrainJobForm) error {
if !strings.HasSuffix(strings.TrimSpace(form.BootFile), ".py") {
log.Error("the boot file(%s) must be a python file", form.BootFile)
@@ -721,30 +913,64 @@ func grampusTrainJobNpuCreate(ctx *context.Context, form auth.CreateGrampusTrain
ctx.Redirect(setting.AppSubURL + ctx.Repo.RepoLink + "/modelarts/train-job")
}
+func GetGrampusNotebook(ctx *context.APIContext) {
+ var (
+ err error
+ )
+
+ ID := ctx.Params(":id")
+ job, err := models.GetCloudbrainByID(ID)
+ if err != nil {
+ ctx.NotFound("", err)
+ log.Error("GetCloudbrainByID failed:", err)
+ return
+ }
+
+ jobAfter, err := cloudbrainTask.SyncGrampusNotebookStatus(job)
+
+ if err != nil {
+ ctx.NotFound(err)
+ log.Error("Sync cloud brain one status failed:", err)
+ return
+ }
+
+ aiCenterName := cloudbrainService.GetAiCenterShow(jobAfter.AiCenter, ctx.Context)
+
+ ctx.JSON(http.StatusOK, map[string]interface{}{
+ "ID": ID,
+ "JobName": jobAfter.JobName,
+ "JobStatus": jobAfter.Status,
+ "AiCenter": aiCenterName,
+ "CreatedTime": jobAfter.CreatedUnix.Format("2006-01-02 15:04:05"),
+ "CompletedTime": jobAfter.UpdatedUnix.Format("2006-01-02 15:04:05"),
+ "JobDuration": jobAfter.TrainJobDuration,
+ })
+}
+
func GrampusStopJob(ctx *context.Context) {
- var ID = ctx.Params(":jobid")
+ var ID = ctx.Params(":id")
var resultCode = "0"
var errorMsg = ""
var status = ""
task := ctx.Cloudbrain
for {
- if task.Status == string(models.GrampusStatusStopped) || task.Status == string(models.GrampusStatusFailed) || task.Status == string(models.GrampusStatusSucceeded) {
+ if task.Status == models.GrampusStatusStopped || task.Status == models.GrampusStatusFailed || task.Status == models.GrampusStatusSucceeded {
log.Error("the job(%s) has been stopped", task.JobName, ctx.Data["msgID"])
resultCode = "-1"
- errorMsg = "system error"
+ errorMsg = ctx.Tr("cloudbrain.Already_stopped")
break
}
- res, err := grampus.StopJob(task.JobID)
+ res, err := grampus.StopJob(task.JobID, task.JobType)
if err != nil {
log.Error("StopJob(%s) failed:%v", task.JobName, err, ctx.Data["msgID"])
resultCode = strconv.Itoa(res.ErrorCode)
- errorMsg = res.ErrorMsg
+ errorMsg = ctx.Tr("cloudbrain.Stopped_failed")
break
}
oldStatus := task.Status
- task.Status = string(models.GrampusStatusStopped)
+ task.Status = getStopJobResponseStatus(res)
if task.EndTime == 0 {
task.EndTime = timeutil.TimeStampNow()
}
@@ -773,6 +999,33 @@ func GrampusStopJob(ctx *context.Context) {
})
}
+func getStopJobResponseStatus(res *models.GrampusStopJobResponse) string {
+ newStatus := models.GrampusStatusStopping
+ if res.Status != "" {
+ newStatus = grampus.TransTrainJobStatus(res.Status)
+ }
+ return newStatus
+}
+
+func GrampusNotebookDel(ctx *context.Context) {
+ var listType = ctx.Query("listType")
+ if err := deleteGrampusJob(ctx); err != nil {
+ log.Error("deleteGrampusJob failed: %v", err, ctx.Data["msgID"])
+ ctx.ServerError(err.Error(), err)
+ return
+ }
+
+ var isAdminPage = ctx.Query("isadminpage")
+ var isHomePage = ctx.Query("ishomepage")
+ if ctx.IsUserSiteAdmin() && isAdminPage == "true" {
+ ctx.Redirect(setting.AppSubURL + "/admin" + "/cloudbrains")
+ } else if isHomePage == "true" {
+ ctx.Redirect(setting.AppSubURL + "/cloudbrains")
+ } else {
+ ctx.Redirect(setting.AppSubURL + ctx.Repo.RepoLink + "/debugjob?debugListType=" + listType)
+ }
+}
+
func GrampusTrainJobDel(ctx *context.Context) {
var listType = ctx.Query("listType")
if err := deleteGrampusJob(ctx); err != nil {
@@ -795,9 +1048,9 @@ func GrampusTrainJobDel(ctx *context.Context) {
func deleteGrampusJob(ctx *context.Context) error {
task := ctx.Cloudbrain
- if task.Status != string(models.GrampusStatusStopped) && task.Status != string(models.GrampusStatusSucceeded) && task.Status != string(models.GrampusStatusFailed) {
+ if task.Status != models.GrampusStatusStopped && task.Status != models.GrampusStatusSucceeded && task.Status != models.GrampusStatusFailed {
log.Error("the job(%s) has not been stopped", task.JobName, ctx.Data["msgID"])
- return errors.New("the job has not been stopped")
+ return errors.New(ctx.Tr("cloudbrain.Not_Stopped"))
}
err := models.DeleteJob(task)
@@ -815,6 +1068,166 @@ func deleteGrampusJob(ctx *context.Context) error {
return nil
}
+type NotebookDataset struct {
+ DatasetUrl string `json:"dataset_url"`
+}
+
+func GrampusNotebookShow(ctx *context.Context) {
+ ctx.Data["PageIsCloudBrain"] = true
+
+ var task *models.Cloudbrain
+ task, err := models.GetCloudbrainByIDWithDeleted(ctx.Params(":id"))
+ if err != nil {
+ log.Error("GetCloudbrainByID failed:" + err.Error())
+ ctx.NotFound(ctx.Req.URL.RequestURI(), nil)
+ return
+ }
+ task.ContainerIp = ""
+
+ if task.DeletedAt.IsZero() && cloudbrainTask.IsTaskNotStop(task) { //normal record
+ result, err := grampus.GetNotebookJob(task.JobID)
+ if err != nil {
+ log.Error("GetJob failed:" + err.Error())
+ ctx.NotFound(ctx.Req.URL.RequestURI(), nil)
+ return
+ }
+
+ if result != nil {
+ if len(result.JobInfo.Tasks[0].CenterID) == 1 && len(result.JobInfo.Tasks[0].CenterName) == 1 {
+ task.AiCenter = result.JobInfo.Tasks[0].CenterID[0] + "+" + result.JobInfo.Tasks[0].CenterName[0]
+ }
+ oldStatus := task.Status
+ task.Status = grampus.TransTrainJobStatus(result.JobInfo.Status)
+ if task.Status != oldStatus || task.Status == models.GrampusStatusRunning {
+ task.Duration = result.JobInfo.RunSec
+ if task.Duration < 0 {
+ task.Duration = 0
+ }
+ task.TrainJobDuration = models.ConvertDurationToStr(task.Duration)
+
+ if task.StartTime == 0 && result.JobInfo.StartedAt > 0 {
+ task.StartTime = timeutil.TimeStamp(result.JobInfo.StartedAt)
+ }
+ if task.EndTime == 0 && models.IsTrainJobTerminal(task.Status) && task.StartTime > 0 {
+ task.EndTime = task.StartTime.Add(task.Duration)
+ }
+ task.CorrectCreateUnix()
+ if oldStatus != task.Status {
+ notification.NotifyChangeCloudbrainStatus(task, oldStatus)
+ if models.IsTrainJobTerminal(task.Status) && task.ComputeResource == models.NPUResource {
+ if len(result.JobInfo.Tasks[0].CenterID) == 1 {
+ urchin.GetBackNpuModel(task.ID, grampus.GetRemoteEndPoint(result.JobInfo.Tasks[0].CenterID[0]), grampus.BucketRemote, grampus.GetNpuModelObjectKey(task.JobName), grampus.GetCenterProxy(setting.Grampus.LocalCenterID))
+ }
+ }
+ }
+ }
+ err = models.UpdateJob(task)
+ if err != nil {
+ log.Error("UpdateJob failed:" + err.Error())
+ }
+ }
+ }
+
+ if len(task.Parameters) > 0 {
+ var parameters models.Parameters
+ err := json.Unmarshal([]byte(task.Parameters), &parameters)
+ if err != nil {
+ log.Error("Failed to Unmarshal Parameters: %s (%v)", task.Parameters, err)
+ ctx.ServerError("system error", err)
+ return
+ }
+
+ if len(parameters.Parameter) > 0 {
+ paramTemp := ""
+ for _, Parameter := range parameters.Parameter {
+ param := Parameter.Label + " = " + Parameter.Value + "; "
+ paramTemp = paramTemp + param
+ }
+ task.Parameters = paramTemp[:len(paramTemp)-2]
+ } else {
+ task.Parameters = ""
+ }
+ }
+ user, err := models.GetUserByID(task.UserID)
+ if err == nil {
+ task.User = user
+ }
+
+ prepareSpec4Show(ctx, task)
+
+ ctx.Data["task"] = task
+ ctx.Data["datasetDownload"] = getDatasetDownloadInfo(ctx, task)
+ ctx.Data["modelDownload"] = getModelDownloadInfo(ctx, task)
+ ctx.Data["canDownload"] = cloudbrain.CanModifyJob(ctx, task)
+ ctx.Data["ai_center"] = cloudbrainService.GetAiCenterShow(task.AiCenter, ctx)
+ ctx.Data["code_path"] = cloudbrain.CodeMountPath
+ ctx.Data["dataset_path"] = cloudbrain.DataSetMountPath
+ ctx.Data["model_path"] = cloudbrain.ModelMountPath
+ ctx.HTML(http.StatusOK, tplGrampusNotebookShow)
+}
+
+func getDatasetDownloadInfo(ctx *context.Context, task *models.Cloudbrain) []*models.DatasetDownload {
+ datasetDownload := make([]*models.DatasetDownload, 0)
+ if ctx.IsSigned {
+ if task.Uuid != "" && task.UserID == ctx.User.ID {
+ if task.IsGPUTask() {
+ return GetCloudBrainDataSetInfo(task.Uuid, task.DatasetName, false)
+ } else {
+ datasetDownload = GetCloudBrainDataSetInfo(task.Uuid, task.DatasetName, false)
+ datasetObsUrlList := make([]NotebookDataset, 0)
+ _ = json.Unmarshal([]byte(task.DataUrl), &datasetObsUrlList)
+
+ for _, datasetInfo := range datasetDownload {
+
+ for _, datasetObs := range datasetObsUrlList {
+ log.Info("datasetObsUrl:" + datasetObs.DatasetUrl + "datasetName:" + datasetInfo.DatasetName)
+ if strings.Contains(datasetObs.DatasetUrl, datasetInfo.DatasetName) {
+ datasetInfo.DatasetDownloadLink = datasetObs.DatasetUrl
+ break
+ }
+ }
+
+ }
+
+ }
+
+ }
+ }
+
+ return datasetDownload
+}
+
+func getModelDownloadInfo(ctx *context.Context, task *models.Cloudbrain) *models.ModelDownload {
+ var modelDownload models.ModelDownload
+ if ctx.IsSigned {
+ if task.ModelName != "" && task.UserID == ctx.User.ID {
+ if task.IsNPUTask() {
+ modelDownload = models.ModelDownload{
+ Name: task.CkptName,
+ DownloadLink: "",
+ IsDelete: false,
+ }
+ if !HasModelFile(task) {
+ modelDownload.IsDelete = true
+ }
+ datasetObsUrlList := make([]NotebookDataset, 0)
+ _ = json.Unmarshal([]byte(task.DataUrl), &datasetObsUrlList)
+ for _, datasetObs := range datasetObsUrlList {
+ if strings.Contains(datasetObs.DatasetUrl, task.CkptName) {
+ modelDownload.DownloadLink = datasetObs.DatasetUrl
+ break
+ }
+ }
+
+ }
+
+ }
+
+ }
+
+ return &modelDownload
+}
+
func GrampusTrainJobShow(ctx *context.Context) {
ctx.Data["PageIsCloudBrain"] = true
@@ -1158,3 +1571,172 @@ func HandleTaskWithAiCenter(ctx *context.Context) {
r["updateCounts"] = updateCounts
ctx.JSON(http.StatusOK, response.SuccessWithData(r))
}
+
+func GrampusNotebookDebug(ctx *context.Context) {
+
+ result, err := grampus.GetNotebookJob(ctx.Cloudbrain.JobID)
+
+ if err != nil {
+ ctx.RenderWithErr(err.Error(), tplDebugJobIndex, nil)
+ return
+ }
+ if len(result.JobInfo.Tasks) > 0 {
+
+ ctx.Redirect(result.JobInfo.Tasks[0].Url + "?token=" + result.JobInfo.Tasks[0].Token)
+ return
+ }
+ ctx.NotFound("Can not find the job.", nil)
+
+}
+
+func GrampusNotebookRestart(ctx *context.Context) {
+ var id = ctx.Params(":id")
+ var resultCode = "-1"
+ var errorMsg = ""
+ var status = ""
+ var spec *models.Specification
+
+ task := ctx.Cloudbrain
+ if ctx.Written() {
+ return
+ }
+
+ for {
+
+ if task.Status != models.GrampusStatusStopped && task.Status != models.GrampusStatusSucceeded && task.Status != models.GrampusStatusFailed {
+ log.Error("the job(%s) is not stopped", task.JobName, ctx.Data["MsgID"])
+ errorMsg = "the job is not stopped"
+ break
+ }
+
+ count, err := cloudbrainTask.GetNotFinalStatusTaskCount(ctx.User.ID, models.TypeC2Net, string(models.JobTypeDebug), task.ComputeResource)
+
+ if err != nil {
+ log.Error("GetCloudbrainNotebookCountByUserID failed:%v", err, ctx.Data["MsgID"])
+ errorMsg = "system error"
+ break
+ } else {
+ if count >= 1 {
+ log.Error("the user already has running or waiting task", ctx.Data["MsgID"])
+ resultCode = "2"
+ errorMsg = ctx.Tr("repo.cloudbrain.morethanonejob")
+ break
+ }
+ }
+
+ oldSpec, err := resource.GetCloudbrainSpec(task.ID)
+ if err != nil || oldSpec == nil {
+ log.Error("NotebookManage GetCloudbrainSpec error.%v", err)
+ errorMsg = "Resource specification not available"
+ break
+ }
+
+ computeSourceSimple := models.GPU
+ action := models.ActionCreateGrampusGPUDebugTask
+ if task.ComputeResource == models.NPUResource {
+ computeSourceSimple = models.NPU
+ action = models.ActionCreateGrampusNPUDebugTask
+ }
+ spec, err = resource.GetAndCheckSpec(ctx.User.ID, oldSpec.ID, models.FindSpecsOptions{
+ JobType: models.JobType(task.JobType),
+ ComputeResource: computeSourceSimple,
+ Cluster: models.C2NetCluster,
+ })
+ if err != nil || spec == nil {
+ log.Error("NotebookManage GetAndCheckSpec error.task.id = %d", task.ID)
+ errorMsg = "Resource specification not support any more"
+ break
+ }
+ if !account.IsPointBalanceEnough(ctx.User.ID, spec.UnitPrice) {
+ log.Error("point balance is not enough,userId=%d specId=%d", ctx.User.ID, spec.ID)
+ errorMsg = ctx.Tr("points.insufficient_points_balance")
+ break
+ }
+ if task.IsGPUTask() {
+ if _, err := os.Stat(getOldJobPath(task)); err != nil {
+ log.Error("Can not find job minio path", err)
+ resultCode = "-1"
+ errorMsg = ctx.Tr("cloudbrain.result_cleared")
+ break
+ }
+ }
+
+ if !HasModelFile(task) { //使用预训练模型训练
+ errorMsg = ctx.Tr("repo.debug.manage.model_not_exist")
+ break
+ }
+ if hasDatasetDeleted(task) {
+ errorMsg = ctx.Tr("repo.debug.manage.dataset_not_exist")
+ break
+ }
+
+ createTime := timeutil.TimeStampNow()
+
+ res, err := grampus.RestartNotebookJob(task.JobID)
+ if err != nil {
+ log.Error("ManageNotebook2(%s) failed:%v", task.DisplayJobName, err.Error(), ctx.Data["MsgID"])
+ errorMsg = ctx.Tr("repo.debug_again_fail")
+ break
+ }
+
+ if res.GrampusResult.ErrorCode != 0 || res.NewId == "" {
+ log.Error("ManageNotebook2 failed:" + res.GrampusResult.ErrorMsg)
+ errorMsg = ctx.Tr("repo.debug_again_fail")
+ break
+ }
+
+ newTask := &models.Cloudbrain{
+ Status: res.Status,
+ UserID: task.UserID,
+ RepoID: task.RepoID,
+ JobID: res.NewId,
+ JobName: task.JobName,
+ DisplayJobName: task.DisplayJobName,
+ JobType: task.JobType,
+ Type: task.Type,
+ Uuid: task.Uuid,
+ Image: task.Image,
+ ImageID: task.ImageID,
+ EngineID: task.EngineID,
+ CommitID: task.CommitID,
+ EngineName: task.EngineName,
+ IsLatestVersion: "1",
+ BranchName: task.BranchName,
+ DatasetName: task.DatasetName,
+ ComputeResource: task.ComputeResource,
+ Description: task.Description,
+ CreatedUnix: createTime,
+ UpdatedUnix: createTime,
+ Spec: spec,
+ ModelName: task.ModelName,
+ ModelVersion: task.ModelVersion,
+ LabelName: task.LabelName,
+ PreTrainModelUrl: task.PreTrainModelUrl,
+ CkptName: task.CkptName,
+ WorkServerNumber: 1,
+ }
+
+ err = models.RestartCloudbrain(task, newTask)
+ if err != nil {
+ log.Error("RestartCloudbrain(%s) failed:%v", task.JobName, err.Error(), ctx.Data["MsgID"])
+ errorMsg = "system error"
+ break
+ }
+
+ id = strconv.FormatInt(newTask.ID, 10)
+
+ status = res.Status
+ resultCode = "0"
+
+ notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, id, newTask.DisplayJobName, action)
+
+ break
+ }
+
+ ctx.JSON(200, map[string]string{
+ "result_code": resultCode,
+ "error_msg": errorMsg,
+ "status": status,
+ "id": id,
+ })
+}
diff --git a/routers/repo/modelarts.go b/routers/repo/modelarts.go
index 47e8e7464..5ccf53bf5 100755
--- a/routers/repo/modelarts.go
+++ b/routers/repo/modelarts.go
@@ -255,10 +255,37 @@ func Notebook2Create(ctx *context.Context, form auth.CreateModelArtsNotebookForm
return
}
+ req := cloudbrain.GenerateModelArtsNotebookReq{
+ DisplayJobName: displayJobName,
+ JobName: jobName,
+ Description: description,
+ Uuid: uuid,
+ ImageId: imageId,
+ Spec: spec,
+ BootFile: "",
+ AutoStopDurationMs: modelarts.AutoStopDurationMs,
+ }
+
+ if form.ModelName != "" { //使用预训练模型训练
+ _, err := models.QueryModelByPath(form.PreTrainModelUrl)
+ if err != nil {
+ log.Error("Can not find model", err)
+ notebookNewDataPrepare(ctx)
+ ctx.RenderWithErr(ctx.Tr("repo.modelconvert.manage.model_not_exist"), tplModelArtsNotebookNew, &form)
+ return
+ }
+ req.ModelName = form.ModelName
+ req.LabelName = form.LabelName
+ req.CkptName = form.CkptName
+ req.ModelVersion = form.ModelVersion
+ req.PreTrainModelUrl = form.PreTrainModelUrl
+
+ }
+
if setting.ModelartsCD.Enabled {
- _, err = modelarts_cd.GenerateNotebook(ctx, displayJobName, jobName, uuid, description, imageId, spec, "", modelarts.AutoStopDurationMs)
+ _, err = modelarts_cd.GenerateNotebook(ctx, req)
} else {
- _, err = modelarts.GenerateNotebook2(ctx, displayJobName, jobName, uuid, description, imageId, spec, "", modelarts.AutoStopDurationMs)
+ _, err = modelarts.GenerateNotebook2(ctx, req)
}
if err != nil {
@@ -295,11 +322,17 @@ func NotebookShow(ctx *context.Context) {
}
- datasetDownload := make([]models.DatasetDownload, 0)
+ datasetDownload := make([]*models.DatasetDownload, 0)
+ var modelDownload models.ModelDownload
if ctx.IsSigned {
if task.Uuid != "" && task.UserID == ctx.User.ID {
datasetDownload = GetCloudBrainDataSetInfo(task.Uuid, task.DatasetName, true)
}
+ if task.ModelName != "" && task.UserID == ctx.User.ID {
+ modelDownload = GetModelDownload(task)
+
+ }
+
}
user, err := models.GetUserByID(task.UserID)
if err == nil {
@@ -320,6 +353,7 @@ func NotebookShow(ctx *context.Context) {
}
ctx.Data["duration"] = task.TrainJobDuration
ctx.Data["datasetDownload"] = datasetDownload
+ ctx.Data["modelDownload"] = modelDownload
ctx.Data["task"] = task
ctx.Data["ID"] = ID
ctx.Data["jobName"] = task.JobName
@@ -327,8 +361,25 @@ func NotebookShow(ctx *context.Context) {
ctx.HTML(200, tplModelArtsNotebookShow)
}
-func GetCloudBrainDataSetInfo(uuid string, datasetname string, isNeedDown bool) []models.DatasetDownload {
- datasetDownload := make([]models.DatasetDownload, 0)
+func GetModelDownload(task *models.Cloudbrain) models.ModelDownload {
+ index := strings.Index(task.PreTrainModelUrl, "/")
+ key := task.PreTrainModelUrl[index+1:] + task.CkptName
+ url, _ := storage.GetObsCreateSignedUrlByBucketAndKey(setting.Bucket, key)
+ modelDownload := models.ModelDownload{
+ Name: task.CkptName,
+ DownloadLink: url,
+ IsDelete: false,
+ }
+
+ if !HasModelFile(task) {
+ log.Warn("Can not get model by path:" + task.PreTrainModelUrl)
+ modelDownload.IsDelete = true
+ }
+ return modelDownload
+}
+
+func GetCloudBrainDataSetInfo(uuid string, datasetname string, isNeedDown bool) []*models.DatasetDownload {
+ datasetDownload := make([]*models.DatasetDownload, 0)
if len(uuid) == 0 {
return datasetDownload
}
@@ -365,7 +416,7 @@ func GetCloudBrainDataSetInfo(uuid string, datasetname string, isNeedDown bool)
}
}
- datasetDownload = append(datasetDownload, models.DatasetDownload{
+ datasetDownload = append(datasetDownload, &models.DatasetDownload{
DatasetName: name,
DatasetDownloadLink: url,
RepositoryLink: link,
@@ -492,6 +543,16 @@ func NotebookRestart(ctx *context.Context) {
errorMsg = ctx.Tr("points.insufficient_points_balance")
break
}
+ if !HasModelFile(task) { //使用预训练模型训练
+ errorMsg = ctx.Tr("repo.debug.manage.model_not_exist")
+ break
+ }
+
+ if hasDatasetDeleted(task) {
+ errorMsg = ctx.Tr("repo.debug.manage.dataset_not_exist")
+ break
+ }
+
createTime := timeutil.TimeStampNow()
param := models.NotebookAction{
Action: models.ActionStart,
@@ -527,21 +588,26 @@ func NotebookRestart(ctx *context.Context) {
}
newTask := &models.Cloudbrain{
- Status: res.Status,
- UserID: task.UserID,
- RepoID: task.RepoID,
- JobID: task.JobID,
- JobName: task.JobName,
- DisplayJobName: task.DisplayJobName,
- JobType: task.JobType,
- Type: task.Type,
- Uuid: task.Uuid,
- Image: task.Image,
- ComputeResource: task.ComputeResource,
- Description: task.Description,
- CreatedUnix: createTime,
- UpdatedUnix: createTime,
- Spec: spec,
+ Status: res.Status,
+ UserID: task.UserID,
+ RepoID: task.RepoID,
+ JobID: task.JobID,
+ JobName: task.JobName,
+ DisplayJobName: task.DisplayJobName,
+ JobType: task.JobType,
+ Type: task.Type,
+ Uuid: task.Uuid,
+ Image: task.Image,
+ ComputeResource: task.ComputeResource,
+ Description: task.Description,
+ CreatedUnix: createTime,
+ UpdatedUnix: createTime,
+ Spec: spec,
+ ModelName: task.ModelName,
+ ModelVersion: task.ModelVersion,
+ LabelName: task.LabelName,
+ PreTrainModelUrl: task.PreTrainModelUrl,
+ CkptName: task.CkptName,
}
err = models.RestartCloudbrain(task, newTask)
@@ -584,17 +650,7 @@ func NotebookStop(ctx *context.Context) {
break
}
- param := models.NotebookAction{
- Action: models.ActionStop,
- }
-
- var err error
- var res *models.NotebookActionResult
- if task.Type == models.TypeCloudBrainTwo {
- res, err = modelarts.ManageNotebook2(task.JobID, param)
- } else if task.Type == models.TypeCDCenter {
- res, err = modelarts_cd.ManageNotebook(task.JobID, param)
- }
+ err, res := StopModelArtsNotebook(task)
if err != nil {
log.Error("ManageNotebook2(%s) failed:%v", task.JobName, err.Error(), ctx.Data["MsgID"])
@@ -635,6 +691,21 @@ func NotebookStop(ctx *context.Context) {
})
}
+func StopModelArtsNotebook(task *models.Cloudbrain) (error, *models.NotebookActionResult) {
+ param := models.NotebookAction{
+ Action: models.ActionStop,
+ }
+
+ var err error
+ var res *models.NotebookActionResult
+ if task.Type == models.TypeCloudBrainTwo {
+ res, err = modelarts.ManageNotebook2(task.JobID, param)
+ } else if task.Type == models.TypeCDCenter {
+ res, err = modelarts_cd.ManageNotebook(task.JobID, param)
+ }
+ return err, res
+}
+
func NotebookDel(ctx *context.Context) {
var listType = ctx.Query("debugListType")
task := ctx.Cloudbrain
@@ -1807,7 +1878,7 @@ func TrainJobShow(ctx *context.Context) {
return
}
ctx.Data["canNewJob"] = canNewJob
- datasetList := make([][]models.DatasetDownload, 0)
+ datasetList := make([][]*models.DatasetDownload, 0)
//将运行参数转化为epoch_size = 3, device_target = Ascend的格式
for i, task := range VersionListTasks {
@@ -2353,7 +2424,7 @@ func InferenceJobIndex(ctx *context.Context) {
tasks[i].ComputeResource = models.NPUResource
}
}
-
+ isQueryPrivate := isQueryPrivateModel(ctx)
repoId := ctx.Repo.Repository.ID
Type := -1
_, model_count, _ := models.QueryModel(&models.AiModelQueryOptions{
@@ -2361,10 +2432,12 @@ func InferenceJobIndex(ctx *context.Context) {
Page: 1,
PageSize: 2,
},
- RepoID: repoId,
- Type: Type,
- New: MODEL_LATEST,
- Status: 0,
+ RepoID: repoId,
+ Type: Type,
+ New: MODEL_LATEST,
+ IsOnlyThisRepo: true,
+ Status: 0,
+ IsQueryPrivate: isQueryPrivate,
})
ctx.Data["MODEL_COUNT"] = model_count
@@ -2433,7 +2506,7 @@ func inferenceJobNewDataPrepare(ctx *context.Context) error {
return err
}
ctx.Data["config_list"] = configList.ParaConfigs
-
+ isQueryPrivate := isQueryPrivateModel(ctx)
repoId := ctx.Repo.Repository.ID
Type := -1
_, model_count, _ := models.QueryModel(&models.AiModelQueryOptions{
@@ -2441,10 +2514,12 @@ func inferenceJobNewDataPrepare(ctx *context.Context) error {
Page: 1,
PageSize: 2,
},
- RepoID: repoId,
- Type: Type,
- New: MODEL_LATEST,
- Status: 0,
+ RepoID: repoId,
+ Type: Type,
+ New: MODEL_LATEST,
+ IsOnlyThisRepo: true,
+ Status: 0,
+ IsQueryPrivate: isQueryPrivate,
})
ctx.Data["MODEL_COUNT"] = model_count
ctx.Data["datasetType"] = models.TypeCloudBrainTwo
diff --git a/routers/repo/repo_statistic.go b/routers/repo/repo_statistic.go
index 468e6fa85..c1a7954a7 100755
--- a/routers/repo/repo_statistic.go
+++ b/routers/repo/repo_statistic.go
@@ -166,6 +166,8 @@ func RepoStatisticDaily(date string) {
repoStat.NumIssuesGrowth = repoStat.NumIssues - repoStatisticFourMonthsAgo.NumIssues
}
+ models.SyncStatDataToRepo(repo)
+
if _, err = models.InsertRepoStat(&repoStat); err != nil {
log.Error("InsertRepoStat failed(%s): %v", projectName, err)
log.Error("failed statistic: %s", projectName)
diff --git a/routers/repo/user_data_analysis.go b/routers/repo/user_data_analysis.go
index 508addf75..8121f167c 100755
--- a/routers/repo/user_data_analysis.go
+++ b/routers/repo/user_data_analysis.go
@@ -21,6 +21,7 @@ import (
const (
PAGE_SIZE = 2000
Excel_File_Path = "/useranalysis/"
+ USER_YEAR = 2022
)
func getUserMetricsExcelHeader(ctx *context.Context) map[string]string {
@@ -104,6 +105,7 @@ func getExcelHeader(ctx *context.Context) map[string]string {
excelHeader = append(excelHeader, ctx.Tr("user.static.CloudBrainRunTime"))
excelHeader = append(excelHeader, ctx.Tr("user.static.CommitDatasetNum"))
excelHeader = append(excelHeader, ctx.Tr("user.static.CommitModelCount"))
+ excelHeader = append(excelHeader, ctx.Tr("user.static.ModelConvertCount"))
excelHeader = append(excelHeader, ctx.Tr("user.static.FocusOtherUser"))
excelHeader = append(excelHeader, ctx.Tr("user.static.CollectDataset"))
@@ -178,6 +180,8 @@ func writeExcel(row int, xlsx *excelize.File, sheetName string, userRecord *mode
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitModelCount)
tmp = tmp + 1
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.ModelConvertCount)
+ tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.FocusOtherUser)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectDataset)
@@ -256,6 +260,8 @@ func writeExcelPage(row int, xlsx *excelize.File, sheetName string, userRecord *
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitModelCount)
tmp = tmp + 1
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.ModelConvertCount)
+ tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.FocusOtherUser)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectDataset)
@@ -714,6 +720,12 @@ func TimingCountDataByDateAndReCount(date string, isReCount bool) {
log.Info("startTime time:" + startTime.Format("2006-01-02 15:04:05"))
log.Info("endTime time:" + endTime.Format("2006-01-02 15:04:05"))
warnEmailMessage := "用户统计信息入库失败,请尽快定位。"
+
+ startYear := time.Date(USER_YEAR, 1, 1, 0, 0, 0, 1, t.Location())
+ endYear := startYear.AddDate(1, 0, 0)
+
+ models.RefreshUserYearTable(startYear, endYear)
+
//query wiki data
log.Info("start to time count data")
wikiMap, err := queryWikiCountMap(startTime, endTime)
@@ -907,3 +919,9 @@ func QueryUserLoginInfo(ctx *context.Context) {
log.Info("writer exel error." + err.Error())
}
}
+
+func QueryUserAnnualReport(ctx *context.Context) {
+ log.Info("start to QueryUserAnnualReport ")
+ result := models.QueryUserAnnualReport(ctx.User.ID)
+ ctx.JSON(http.StatusOK, result)
+}
diff --git a/routers/repo/user_invitation.go b/routers/repo/user_invitation.go
index a2752a481..6e7207bce 100644
--- a/routers/repo/user_invitation.go
+++ b/routers/repo/user_invitation.go
@@ -49,9 +49,10 @@ func getInvitationDetailExcelHeader(ctx *context.Context) map[string]string {
excelHeader := make([]string, 0)
excelHeader = append(excelHeader, ctx.Tr("user.static.id"))
excelHeader = append(excelHeader, ctx.Tr("user.static.name"))
- excelHeader = append(excelHeader, ctx.Tr("user.static.srcUserId"))
+ excelHeader = append(excelHeader, ctx.Tr("user.static.email"))
excelHeader = append(excelHeader, ctx.Tr("user.static.phone"))
excelHeader = append(excelHeader, ctx.Tr("user.static.registdate"))
+ excelHeader = append(excelHeader, ctx.Tr("user.static.srcUserId"))
excelHeaderMap := make(map[string]string, 0)
var i byte
@@ -92,8 +93,7 @@ func writeInvitationDetailExcel(row int, xlsx *excelize.File, sheetName string,
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.Name)
tmp = tmp + 1
-
- xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.SrcUserID)
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.Email)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.Phone)
@@ -101,7 +101,9 @@ func writeInvitationDetailExcel(row int, xlsx *excelize.File, sheetName string,
formatTime := userRecord.CreatedUnix.Format("2006-01-02 15:04:05")
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, formatTime[0:len(formatTime)-3])
+ tmp = tmp + 1
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.SrcUserID)
}
func DownloadInvitationDetail(ctx *context.Context) {
@@ -413,6 +415,7 @@ func queryData(ctx *context.Context, startTime time.Time, endTime time.Time) {
invi.Name = tmpUser.Name
invi.Phone = tmpUser.PhoneNumber
invi.CreatedUnix = tmpUser.CreatedUnix
+ invi.Email = tmpUser.Email
} else {
invi.Name = "已注销"
}
diff --git a/routers/routes/routes.go b/routers/routes/routes.go
index 60f0365ea..063a20999 100755
--- a/routers/routes/routes.go
+++ b/routers/routes/routes.go
@@ -371,7 +371,18 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Get("/images/custom", repo.GetCustomImages)
m.Get("/images/star", repo.GetStarImages)
- m.Get("/repos", routers.ExploreRepos)
+ m.Group("/repos", func() {
+ //m.Get("", routers.ExploreRepos)
+ m.Get("", routers.GetRepoSearchPage)
+ m.Group("/square", func() {
+ m.Get("", routers.GetRepoSquarePage)
+ m.Get("/tab", routers.RepoSquare)
+ m.Get("/active-user", routers.ActiveUser)
+ m.Get("/active-org", routers.ActiveOrg)
+ })
+
+ m.Get("/search", routers.RepoFind)
+ })
m.Get("/datasets", routers.ExploreDatasets)
m.Get("/users", routers.ExploreUsers)
m.Get("/organizations", routers.ExploreOrganizations)
@@ -1218,10 +1229,23 @@ func RegisterRoutes(m *macaron.Macaron) {
})
}, context.RepoRef())
m.Group("/grampus", func() {
+ m.Group("/notebook", func() {
+ m.Group("/:id", func() {
+ m.Get("", reqRepoCloudBrainReader, repo.GrampusNotebookShow)
+ m.Get("/debug", reqWechatBind, cloudbrain.AdminOrJobCreaterRight, repo.GrampusNotebookDebug)
+ m.Post("/restart", reqWechatBind, cloudbrain.AdminOrJobCreaterRight, repo.GrampusNotebookRestart)
+ m.Post("/stop", cloudbrain.AdminOrOwnerOrJobCreaterRight, repo.GrampusStopJob)
+ m.Post("/del", cloudbrain.AdminOrOwnerOrJobCreaterRight, repo.GrampusNotebookDel)
+ })
+
+ m.Get("/create", reqWechatBind, reqRepoCloudBrainWriter, context.PointAccount(), repo.GrampusNotebookNew)
+ m.Post("/create", reqWechatBind, reqRepoCloudBrainWriter, bindIgnErr(auth.CreateGrampusNotebookForm{}), repo.GrampusNotebookCreate)
+ })
+
m.Group("/train-job", func() {
m.Group("/:jobid", func() {
m.Get("", reqRepoCloudBrainReader, repo.GrampusTrainJobShow)
- m.Post("/stop", cloudbrain.AdminOrOwnerOrJobCreaterRight, repo.GrampusStopJob)
+ m.Post("/stop", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo.GrampusStopJob)
m.Post("/del", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo.GrampusTrainJobDel)
m.Get("/model_download", cloudbrain.AdminOrJobCreaterRightForTrain, repo.ModelDownload)
m.Get("/create_version", reqWechatBind, cloudbrain.AdminOrJobCreaterRightForTrain, repo.GrampusTrainJobVersionNew)
@@ -1251,6 +1275,7 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Post("/delete_model_convert/:id", repo.DeleteModelConvert)
m.Post("/convert_stop/:id", repo.StopModelConvert)
m.Put("/modify_model", repo.ModifyModelInfo)
+ m.Put("/modify_model_status", repo.ModifyModelPrivate)
m.Get("/show_model", reqRepoModelManageReader, repo.ShowModelTemplate)
m.Get("/convert_model", reqRepoModelManageReader, repo.ConvertModelTemplate)
m.Get("/show_model_info", repo.ShowModelInfo)
@@ -1290,16 +1315,6 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Group("/modelarts", func() {
m.Group("/notebook", func() {
- /* v1.0
- m.Group("/:jobid", func() {
- m.Get("", reqRepoCloudBrainReader, repo.NotebookShow)
- m.Get("/debug", cloudbrain.AdminOrJobCreaterRight, repo.NotebookDebug)
- m.Post("/:action", reqRepoCloudBrainWriter, repo.NotebookManage)
- m.Post("/del", cloudbrain.AdminOrOwnerOrJobCreaterRight, repo.NotebookDel)
- })
- m.Get("/create", reqRepoCloudBrainWriter, repo.NotebookNew)
- m.Post("/create", reqRepoCloudBrainWriter, bindIgnErr(auth.CreateModelArtsNotebookForm{}), repo.NotebookCreate)
- */
m.Group("/:id", func() {
m.Get("", reqRepoCloudBrainReader, repo.NotebookShow)
m.Get("/debug", cloudbrain.AdminOrJobCreaterRight, repo.NotebookDebug2)
diff --git a/routers/user/Invitation.go b/routers/user/Invitation.go
index 8491390b2..0eb8ae2f4 100644
--- a/routers/user/Invitation.go
+++ b/routers/user/Invitation.go
@@ -63,7 +63,7 @@ func InviationTpl(ctx *context.Context) {
ctx.HTML(200, tplInvitation)
}
-func RegisteUserByInvitaionCode(invitationcode string, newUserId int64, newPhoneNumber string) error {
+func RegisteUserByInvitaionCode(invitationcode string, newUserId int64, newPhoneNumber string, email string) error {
user := parseInvitaionCode(invitationcode)
if user == nil {
return errors.New("The invitated user not existed.")
@@ -85,6 +85,7 @@ func RegisteUserByInvitaionCode(invitationcode string, newUserId int64, newPhone
SrcUserID: user.ID,
UserID: newUserId,
Phone: newPhoneNumber,
+ Email: email,
}
err := models.InsertInvitaion(invitation)
diff --git a/routers/user/auth.go b/routers/user/auth.go
index 3d74b6ddd..5314571d2 100755
--- a/routers/user/auth.go
+++ b/routers/user/auth.go
@@ -1368,7 +1368,7 @@ func SignUpPost(ctx *context.Context, cpt *captcha.Captcha, form auth.RegisterFo
log.Info("enter here, and form.InvitaionCode =" + invitationCode)
if invitationCode != "" {
- RegisteUserByInvitaionCode(invitationCode, u.ID, u.PhoneNumber)
+ RegisteUserByInvitaionCode(invitationCode, u.ID, u.PhoneNumber, u.Email)
}
err := models.AddEmailAddress(&models.EmailAddress{
diff --git a/services/cloudbrain/clear.go b/services/cloudbrain/clear.go
new file mode 100644
index 000000000..44613ee3c
--- /dev/null
+++ b/services/cloudbrain/clear.go
@@ -0,0 +1,151 @@
+package cloudbrain
+
+import (
+ "io/ioutil"
+ "os"
+ "sort"
+ "time"
+
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/storage"
+)
+
+func ClearCloudbrainResultSpace() {
+ log.Info("clear cloudbrain one result space begin.")
+ if !setting.ClearStrategy.Enabled{
+ return
+ }
+
+ tasks, err := models.GetCloudBrainOneStoppedNotDebugJobDaysAgo(setting.ClearStrategy.ResultSaveDays, setting.ClearStrategy.BatchSize)
+ if err != nil {
+ log.Warn("Failed to get cloudbrain, clear result failed.", err)
+ return
+ }
+ debugTasks, err := models.GetCloudBrainOneStoppedDebugJobDaysAgo(setting.ClearStrategy.ResultSaveDays, setting.ClearStrategy.DebugJobSize)
+ if err != nil {
+ log.Warn("Failed to get debug cloudbrain.", err)
+
+ }
+ tasks=append(tasks,debugTasks...)
+
+ if err != nil {
+ log.Warn("Failed to get cloudbrain, clear result failed.", err)
+ return
+ }
+ var ids []int64
+ for _, task := range tasks {
+ err := DeleteCloudbrainOneJobStorage(task.JobName)
+ if err == nil {
+ log.Info("clear job in cloudbrain table:"+task.JobName)
+ ids = append(ids, task.ID)
+ }
+ }
+
+ err = models.UpdateCloudBrainRecordsCleared(ids)
+ if err != nil {
+ log.Warn("Failed to set cloudbrain cleared status", err)
+ }
+ //如果云脑表处理完了,通过遍历minio对象处理历史垃圾数据,如果存在的话
+ if len(tasks) < setting.ClearStrategy.BatchSize+setting.ClearStrategy.DebugJobSize {
+ clearLocalHistoryTrashFile()
+ clearMinioHistoryTrashFile()
+
+ }
+ log.Info("clear cloudbrain one result space end.")
+
+}
+
// clearMinioHistoryTrashFile walks the minio-backed job code directory (via
// its local real path) and deletes directories older than TrashSaveDays that
// have no matching cloudbrain record. Files are visited oldest-first so the
// loop can stop at the first entry that is newer than the cutoff.
func clearMinioHistoryTrashFile() {
	JobRealPrefix := setting.Attachment.Minio.RealPath + setting.Attachment.Minio.Bucket + "/" + setting.CBCodePathPrefix

	miniofiles, err := ioutil.ReadDir(JobRealPrefix)

	processCount := 0
	if err != nil {
		log.Warn("Can not browser minio job path.")
	} else {
		// oldest first, so everything before the first too-new entry is expired
		SortModTimeAscend(miniofiles)
		for _, file := range miniofiles {

			if file.Name()!="" && file.ModTime().Before(time.Now().AddDate(0, 0, -setting.ClearStrategy.TrashSaveDays)) {

				// only delete orphans: directories with no cloudbrain record
				has,err:=models.IsCloudbrainExistByJobName(file.Name())
				if err==nil && !has {
					dirPath := setting.CBCodePathPrefix + file.Name() + "/"
					log.Info("clear job in minio trash:" + file.Name())
					// best effort: the returned error is intentionally ignored
					storage.Attachments.DeleteDir(dirPath)
					processCount++
				}
				// cap the number of deletions per round
				if processCount == setting.ClearStrategy.BatchSize {
					break
				}
			} else {
				// entries are sorted ascending, so the rest are newer; stop here
				break
			}

		}

	}
}
+
// clearLocalHistoryTrashFile removes local job directories older than
// TrashSaveDays that have no matching cloudbrain record. Entries are visited
// oldest-first so the loop can stop at the first one newer than the cutoff.
func clearLocalHistoryTrashFile() {
	files, err := ioutil.ReadDir(setting.JobPath)
	processCount := 0
	if err != nil {
		log.Warn("Can not browser local job path.")
	} else {
		// oldest first, so everything before the first too-new entry is expired
		SortModTimeAscend(files)
		for _, file := range files {
			// clean historical trash data older than n days by removing the job directory
			if file.Name()!="" && file.ModTime().Before(time.Now().AddDate(0, 0, -setting.ClearStrategy.TrashSaveDays)) {
				// only delete orphans: directories with no cloudbrain record
				has,err:=models.IsCloudbrainExistByJobName(file.Name())
				if err==nil && !has{
					os.RemoveAll(setting.JobPath + file.Name())
					log.Info("clear job in local trash:"+file.Name())
					processCount++
				}
				// cap the number of deletions per round
				if processCount == setting.ClearStrategy.BatchSize {
					break
				}
			} else {
				// entries are sorted ascending, so the rest are newer; stop here
				break
			}

		}

	}

}
+
// SortModTimeAscend orders files in place from the oldest to the newest
// modification time.
func SortModTimeAscend(files []os.FileInfo) {
	byAge := func(a, b int) bool {
		return files[a].ModTime().Before(files[b].ModTime())
	}
	sort.Slice(files, byAge)
}
+
+func DeleteCloudbrainOneJobStorage(jobName string) error {
+
+ if jobName==""{
+ return nil
+ }
+ //delete local
+ localJobPath := setting.JobPath + jobName
+ err := os.RemoveAll(localJobPath)
+ if err != nil {
+ log.Error("RemoveAll(%s) failed:%v", localJobPath, err)
+ }
+
+ dirPath := setting.CBCodePathPrefix + jobName + "/"
+ err1 := storage.Attachments.DeleteDir(dirPath)
+
+ if err1 != nil {
+ log.Error("DeleteDir(%s) failed:%v", localJobPath, err)
+ }
+ if err == nil {
+ err = err1
+ }
+
+ return err
+}
diff --git a/services/cloudbrain/cloudbrainTask/count.go b/services/cloudbrain/cloudbrainTask/count.go
index 985706911..4ae742c3a 100644
--- a/services/cloudbrain/cloudbrainTask/count.go
+++ b/services/cloudbrain/cloudbrainTask/count.go
@@ -62,6 +62,16 @@ var StatusInfoDict = map[string]StatusInfo{string(models.JobTypeDebug) + "-" + s
JobType: []models.JobType{models.JobTypeTrain},
NotFinalStatuses: GrampusNotFinalStatuses,
ComputeResource: models.NPUResource,
+}, string(models.JobTypeDebug) + "-" + strconv.Itoa(models.TypeC2Net) + "-" + models.GPUResource: {
+ CloudBrainTypes: []int{models.TypeC2Net},
+ JobType: []models.JobType{models.JobTypeDebug},
+ NotFinalStatuses: GrampusNotFinalStatuses,
+ ComputeResource: models.GPUResource,
+}, string(models.JobTypeDebug) + "-" + strconv.Itoa(models.TypeC2Net) + "-" + models.NPUResource: {
+ CloudBrainTypes: []int{models.TypeC2Net},
+ JobType: []models.JobType{models.JobTypeDebug},
+ NotFinalStatuses: GrampusNotFinalStatuses,
+ ComputeResource: models.NPUResource,
}}
func GetNotFinalStatusTaskCount(uid int64, cloudbrainType int, jobType string, computeResource ...string) (int, error) {
diff --git a/services/cloudbrain/cloudbrainTask/notebook.go b/services/cloudbrain/cloudbrainTask/notebook.go
index 6b2fcf707..cc9563520 100644
--- a/services/cloudbrain/cloudbrainTask/notebook.go
+++ b/services/cloudbrain/cloudbrainTask/notebook.go
@@ -82,7 +82,7 @@ func FileNotebookCreate(ctx *context.Context, option api.CreateFileNotebookJobOp
})
}
if err != nil {
- ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.failed_to_create_notebook_repo",setting.FileNoteBook.ProjectName)))
+ ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.failed_to_create_notebook_repo", setting.FileNoteBook.ProjectName)))
return
}
if option.Type <= 1 {
@@ -291,10 +291,21 @@ func modelartsFileNoteBookCreate(ctx *context.Context, option api.CreateFileNote
}
var jobId string
+ req := cloudbrain.GenerateModelArtsNotebookReq{
+ DisplayJobName: displayJobName,
+ JobName: jobName,
+ Description: getDescription(option),
+ ImageId: setting.FileNoteBook.ImageIdNPU,
+ Spec: spec,
+ BootFile: "",
+ AutoStopDurationMs: modelarts.AutoStopDurationMs / 4,
+ }
+
if setting.ModelartsCD.Enabled {
- jobId, err = modelarts_cd.GenerateNotebook(ctx, displayJobName, jobName, "", getDescription(option), setting.FileNoteBook.ImageIdNPUCD, spec, option.File,modelarts.AutoStopDurationMs/4)
+ req.ImageId = setting.FileNoteBook.ImageIdNPUCD
+ jobId, err = modelarts_cd.GenerateNotebook(ctx, req)
} else {
- jobId, err = modelarts.GenerateNotebook2(ctx, displayJobName, jobName, "", getDescription(option), setting.FileNoteBook.ImageIdNPU, spec, option.File,modelarts.AutoStopDurationMs/4)
+ jobId, err = modelarts.GenerateNotebook2(ctx, req)
}
if err != nil {
diff --git a/services/cloudbrain/cloudbrainTask/sync_status.go b/services/cloudbrain/cloudbrainTask/sync_status.go
index 973b9bbc2..3bc09071c 100644
--- a/services/cloudbrain/cloudbrainTask/sync_status.go
+++ b/services/cloudbrain/cloudbrainTask/sync_status.go
@@ -3,9 +3,13 @@ package cloudbrainTask
import (
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/cloudbrain"
+ "code.gitea.io/gitea/modules/grampus"
"code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/modelarts"
+ "code.gitea.io/gitea/modules/modelarts_cd"
"code.gitea.io/gitea/modules/notification"
"code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/timeutil"
"net/http"
"strconv"
)
@@ -58,6 +62,55 @@ func SyncCloudBrainOneStatus(task *models.Cloudbrain) (*models.Cloudbrain, error
}
+func SyncGrampusNotebookStatus(job *models.Cloudbrain) (*models.Cloudbrain, error) {
+ result, err := grampus.GetNotebookJob(job.JobID)
+ if err != nil {
+
+ log.Error("GetJob(%s) failed:%v", job.JobName, err)
+
+ return job, err
+ }
+
+ if job.StartTime == 0 && result.JobInfo.StartedAt > 0 {
+ job.StartTime = timeutil.TimeStamp(result.JobInfo.StartedAt)
+ }
+ oldStatus := job.Status
+ job.Status = grampus.TransTrainJobStatus(result.JobInfo.Status)
+ job.Duration = result.JobInfo.RunSec
+ job.TrainJobDuration = models.ConvertDurationToStr(job.Duration)
+
+ if job.EndTime == 0 && models.IsTrainJobTerminal(job.Status) && job.StartTime > 0 {
+ job.EndTime = job.StartTime.Add(job.Duration)
+ }
+ job.CorrectCreateUnix()
+
+ if len(job.AiCenter) == 0 {
+ if len(result.JobInfo.Tasks) > 0 {
+ if len(result.JobInfo.Tasks[0].CenterID) > 0 && len(result.JobInfo.Tasks[0].CenterName) > 0 {
+ job.AiCenter = result.JobInfo.Tasks[0].CenterID[0] + "+" + result.JobInfo.Tasks[0].CenterName[0]
+ }
+ }
+ }
+
+ if job.Status != models.GrampusStatusWaiting {
+ if oldStatus != job.Status {
+ notification.NotifyChangeCloudbrainStatus(job, oldStatus)
+ }
+ if job.ComputeResource == models.NPUResource {
+ job.TrainUrl = result.JobInfo.Tasks[0].CodeUrl
+ job.DataUrl = result.JobInfo.Tasks[0].DataUrl
+ }
+ err = models.UpdateJob(job)
+ if err != nil {
+ log.Error("UpdateJob failed:", err)
+ return nil, err
+ }
+ }
+
+ return job, nil
+
+}
+
func isNoteBookReady(task *models.Cloudbrain) bool {
if task.JobType != string(models.JobTypeDebug) {
return true
@@ -90,3 +143,28 @@ func isNoteBookReady(task *models.Cloudbrain) bool {
return false
}
+
+func StopDebugJob(task *models.Cloudbrain) error {
+ param := models.NotebookAction{
+ Action: models.ActionStop,
+ }
+ var err error = nil
+
+ if task.JobType == string(models.JobTypeDebug) {
+ if task.Type == models.TypeCloudBrainOne {
+ return cloudbrain.StopJob(task.JobID)
+ } else if task.Type == models.TypeCloudBrainTwo {
+ _, err = modelarts.ManageNotebook2(task.JobID, param)
+
+ } else if task.Type == models.TypeCDCenter {
+ _, err = modelarts_cd.ManageNotebook(task.JobID, param)
+
+ } else if task.Type == models.TypeC2Net {
+ _, err = grampus.StopJob(task.JobID, task.JobType)
+
+ }
+
+ }
+ return err
+
+}
diff --git a/services/cloudbrain/resource/resource_specification.go b/services/cloudbrain/resource/resource_specification.go
index 8f4182d87..5070d7c1e 100644
--- a/services/cloudbrain/resource/resource_specification.go
+++ b/services/cloudbrain/resource/resource_specification.go
@@ -246,10 +246,10 @@ func FindAvailableSpecs(userId int64, opts models.FindSpecsOptions) ([]*models.S
return nil, err
}
//filter exclusive specs
- specs := filterExclusiveSpecs(r, userId)
+ specs := models.FilterExclusiveSpecs(r, userId)
//distinct by sourceSpecId
- specs = distinctSpecs(specs)
+ specs = models.DistinctSpecs(specs)
return specs, err
}
@@ -265,50 +265,6 @@ func FindAvailableSpecs4Show(userId int64, opts models.FindSpecsOptions) ([]*api
return result, nil
}
-func filterExclusiveSpecs(r []*models.Specification, userId int64) []*models.Specification {
- specs := make([]*models.Specification, 0, len(r))
- specMap := make(map[int64]string, 0)
- for i := 0; i < len(r); i++ {
- spec := r[i]
- if _, has := specMap[spec.ID]; has {
- continue
- }
- if !spec.IsExclusive {
- specs = append(specs, spec)
- specMap[spec.ID] = ""
- continue
- }
- orgs := strings.Split(spec.ExclusiveOrg, ";")
- for _, org := range orgs {
- isMember, _ := models.IsOrganizationMemberByOrgName(org, userId)
- if isMember {
- specs = append(specs, spec)
- specMap[spec.ID] = ""
- break
- }
- }
- }
- return specs
-}
-
-func distinctSpecs(r []*models.Specification) []*models.Specification {
- specs := make([]*models.Specification, 0, len(r))
- sourceSpecIdMap := make(map[string]string, 0)
- for i := 0; i < len(r); i++ {
- spec := r[i]
- if spec.SourceSpecId == "" {
- specs = append(specs, spec)
- continue
- }
- if _, has := sourceSpecIdMap[spec.SourceSpecId]; has {
- continue
- }
- specs = append(specs, spec)
- sourceSpecIdMap[spec.SourceSpecId] = ""
- }
- return specs
-}
-
func GetAndCheckSpec(userId int64, specId int64, opts models.FindSpecsOptions) (*models.Specification, error) {
if specId == 0 {
return nil, nil
diff --git a/services/repository/contributor.go b/services/repository/contributor.go
new file mode 100644
index 000000000..9a86b91dc
--- /dev/null
+++ b/services/repository/contributor.go
@@ -0,0 +1,88 @@
+package repository
+
+import (
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/redis/redis_client"
+ "code.gitea.io/gitea/modules/redis/redis_key"
+ "encoding/json"
+ "github.com/patrickmn/go-cache"
+ "time"
+)
+
// repoContributorCache is a process-local cache (5 min default expiration,
// cleanup every minute).
// NOTE(review): it appears unused by the functions in this file, which cache
// through redis instead — confirm whether it is still needed.
var repoContributorCache = cache.New(5*time.Minute, 1*time.Minute)

// ContributorCacheVal is the value serialized into redis for a repository's
// top-N contributors query: the top-N list plus the total contributor count.
type ContributorCacheVal struct {
	Contributors []*models.ContributorInfo
	Total int
}
+
// GetRepoTopNContributors returns a repository's top-N contributors and its
// total contributor count. It serves from redis when a cached value exists,
// otherwise it scans the git repository and caches the result back into redis
// for 2 minutes.
func GetRepoTopNContributors(repo *models.Repository, N int) ([]*models.ContributorInfo, int) {
	val, _ := redis_client.Get(redis_key.RepoTopNContributors(repo.ID, N))
	if val != "" {
		log.Debug("Get RepoTopNContributors from redis,repo.ID = %d value = %v", repo.ID, val)
		temp := &ContributorCacheVal{}
		// best effort: a decode failure simply yields empty results
		json.Unmarshal([]byte(val), temp)
		return temp.Contributors, temp.Total
	}

	contributorInfos, total := getRepoTopNContributorsFromDisk(repo, N)
	log.Debug("Get RepoTopNContributors from disk,repo.ID = %d ", repo.ID)
	jsonVal, err := json.Marshal(&ContributorCacheVal{Contributors: contributorInfos, Total: total})
	if err == nil {
		redis_client.Setex(redis_key.RepoTopNContributors(repo.ID, N), string(jsonVal), 2*time.Minute)
	}
	return contributorInfos, total
}
+
// getRepoTopNContributorsFromDisk scans the repository's default branch with
// git, aggregates commit counts per contributor (merging committer emails
// that resolve to the same system user's primary email), and returns up to N
// entries plus the total number of distinct committer identities.
func getRepoTopNContributorsFromDisk(repo *models.Repository, N int) ([]*models.ContributorInfo, int) {
	contributorInfos := make([]*models.ContributorInfo, 0)

	branchName := GetDefaultBranchName(repo)
	if branchName == "" {
		// empty or unreadable repository: no contributors
		return contributorInfos, 0
	}

	contributors, err := git.GetContributors(repo.RepoPath(), branchName)
	if err == nil && contributors != nil {
		// keyed by primary email (system users) or raw committer email
		contributorInfoHash := make(map[string]*models.ContributorInfo)
		for _, c := range contributors {
			if len(contributorInfos) >= N {
				break
			}
			if c.Email == "" {
				continue
			}
			// get user info from committer email
			user, err := models.GetUserByActivateEmail(c.Email)
			if err == nil {
				// committer is system user, get info through user's primary email
				if existedContributorInfo, ok := contributorInfoHash[user.Email]; ok {
					// existed: same primary email, different committer name
					existedContributorInfo.CommitCnt += c.CommitCnt
				} else {
					// new committer info
					var newContributor = &models.ContributorInfo{
						user.RelAvatarLink(), user.Name, user.Email, c.CommitCnt,
					}
					contributorInfos = append(contributorInfos, newContributor)
					contributorInfoHash[user.Email] = newContributor
				}
			} else {
				// committer is not a system user
				if existedContributorInfo, ok := contributorInfoHash[c.Email]; ok {
					// existed: same committer email seen before, merge counts
					existedContributorInfo.CommitCnt += c.CommitCnt
				} else {
					// unknown committer: no avatar or user name available
					var newContributor = &models.ContributorInfo{
						"", "", c.Email, c.CommitCnt,
					}
					contributorInfos = append(contributorInfos, newContributor)
					contributorInfoHash[c.Email] = newContributor
				}
			}
		}
	}
	return contributorInfos, len(contributors)
}
diff --git a/services/repository/repository.go b/services/repository/repository.go
index db25010ea..a5c7c2fc4 100644
--- a/services/repository/repository.go
+++ b/services/repository/repository.go
@@ -5,18 +5,19 @@
package repository
import (
- "fmt"
- "io/ioutil"
- "net/http"
- "os"
- "strings"
-
"code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/notification"
repo_module "code.gitea.io/gitea/modules/repository"
"code.gitea.io/gitea/modules/setting"
pull_service "code.gitea.io/gitea/services/pull"
+ "fmt"
+ "io/ioutil"
+ "net/http"
+ "os"
+ "strings"
+ "xorm.io/xorm"
)
const SHELL_FLAG_ON = 1
@@ -328,3 +329,47 @@ func IsUploadFileInvalidErr(err error) bool {
_, ok := err.(UploadFileInvalidErr)
return ok
}
+
// IncreaseRepoDatasetNum adds 1 to the dataset counter of the repository that
// owns the given dataset.
func IncreaseRepoDatasetNum(datasetID int64, engines ...*xorm.Engine) error {
	dataset, err := models.GetDatasetByID(datasetID)
	if err != nil {
		return err
	}
	return models.OperateRepoDatasetNum(dataset.RepoID, 1, engines...)
}

// IncreaseRepoModelNum adds 1 to the repository's model counter.
func IncreaseRepoModelNum(repoId int64, engines ...*xorm.Engine) error {
	return models.OperateRepoModelNum(repoId, 1, engines...)
}

// ResetRepoModelNum resets the repository's model counter (delegates to
// models.ResetRepoModelNum).
func ResetRepoModelNum(repoId int64) error {
	return models.ResetRepoModelNum(repoId)
}

// DecreaseRepoDatasetNum subtracts 1 from the dataset counter of the
// repository that owns the given dataset.
func DecreaseRepoDatasetNum(datasetID int64, engines ...*xorm.Engine) error {
	dataset, err := models.GetDatasetByID(datasetID)
	if err != nil {
		return err
	}
	return models.OperateRepoDatasetNum(dataset.RepoID, -1, engines...)
}

// DecreaseRepoModelNum subtracts 1 from the repository's model counter.
func DecreaseRepoModelNum(repoId int64, engines ...*xorm.Engine) error {
	return models.OperateRepoModelNum(repoId, -1, engines...)
}
+
+func GetDefaultBranchName(repo *models.Repository) string {
+ gitRepo, err := git.OpenRepository(repo.RepoPath())
+ if err != nil {
+ return ""
+ }
+ defer gitRepo.Close()
+ if len(repo.DefaultBranch) > 0 && gitRepo.IsBranchExist(repo.DefaultBranch) {
+ return repo.DefaultBranch
+ }
+ brs, _, err := gitRepo.GetBranches(0, 0)
+ if len(brs) > 0 {
+ return brs[0]
+ }
+ return ""
+}
diff --git a/services/repository/square.go b/services/repository/square.go
new file mode 100644
index 000000000..d68e5b189
--- /dev/null
+++ b/services/repository/square.go
@@ -0,0 +1,315 @@
+package repository
+
+import (
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "encoding/json"
+ "github.com/patrickmn/go-cache"
+ "time"
+)
+
+var repoSquareCache = cache.New(2*time.Minute, 1*time.Minute)
+
+const (
+ RREFERED_CACHE = "PreferredRepos"
+ REPO_BANNER_CACHE = "RepoBanner"
+ TOPICS_CACHE = "RepoTopics"
+ RECOMMEND_CACHE = "RecommendRepos"
+)
+
+func GetBanners() []map[string]string {
+ v, success := repoSquareCache.Get(REPO_BANNER_CACHE)
+ if success {
+ log.Debug("GetBanners from cache,value = %v", v)
+ if v == nil {
+ return nil
+ }
+ r := v.([]map[string]string)
+ return r
+ }
+ repoMap := getMapContent("repos/square_banner")
+ repoSquareCache.Set(REPO_BANNER_CACHE, repoMap, 1*time.Minute)
+ return repoMap
+}
+
+func GetTopics() []string {
+ v, success := repoSquareCache.Get(TOPICS_CACHE)
+ if success {
+ log.Debug("GetTopics from cache,value = %v", v)
+ if v == nil {
+ return nil
+ }
+ r := v.([]string)
+ return r
+ }
+ topics := getArrayContent("repos/recommend_topics")
+ repoSquareCache.Set(TOPICS_CACHE, topics, 1*time.Minute)
+ return topics
+}
+
+func getMapContent(fileName string) []map[string]string {
+ url := setting.RecommentRepoAddr + fileName
+ result, err := RecommendContentFromPromote(url)
+ remap := make([]map[string]string, 0)
+ if err == nil {
+ json.Unmarshal([]byte(result), &remap)
+ }
+ return remap
+}
+
+func getArrayContent(fileName string) []string {
+ url := setting.RecommentRepoAddr + fileName
+ result, err := RecommendContentFromPromote(url)
+ r := make([]string, 0)
+ if err == nil {
+ json.Unmarshal([]byte(result), &r)
+ }
+ return r
+}
+
// GetRecommendRepos returns the promoted "recommended repositories" list,
// serving from the local cache when present and rebuilding it from the
// promote server otherwise (the fresh value is cached for one minute).
func GetRecommendRepos() []map[string]interface{} {
	v, success := repoSquareCache.Get(RECOMMEND_CACHE)
	if success {
		log.Debug("GetRecommendRepos from cache,value = %v", v)
		if v == nil {
			return nil
		}
		r := v.([]map[string]interface{})
		return r
	}
	repoMap := getMapContent("home/projects")
	// best effort: a promote lookup error is ignored and the partial result cached
	r, _ := GetRecommendRepoFromPromote(repoMap)
	repoSquareCache.Set(RECOMMEND_CACHE, r, 1*time.Minute)
	return r
}
+
// GetPreferredRepos returns up to 10 selected public repositories in card
// format, serving from the local cache when present and caching a fresh DB
// query result for one minute otherwise.
func GetPreferredRepos() ([]*models.Repository4Card, error) {
	v, success := repoSquareCache.Get(RREFERED_CACHE)
	if success {
		log.Debug("GetPreferredRepos from cache,value = %v", v)
		if v == nil {
			return nil, nil
		}
		r := v.([]*models.Repository4Card)
		return r, nil
	}

	repos, err := models.GetSelectedRepos(models.FindSelectedReposOpts{
		ListOptions: models.ListOptions{
			PageSize: 10,
			Page: 1,
		},
		OnlyPublic: true,
	})
	if err != nil {
		return nil, err
	}
	result := make([]*models.Repository4Card, len(repos))
	for i, r := range repos {
		result[i] = r.ToCardFormat()
	}

	repoSquareCache.Set(RREFERED_CACHE, result, 1*time.Minute)
	return result, nil
}
+
// GetIncubationRepos returns up to 10 selected public repositories of the
// configured incubation organization in card format. A missing organization
// is not an error: it yields an empty list.
func GetIncubationRepos() ([]*models.Repository4Card, error) {
	org, err := models.GetOrgByName(setting.IncubationSourceOrgName)
	if models.IsErrOrgNotExist(err) {
		return make([]*models.Repository4Card, 0), nil
	}
	if err != nil {
		return nil, err
	}
	repos, err := models.GetSelectedRepos(models.FindSelectedReposOpts{
		ListOptions: models.ListOptions{
			PageSize: 10,
			Page: 1,
		},
		OrgId: org.ID,
		OnlyPublic: true,
	})
	if err != nil {
		return nil, err
	}
	result := make([]*models.Repository4Card, len(repos))
	for i, r := range repos {
		result[i] = r.ToCardFormat()
	}
	return result, nil
}
+
// GetHotPaperRepos returns up to 10 public repositories tagged with the
// configured paper topic, ordered by last-month visits then recent update,
// in card format.
func GetHotPaperRepos() ([]*models.Repository4Card, error) {
	rlist, _, err := models.SearchRepository(&models.SearchRepoOptions{
		ListOptions: models.ListOptions{
			Page: 1,
			PageSize: 10,
		},
		OrderBy: models.SearchOrderByLastMonthVisitsReverse + "," + models.SearchOrderByRecentUpdated,
		TopicOnly: true,
		TopicName: setting.PaperRepoTopicName,
		AllPublic: true,
	})
	if err != nil {
		return nil, err
	}
	result := make([]*models.Repository4Card, len(rlist))
	for i, r := range rlist {
		result[i] = r.ToCardFormat()
	}
	return result, nil
}
+
// FindReposOptions holds the paging, filter and sort parameters accepted by
// FindRepos; all fields are passed through to models.SearchRepository except
// Sort, which selects one of the predefined order-by combinations.
type FindReposOptions struct {
	models.ListOptions
	Actor *models.User // requesting user, forwarded for permission filtering
	Sort string // UI sort key, e.g. "mostpopular", "newest" (see FindRepos)
	Keyword string // search keyword
	Topic string // topic name filter
	Private bool // forwarded as SearchRepoOptions.Private
	OwnerID int64 // restrict results to one owner when non-zero
}
+
// FindRepos searches repositories for the repo-square/search page using the
// given filters and UI sort option, and returns the matches in card format
// (each enriched with its top-6 contributors) together with paging info.
func FindRepos(opts FindReposOptions) (*models.FindReposResponse, error) {

	var (
		repos []*models.Repository
		count int64
		err error
		orderBy models.SearchOrderBy
	)

	switch opts.Sort {
	//1. recently popular: last-month visits desc, then recent update, then name asc
	case "mostpopular":
		orderBy = models.SearchOrderByLastMonthVisitsReverse + "," + models.SearchOrderByRecentUpdated + "," + models.SearchOrderByAlphabetically
	//2. recently active: commit growth (commits in the last 4 months) desc, then recent update, then name asc
	case "mostactive":
		orderBy = models.SearchOrderByLastFourMonthCommitsReverse + "," + models.SearchOrderByRecentUpdated + "," + models.SearchOrderByAlphabetically
	//3. recently updated: recent update, then name asc
	case "recentupdate":
		orderBy = models.SearchOrderByRecentUpdated + "," + models.SearchOrderByAlphabetically
	//4. newest: creation time desc, then name asc
	case "newest":
		orderBy = models.SearchOrderByNewest + "," + models.SearchOrderByAlphabetically
	//5. most stars: star count desc, then recent update, then name asc
	case "moststars":
		orderBy = models.SearchOrderByStarsReverse + "," + models.SearchOrderByRecentUpdated + "," + models.SearchOrderByAlphabetically
	//6. most forks: fork count desc, then recent update, then name asc
	case "mostforks":
		orderBy = models.SearchOrderByForksReverse + "," + models.SearchOrderByRecentUpdated + "," + models.SearchOrderByAlphabetically
	//7. most datasets: dataset file count desc, then recent update, then name asc
	case "mostdatasets":
		orderBy = models.SearchOrderByDatasetCntReverse + "," + models.SearchOrderByRecentUpdated + "," + models.SearchOrderByAlphabetically
	//8. most AI tasks: AI task count desc, then recent update, then name asc
	case "mostaitasks":
		orderBy = models.SearchOrderByAiTaskCntReverse + "," + models.SearchOrderByRecentUpdated + "," + models.SearchOrderByAlphabetically
	//9. most models: model count desc (zero-size models not counted), then recent update, then name asc
	case "mostmodels":
		orderBy = models.SearchOrderByModelCntReverse + "," + models.SearchOrderByRecentUpdated + "," + models.SearchOrderByAlphabetically

	default:
		// unknown sort keys fall back to "recently popular"
		orderBy = models.SearchOrderByLastMonthVisitsReverse + "," + models.SearchOrderByRecentUpdated + "," + models.SearchOrderByAlphabetically
	}

	repos, count, err = models.SearchRepository(&models.SearchRepoOptions{
		ListOptions: opts.ListOptions,
		Actor: opts.Actor,
		OrderBy: orderBy,
		Private: opts.Private,
		Keyword: opts.Keyword,
		OwnerID: opts.OwnerID,
		AllPublic: true,
		AllLimited: true,
		TopicName: opts.Topic,
		IncludeDescription: setting.UI.SearchRepoDescription,
	})
	if err != nil {
		log.Error("FindRepos error when SearchRepository.%v", err)
		return nil, err
	}
	result := make([]*models.Repository4Card, len(repos))
	for i, r := range repos {
		t := r.ToCardFormat()
		// contributor lookup is best effort; failures leave the list empty
		contributors, _ := GetRepoTopNContributors(r, 6)
		t.Contributors = contributors
		result[i] = t
	}

	return &models.FindReposResponse{
		Repos: result,
		Total: count,
		Page: opts.Page,
		PageSize: opts.PageSize,
	}, nil
}
+
// ActiveUser is one entry of the repo-square "active users" list.
type ActiveUser struct {
	User *models.User4Front
	Followed bool // whether the current user already follows this user
	ShowButton bool // false when the entry is the current user themselves
}
+
// GetActiveUser4Square returns the top-5 users by last-30-days index for the
// repo square, preserving ranking order, each with follow state relative to
// currentUserId (0 means anonymous: Followed stays false).
func GetActiveUser4Square(currentUserId int64) ([]*ActiveUser, error) {
	result := make([]*ActiveUser, 0)
	userIds, err := models.QueryLast30DaysHighestIndexUsers(5)
	if err != nil {
		log.Error("ActiveUser err. %v", err)
		return result, err
	}
	if len(userIds) == 0 {
		return result, nil
	}

	users, err := models.GetUsersByIDs(userIds)
	if err != nil {
		// NOTE(review): this lookup error is swallowed and an empty list is
		// returned with a nil error — confirm this best-effort behavior is intended.
		return result, nil
	}
	usersMap := make(map[int64]*models.User)
	for _, v := range users {
		usersMap[v.ID] = v
	}

	// iterate userIds (not the map) to keep the ranking order; ids with no
	// matching user record are skipped
	for i := 0; i < len(userIds); i++ {
		userId := userIds[i]
		user := usersMap[userId]
		if user == nil {
			continue
		}
		isFollowed := false
		if currentUserId != 0 {
			isFollowed = models.IsFollowing(currentUserId, userId)
		}
		a := &ActiveUser{
			Followed: isFollowed,
			User: user.ToFrontFormat(),
			ShowButton: currentUserId != userId,
		}
		result = append(result, a)
	}
	return result, nil
}
+
+func GetActiveOrgs() ([]*models.User4Front, error) {
+ orgScores, err := models.FindTopNOpenIOrgs(5)
+ if err != nil {
+ return nil, err
+ }
+ orgs := make([]*models.User4Front, len(orgScores))
+ for i, v := range orgScores {
+ orgs[i] = v.ToFrontFormat()
+ }
+ return orgs, nil
+}
+
+func RefreshRepoStatData() {
+ repos, err := models.GetAllRepositories()
+ if err != nil {
+ log.Error("RefreshRepoStatData GetAllRepositories failed: %v", err.Error())
+ return
+ }
+ for _, repo := range repos {
+ models.SyncStatDataToRepo(repo)
+ }
+}
diff --git a/services/socketwrap/clientManager.go b/services/socketwrap/clientManager.go
index 7470b1198..7bac92ab8 100755
--- a/services/socketwrap/clientManager.go
+++ b/services/socketwrap/clientManager.go
@@ -10,7 +10,7 @@ import (
"github.com/elliotchance/orderedmap"
)
-var opTypes = []int{1, 2, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 17, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 35}
+var opTypes = []int{1, 2, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 17, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 35, 39, 40}
type ClientsManager struct {
Clients *orderedmap.OrderedMap
diff --git a/templates/admin/cloudbrain/list.tmpl b/templates/admin/cloudbrain/list.tmpl
index 94f80c0fa..f6d20216a 100755
--- a/templates/admin/cloudbrain/list.tmpl
+++ b/templates/admin/cloudbrain/list.tmpl
@@ -98,7 +98,7 @@
- {{if .Cluster}}{{.Cluster}}{{else}}--{{end}}
+ {{if .Cluster}}{{.Cluster}}{{else}}--{{end}}
- {{if .AiCenter}}{{.AiCenter}}{{else}}--{{end}}
+ {{if .AiCenter}}{{.AiCenter}}{{else}}--{{end}}
diff --git a/templates/base/head_navbar.tmpl b/templates/base/head_navbar.tmpl
index 5fb9c09d3..de55e6452 100755
--- a/templates/base/head_navbar.tmpl
+++ b/templates/base/head_navbar.tmpl
@@ -35,7 +35,7 @@
- {{.i18n.Tr "custom.head.project"}}
+ {{.i18n.Tr "custom.head.project"}}
{{.i18n.Tr "custom.head.dataset"}}
{{.i18n.Tr "repo.model_manager"}}
@@ -48,7 +48,7 @@
{{.i18n.Tr "explore"}}
- {{.i18n.Tr "custom.head.project"}}
+ {{.i18n.Tr "custom.head.project"}}
{{.i18n.Tr "custom.head.dataset"}}
{{.i18n.Tr "repo.model_manager"}}
@@ -89,7 +89,7 @@
{{.i18n.Tr "explore"}}
{{else if .IsLandingPageExplore}}
- {{.i18n.Tr "home"}}
+ {{.i18n.Tr "home"}}
{{else if .IsLandingPageOrganizations}}
{{.i18n.Tr "home"}}
{{end}}
diff --git a/templates/base/head_navbar_fluid.tmpl b/templates/base/head_navbar_fluid.tmpl
index 63291d6fb..8a4682e9d 100644
--- a/templates/base/head_navbar_fluid.tmpl
+++ b/templates/base/head_navbar_fluid.tmpl
@@ -32,7 +32,7 @@
- {{.i18n.Tr "custom.head.project"}}
+ {{.i18n.Tr "custom.head.project"}}
{{.i18n.Tr "custom.head.dataset"}}
{{.i18n.Tr "repo.model_manager"}}
@@ -45,7 +45,7 @@
{{.i18n.Tr "explore"}}
- {{.i18n.Tr "custom.head.project"}}
+ {{.i18n.Tr "custom.head.project"}}
{{.i18n.Tr "custom.head.dataset"}}
{{.i18n.Tr "repo.model_manager"}}
@@ -84,7 +84,7 @@
{{.i18n.Tr "explore"}}
{{else if .IsLandingPageExplore}}
- {{.i18n.Tr "home"}}
+ {{.i18n.Tr "home"}}
{{else if .IsLandingPageOrganizations}}
{{.i18n.Tr "home"}}
{{end}}
diff --git a/templates/base/head_navbar_home.tmpl b/templates/base/head_navbar_home.tmpl
index 334ef5a33..f6741b7c8 100644
--- a/templates/base/head_navbar_home.tmpl
+++ b/templates/base/head_navbar_home.tmpl
@@ -24,7 +24,7 @@
- {{.i18n.Tr "custom.head.project"}}
+ {{.i18n.Tr "custom.head.project"}}
{{.i18n.Tr "custom.head.dataset"}}
{{.i18n.Tr "repo.model_manager"}}
@@ -37,7 +37,7 @@
{{.i18n.Tr "explore"}}
- {{.i18n.Tr "custom.head.project"}}
+ {{.i18n.Tr "custom.head.project"}}
{{.i18n.Tr "custom.head.dataset"}}
{{.i18n.Tr "repo.model_manager"}}
@@ -77,7 +77,7 @@
{{.i18n.Tr "explore"}}
{{else if .IsLandingPageExplore}}
- {{.i18n.Tr "home"}}
+ {{.i18n.Tr "home"}}
{{else if .IsLandingPageOrganizations}}
{{.i18n.Tr "home"}}
{{end}}
diff --git a/templates/base/head_navbar_pro.tmpl b/templates/base/head_navbar_pro.tmpl
index 55a090128..9e1c1ebf7 100644
--- a/templates/base/head_navbar_pro.tmpl
+++ b/templates/base/head_navbar_pro.tmpl
@@ -34,7 +34,7 @@
- {{.i18n.Tr "custom.head.project"}}
+ {{.i18n.Tr "custom.head.project"}}
{{.i18n.Tr "custom.head.dataset"}}
{{.i18n.Tr "repo.model_manager"}}
@@ -47,7 +47,7 @@
{{.i18n.Tr "explore"}}
{{else if .IsLandingPageExplore}}
-
{{.i18n.Tr "home"}}
+
{{.i18n.Tr "home"}}
{{else if .IsLandingPageOrganizations}}
{{.i18n.Tr "home"}}
{{end}}
diff --git a/templates/custom/select_model.tmpl b/templates/custom/select_model.tmpl
index 81332b873..d5e0a998b 100644
--- a/templates/custom/select_model.tmpl
+++ b/templates/custom/select_model.tmpl
@@ -1,6 +1,6 @@
-
-
+
+
diff --git a/templates/explore/navbar.tmpl b/templates/explore/navbar.tmpl
index c8c81defb..3bc907458 100644
--- a/templates/explore/navbar.tmpl
+++ b/templates/explore/navbar.tmpl
@@ -1,6 +1,6 @@