你确认删除该任务么?此任务一旦删除不可恢复。
+diff --git a/custom/conf/app.ini.sample b/custom/conf/app.ini.sample index 9fd00a763..8bc971f2c 100755 --- a/custom/conf/app.ini.sample +++ b/custom/conf/app.ini.sample @@ -1096,9 +1096,48 @@ LOCATION = cn-south-222 BASE_PATH = attachment/ [modelarts] +ORGANIZATION = modelarts ENDPOINT = https://modelarts.cn-south-222.ai.pcl.cn PROJECT_ID = edfccf24aace4e17a56da6bcbb55a5aa PROJECT_NAME = cn-south-222_test USERNAME = test1 PASSWORD = Qizhi@test. DOMAIN = cn-south-222 + +[radar_map] +impact=0.3 +impact_watch=0.1 +impact_star=0.3 +impact_fork=0.3 +impact_code_download=0.2 +impact_comments=0.1 +impact_browser=0.1 + +completeness=0.1 +completeness_issues_closed=0.2 +completeness_releases=0.3 +completeness_develop_age=0.1 +completeness_dataset=0.1 +completeness_model=0.1 +completeness_wiki=0.1 + +liveness=0.3 +liveness_commit=0.2 +liveness_issue=0.2 +liveness_pr=0.2 +liveness_release=0.4 + +project_health=0.1 +project_health_issue_complete_ratio=100 + +team_health=0.1 +team_health_contributors=0.2 +team_health_key_contributors=0.6 +team_health_contributors_added=0.2 + +growth=0.1 +growth_code_lines=0.2 +growth_issue=0.2 +growth_contributors=0.2 +growth_commit=0.2 +growth_comments=0.2 diff --git a/models/attachment.go b/models/attachment.go index 684a38b21..d217a61a4 100755 --- a/models/attachment.go +++ b/models/attachment.go @@ -379,7 +379,7 @@ func GetUnDecompressAttachments() ([]*Attachment, error) { func getUnDecompressAttachments(e Engine) ([]*Attachment, error) { attachments := make([]*Attachment, 0, 10) - return attachments, e.Where("decompress_state = ? and dataset_id != 0 and attachment.type = ? and (name like '%.zip' or name like '%.tar.gz' or name like '%.tgz')", DecompressStateInit, TypeCloudBrainOne).Find(&attachments) + return attachments, e.Where("decompress_state = ? 
and dataset_id != 0 and (name like '%.zip' or name like '%.tar.gz' or name like '%.tgz')", DecompressStateInit).Find(&attachments) } func GetAllPublicAttachments() ([]*AttachmentUsername, error) { @@ -473,3 +473,7 @@ func GetAttachmentSizeByDatasetID(datasetID int64) (int64, error) { return total, nil } + +func GetAllAttachmentSize() (int64, error) { + return x.SumInt(&Attachment{}, "size") +} diff --git a/models/cloudbrain.go b/models/cloudbrain.go index 4b2bec8e6..af5e9f169 100755 --- a/models/cloudbrain.go +++ b/models/cloudbrain.go @@ -5,6 +5,7 @@ import ( "fmt" "strings" "time" + "xorm.io/builder" "xorm.io/xorm" @@ -27,6 +28,8 @@ const ( JobTypeDebug JobType = "DEBUG" JobTypeBenchmark JobType = "BENCHMARK" JobTypeSnn4imagenet JobType = "SNN4IMAGENET" + JobTypeBrainScore JobType = "BRAINSCORE" + JobTypeTrain JobType = "TRAIN" ModelArtsCreateQueue ModelArtsJobStatus = "CREATE_QUEUING" //免费资源创建排队中 ModelArtsCreating ModelArtsJobStatus = "CREATING" //创建中 @@ -46,22 +49,29 @@ const ( ) type Cloudbrain struct { - ID int64 `xorm:"pk autoincr"` - JobID string `xorm:"INDEX NOT NULL"` - JobType string `xorm:"INDEX NOT NULL DEFAULT 'DEBUG'"` - JobName string `xorm:"INDEX"` - Status string `xorm:"INDEX"` - UserID int64 `xorm:"INDEX"` - RepoID int64 `xorm:"INDEX"` - SubTaskName string `xorm:"INDEX"` - ContainerID string - ContainerIp string - CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"` - UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"` - DeletedAt time.Time `xorm:"deleted"` - CanDebug bool `xorm:"-"` - CanDel bool `xorm:"-"` - Type int `xorm:"INDEX DEFAULT 0"` + ID int64 `xorm:"pk autoincr"` + JobID string `xorm:"INDEX NOT NULL"` + JobType string `xorm:"INDEX NOT NULL DEFAULT 'DEBUG'"` + JobName string `xorm:"INDEX"` + Status string `xorm:"INDEX"` + UserID int64 `xorm:"INDEX"` + RepoID int64 `xorm:"INDEX"` + SubTaskName string `xorm:"INDEX"` + ContainerID string + ContainerIp string + CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"` + UpdatedUnix 
timeutil.TimeStamp `xorm:"INDEX updated"` + Duration int64 `xorm:"INDEX duration"` + TrainJobDuration string + DeletedAt time.Time `xorm:"deleted"` + CanDebug bool `xorm:"-"` + CanDel bool `xorm:"-"` + Type int `xorm:"INDEX DEFAULT 0"` + + VersionID int64 `xorm:"INDEX DEFAULT 0"` + VersionName string + Uuid string + DatasetName string User *User `xorm:"-"` Repo *Repository `xorm:"-"` @@ -144,29 +154,49 @@ type CloudbrainsOptions struct { SortType string CloudbrainIDs []int64 // JobStatus CloudbrainStatus - Type int + Type int + JobType string } type TaskPod struct { TaskRoleStatus struct { Name string `json:"name"` } `json:"taskRoleStatus"` - TaskStatuses []struct { - TaskIndex int `json:"taskIndex"` - PodUID string `json:"podUid"` - PodIP string `json:"podIp"` - PodName string `json:"podName"` - ContainerID string `json:"containerId"` - ContainerIP string `json:"containerIp"` - ContainerGpus string `json:"containerGpus"` - State string `json:"state"` - StartAt time.Time `json:"startAt"` - FinishedAt time.Time `json:"finishedAt"` - ExitCode int `json:"exitCode"` - ExitDiagnostics string `json:"exitDiagnostics"` - RetriedCount int `json:"retriedCount"` - StartTime string - FinishedTime string - } `json:"taskStatuses"` + //TaskStatuses []struct { + // TaskIndex int `json:"taskIndex"` + // PodUID string `json:"podUid"` + // PodIP string `json:"podIp"` + // PodName string `json:"podName"` + // ContainerID string `json:"containerId"` + // ContainerIP string `json:"containerIp"` + // ContainerGpus string `json:"containerGpus"` + // State string `json:"state"` + // StartAt time.Time `json:"startAt"` + // FinishedAt time.Time `json:"finishedAt"` + // ExitCode int `json:"exitCode"` + // ExitDiagnostics string `json:"exitDiagnostics"` + // RetriedCount int `json:"retriedCount"` + // StartTime string + // FinishedTime string + //} `json:"taskStatuses"` + TaskStatuses []TaskStatuses `json:"taskStatuses"` +} + +type TaskStatuses struct { + TaskIndex int `json:"taskIndex"` + 
PodUID string `json:"podUid"` + PodIP string `json:"podIp"` + PodName string `json:"podName"` + ContainerID string `json:"containerId"` + ContainerIP string `json:"containerIp"` + ContainerGpus string `json:"containerGpus"` + State string `json:"state"` + StartAt time.Time `json:"startAt"` + FinishedAt time.Time `json:"finishedAt"` + ExitCode int `json:"exitCode"` + ExitDiagnostics string `json:"exitDiagnostics"` + RetriedCount int `json:"retriedCount"` + StartTime string + FinishedTime string } type TaskInfo struct { @@ -254,6 +284,11 @@ func ConvertToJobResultPayload(input map[string]interface{}) (JobResultPayload, err := json.Unmarshal(data, &jobResultPayload) jobResultPayload.JobStatus.StartTime = time.Unix(jobResultPayload.JobStatus.CreatedTime/1000, 0).Format("2006-01-02 15:04:05") jobResultPayload.JobStatus.EndTime = time.Unix(jobResultPayload.JobStatus.CompletedTime/1000, 0).Format("2006-01-02 15:04:05") + + if jobResultPayload.JobStatus.State == string(JobWaiting) { + jobResultPayload.JobStatus.StartTime = "-" + jobResultPayload.JobStatus.EndTime = "-" + } return jobResultPayload, err } @@ -530,6 +565,260 @@ type NotebookDelResult struct { InstanceID string `json:"instance_id"` } +type CreateTrainJobParams struct { + JobName string `json:"job_name"` + Description string `json:"job_desc"` + Config Config `json:"config"` + WorkspaceID string `json:"workspace_id"` +} + +type Config struct { + WorkServerNum int `json:"worker_server_num"` + AppUrl string `json:"app_url"` //训练作业的代码目录 + BootFileUrl string `json:"boot_file_url"` //训练作业的代码启动文件,需要在代码目录下 + Parameter []Parameter `json:"parameter"` + DataUrl string `json:"data_url"` //训练作业需要的数据集OBS路径URL + //DatasetID string `json:"dataset_id"` + //DataVersionID string `json:"dataset_version_id"` + //DataSource []DataSource `json:"data_source"` + //SpecID int64 `json:"spec_id"` + EngineID int64 `json:"engine_id"` + //ModelID int64 `json:"model_id"` + TrainUrl string `json:"train_url"` //训练作业的输出文件OBS路径URL + LogUrl string 
`json:"log_url"` + //UserImageUrl string `json:"user_image_url"` + //UserCommand string `json:"user_command"` + CreateVersion bool `json:"create_version"` + //Volumes []Volumes `json:"volumes"` + Flavor Flavor `json:"flavor"` + PoolID string `json:"pool_id"` +} + +type CreateConfigParams struct { + ConfigName string `json:"config_name"` + Description string `json:"config_desc"` + WorkServerNum int `json:"worker_server_num"` + AppUrl string `json:"app_url"` //训练作业的代码目录 + BootFileUrl string `json:"boot_file_url"` //训练作业的代码启动文件,需要在代码目录下 + Parameter []Parameter `json:"parameter"` + DataUrl string `json:"data_url"` //训练作业需要的数据集OBS路径URL + //DatasetID string `json:"dataset_id"` + //DataVersionID string `json:"dataset_version_id"` + //DataSource []DataSource `json:"data_source"` + //SpecID int64 `json:"spec_id"` + EngineID int64 `json:"engine_id"` + //ModelID int64 `json:"model_id"` + TrainUrl string `json:"train_url"` //训练作业的输出文件OBS路径URL + LogUrl string `json:"log_url"` + //UserImageUrl string `json:"user_image_url"` + //UserCommand string `json:"user_command"` + //CreateVersion bool `json:"create_version"` + //Volumes []Volumes `json:"volumes"` + Flavor Flavor `json:"flavor"` + PoolID string `json:"pool_id"` +} + +type Parameter struct { + Label string `json:"label"` + Value string `json:"value"` +} + +type Parameters struct { + Parameter []Parameter `json:"parameter"` +} + +type DataSource struct { + DatasetID string `json:"dataset_id"` + DatasetVersion string `json:"dataset_version"` + Type string `json:"type"` + DataUrl string `json:"data_url"` +} + +type Volumes struct { + Nfs Nfs `json:"nfs"` + HostPath HostPath `json:"host_path"` +} + +type Nfs struct { + ID string `json:"id"` + SourcePath string `json:"src_path"` + DestPath string `json:"dest_path"` + ReadOnly bool `json:"read_only"` +} + +type HostPath struct { + SourcePath string `json:"src_path"` + DestPath string `json:"dest_path"` + ReadOnly bool `json:"read_only"` +} + +type Flavor struct { + Code string 
`json:"code"` +} + +type CreateTrainJobResult struct { + ErrorCode string `json:"error_code"` + ErrorMsg string `json:"error_msg"` + IsSuccess bool `json:"is_success"` + JobName string `json:"job_name"` + JobID int64 `json:"job_id"` + Status int `json:"status"` + CreateTime int64 `json:"create_time"` + VersionID int64 `json:"version_id"` + ResourceID string `json:"resource_id"` + VersionName string `json:"version_name"` +} + +type CreateTrainJobConfigResult struct { + ErrorCode string `json:"error_code"` + ErrorMsg string `json:"error_msg"` + IsSuccess bool `json:"is_success"` +} + +type GetResourceSpecsResult struct { + ErrorCode string `json:"error_code"` + ErrorMsg string `json:"error_msg"` + IsSuccess bool `json:"is_success"` + SpecTotalCount int `json:"spec_total_count"` + Specs []Specs `json:"specs"` +} + +type Specs struct { + Core string `json:"core"` + Cpu string `json:"cpu"` + IsNoResource bool `json:"no_resource"` + GpuType string `json:"gpu_type"` + SpecID int64 `json:"spec_id"` + GpuNum int `json:"gpu_num"` + SpecCode string `json:"spec_code"` + Storage string `json:"storage"` + MaxNum int `json:"max_num"` + UnitNum int `json:"unit_num"` + InterfaceType int `json:"interface_type"` +} + +type GetConfigListResult struct { + ErrorCode string `json:"error_code"` + ErrorMsg string `json:"error_msg"` + IsSuccess bool `json:"is_success"` + ConfigTotalCount int `json:"config_total_count"` + ParaConfigs []ParaConfig `json:"configs"` +} + +type ParaConfig struct { + ConfigName string `json:"config_name"` + ConfigDesc string `json:"config_desc"` + CreateTime int64 `json:"create_time"` + EngineType int `json:"engine_type"` + EngineName string `json:"engine_name"` + EngineId int64 `json:"engine_id"` + EngineVersion string `json:"engine_version"` + UserImageUrl string `json:"user_image_url"` + UserCommand string `json:"user_command"` + Result GetConfigResult +} + +type GetConfigResult struct { + ErrorCode string `json:"error_code"` + ErrorMsg string 
`json:"error_msg"` + IsSuccess bool `json:"is_success"` + ConfigName string `json:"config_name"` + Description string `json:"config_desc"` + WorkServerNum int `json:"worker_server_num"` + AppUrl string `json:"app_url"` //训练作业的代码目录 + BootFileUrl string `json:"boot_file_url"` //训练作业的代码启动文件,需要在代码目录下 + Parameter []Parameter `json:"parameter"` + DataUrl string `json:"data_url"` //训练作业需要的数据集OBS路径URL + //DatasetID string `json:"dataset_id"` + //DataVersionID string `json:"dataset_version_id"` + //DataSource []DataSource `json:"data_source"` + //SpecID int64 `json:"spec_id"` + EngineID int64 `json:"engine_id"` + //ModelID int64 `json:"model_id"` + TrainUrl string `json:"train_url"` //训练作业的输出文件OBS路径URL + LogUrl string `json:"log_url"` + //UserImageUrl string `json:"user_image_url"` + //UserCommand string `json:"user_command"` + //CreateVersion bool `json:"create_version"` + //Volumes []Volumes `json:"volumes"` + Flavor Flavor `json:"flavor"` + PoolID string `json:"pool_id"` +} + +type ErrorResult struct { + ErrorCode string `json:"error_code"` + ErrorMsg string `json:"error_message"` + IsSuccess bool `json:"is_success"` +} + +type GetTrainJobResult struct { + IsSuccess bool `json:"is_success"` + JobName string `json:"job_name"` + JobID int64 `json:"job_id"` + Description string `json:"job_desc"` + IntStatus int `json:"status"` + Status string + LongCreateTime int64 `json:"create_time"` + CreateTime string + Duration int64 `json:"duration"` //训练作业的运行时间,单位为毫秒 + TrainJobDuration string //训练作业的运行时间,格式为hh:mm:ss + VersionID int64 `json:"version_id"` + ResourceID string `json:"resource_id"` + VersionName string `json:"version_name"` + PreVersionID int64 `json:"pre_version_id"` + WorkServerNum int `json:"worker_server_num"` + AppUrl string `json:"app_url"` //训练作业的代码目录 + BootFileUrl string `json:"boot_file_url"` //训练作业的代码启动文件,需要在代码目录下 + Parameter []Parameter `json:"parameter"` + DataUrl string `json:"data_url"` //训练作业需要的数据集OBS路径URL + //DatasetID string `json:"dataset_id"` + 
//DataVersionID string `json:"dataset_version_id"` + //DataSource []DataSource `json:"data_source"` + //SpecID int64 `json:"spec_id"` + EngineID int64 `json:"engine_id"` + EngineName string `json:"engine_name"` + EngineVersion string `json:"engine_version"` + //ModelID int64 `json:"model_id"` + TrainUrl string `json:"train_url"` //训练作业的输出文件OBS路径URL + LogUrl string `json:"log_url"` + //UserImageUrl string `json:"user_image_url"` + //UserCommand string `json:"user_command"` + //Volumes []Volumes `json:"volumes"` + Flavor Flavor `json:"flavor"` + PoolID string `json:"pool_id"` + PoolName string `json:"pool_name"` + NasMountPath string `json:"nas_mount_path"` + NasShareAddr string `json:"nas_share_addr"` + DatasetName string +} + +type GetTrainJobLogResult struct { + ErrorCode string `json:"error_code"` + ErrorMsg string `json:"error_msg"` + IsSuccess bool `json:"is_success"` + Content string `json:"content"` + Lines int `json:"lines"` + StartLine string `json:"start_line"` + EndLine string `json:"end_line"` +} + +type GetTrainJobLogFileNamesResult struct { + ErrorCode string `json:"error_code"` + ErrorMsg string `json:"error_msg"` + IsSuccess bool `json:"is_success"` + LogFileList []string `json:"log_file_list"` +} + +type TrainJobResult struct { + ErrorCode string `json:"error_code"` + ErrorMsg string `json:"error_msg"` + IsSuccess bool `json:"is_success"` +} + +type LogFile struct { + Name string +} + func Cloudbrains(opts *CloudbrainsOptions) ([]*CloudbrainInfo, int64, error) { sess := x.NewSession() defer sess.Close() @@ -559,6 +848,12 @@ func Cloudbrains(opts *CloudbrainsOptions) ([]*CloudbrainInfo, int64, error) { ) } + if (opts.JobType) != "" { + cond = cond.And( + builder.Eq{"cloudbrain.job_type": opts.JobType}, + ) + } + // switch opts.JobStatus { // case JobWaiting: // cond.And(builder.Eq{"cloudbrain.status": int(JobWaiting)}) @@ -647,6 +942,12 @@ func SetCloudbrainStatusByJobID(jobID string, status CloudbrainStatus) (err erro return } +func 
SetTrainJobStatusByJobID(jobID string, status string, duration int64, trainjobduration string) (err error) { + cb := &Cloudbrain{JobID: jobID, Status: string(status), Duration: duration, TrainJobDuration: trainjobduration} + _, err = x.Cols("status", "duration", "train_job_duration").Where("cloudbrain.job_id=?", jobID).Update(cb) + return +} + func UpdateJob(job *Cloudbrain) error { return updateJob(x, job) } @@ -658,6 +959,17 @@ func updateJob(e Engine, job *Cloudbrain) error { return err } +// func UpdateTrainJob(job *CloudbrainInfo) error { +// return updateTrainJob(x, job) +// } + +// func updateTrainJob(e Engine, job *CloudbrainInfo) error { +// var sess *xorm.Session +// sess = e.Where("job_id = ?", job.Cloudbrain.JobID) +// _, err := sess.Cols("status", "container_id", "container_ip").Update(job) +// return err +// } + func DeleteJob(job *Cloudbrain) error { return deleteJob(x, job) } @@ -673,7 +985,7 @@ func GetCloudbrainByName(jobName string) (*Cloudbrain, error) { } func CanDelJob(isSigned bool, user *User, job *CloudbrainInfo) bool { - if !isSigned || job.Status != string(JobStopped) { + if !isSigned || (job.Status != string(JobStopped) && job.Status != string(JobFailed) && job.Status != string(ModelArtsStartFailed) && job.Status != string(ModelArtsCreateFailed)) { return false } repo, err := GetRepositoryByID(job.RepoID) diff --git a/models/dataset.go b/models/dataset.go index e7160006d..402a548ef 100755 --- a/models/dataset.go +++ b/models/dataset.go @@ -139,7 +139,14 @@ func SearchDatasetCondition(opts *SearchDatasetOptions) builder.Cond { if opts.IncludePublic { cond = cond.And(builder.Eq{"dataset.status": DatasetStatusPublic}) if opts.OwnerID > 0 { - cond = cond.Or(builder.Eq{"repository.owner_id": opts.OwnerID}) + if len(opts.Keyword) == 0 { + cond = cond.Or(builder.Eq{"repository.owner_id": opts.OwnerID}) + } else { + subCon := builder.NewCond() + subCon = subCon.And(builder.Eq{"repository.owner_id": opts.OwnerID}, builder.Like{"dataset.title", 
opts.Keyword}) + cond = cond.Or(subCon) + + } } } else if opts.OwnerID > 0 { cond = cond.And(builder.Eq{"repository.owner_id": opts.OwnerID}) diff --git a/models/file_chunk.go b/models/file_chunk.go index b849f0108..76c926dc5 100755 --- a/models/file_chunk.go +++ b/models/file_chunk.go @@ -14,8 +14,8 @@ const ( ) const ( - TypeCloudBrainOne = 0 - TypeCloudBrainTwo = 1 + TypeCloudBrainOne int = iota + TypeCloudBrainTwo ) type FileChunk struct { diff --git a/models/models.go b/models/models.go index 412148235..696d0949b 100755 --- a/models/models.go +++ b/models/models.go @@ -137,7 +137,9 @@ func init() { tablesStatistic = append(tablesStatistic, new(RepoStatistic), + new(SummaryStatistic), new(UserBusinessAnalysis), + new(UserLoginLog), ) gonicNames := []string{"SSL", "UID"} diff --git a/models/repo.go b/models/repo.go index 7f4bfebba..c8629875e 100755 --- a/models/repo.go +++ b/models/repo.go @@ -6,13 +6,14 @@ package models import ( - "code.gitea.io/gitea/modules/blockchain" "context" "crypto/md5" "errors" "fmt" "html/template" + "code.gitea.io/gitea/modules/blockchain" + // Needed for jpeg support _ "image/jpeg" "image/png" @@ -171,11 +172,11 @@ type Repository struct { NumOpenIssues int `xorm:"-"` NumPulls int NumClosedPulls int - NumOpenPulls int `xorm:"-"` - NumMilestones int `xorm:"NOT NULL DEFAULT 0"` - NumClosedMilestones int `xorm:"NOT NULL DEFAULT 0"` - NumOpenMilestones int `xorm:"-"` - NumCommit int64 `xorm:"NOT NULL DEFAULT 0"` + NumOpenPulls int `xorm:"-"` + NumMilestones int `xorm:"NOT NULL DEFAULT 0"` + NumClosedMilestones int `xorm:"NOT NULL DEFAULT 0"` + NumOpenMilestones int `xorm:"-"` + NumCommit int64 `xorm:"NOT NULL DEFAULT 0"` IsPrivate bool `xorm:"INDEX"` IsEmpty bool `xorm:"INDEX"` @@ -215,8 +216,8 @@ type Repository struct { CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"` UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"` - Hot int64 `xorm:"-"` - Active int64 `xorm:"-"` + Hot int64 `xorm:"-"` + Active int64 `xorm:"-"` } // 
SanitizedOriginalURL returns a sanitized OriginalURL @@ -1430,6 +1431,15 @@ func GetAllRepositoriesByFilterCols(columns ...string) ([]*Repository, error) { } +func GetAllRepositoriesCount() (int64, error) { + repo := new(Repository) + return x.Count(repo) +} + +func GetAllRepositoriesSize() (int64, error) { + return x.SumInt(&Repository{}, "size") +} + func updateRepository(e Engine, repo *Repository, visibilityChanged bool) (err error) { repo.LowerName = strings.ToLower(repo.Name) @@ -2464,7 +2474,7 @@ func (repo *Repository) IncreaseCloneCnt() { } func UpdateRepositoryCommitNum(repo *Repository) error { - if _,err := x.Exec("UPDATE `repository` SET num_commit = ? where id = ?", repo.NumCommit, repo.ID); err != nil { + if _, err := x.Exec("UPDATE `repository` SET num_commit = ? where id = ?", repo.NumCommit, repo.ID); err != nil { return err } diff --git a/models/repo_activity_custom.go b/models/repo_activity_custom.go index f6cbf0331..9cb7e4a09 100644 --- a/models/repo_activity_custom.go +++ b/models/repo_activity_custom.go @@ -1,13 +1,115 @@ package models -import "code.gitea.io/gitea/modules/git" +import ( + "fmt" + "strings" + "time" + + "code.gitea.io/gitea/modules/git" +) func GetRepoKPIStats(repo *Repository) (*git.RepoKPIStats, error) { wikiPath := "" if repo.HasWiki() { wikiPath = repo.WikiPath() } - return git.GetRepoKPIStats(repo.RepoPath(), wikiPath) + return getRepoKPIStats(repo.RepoPath(), wikiPath) +} + +func getRepoKPIStats(repoPath string, wikiPath string) (*git.RepoKPIStats, error) { + stats := &git.RepoKPIStats{} + + contributors, err := git.GetContributors(repoPath) + if err != nil { + return nil, err + } + timeUntil := time.Now() + fourMonthAgo := timeUntil.AddDate(0, -4, 0) + recentlyContributors, err := git.GetContributorsDetail(repoPath, fourMonthAgo) + newContributersDict := make(map[string]struct{}) + if err != nil { + return nil, err + } + + if contributors != nil { + contributorDistinctDict := make(map[string]int, 0) + 
keyContributorsDict := make(map[string]struct{}, 0) + + for _, contributor := range contributors { + if strings.Compare(contributor.Email, "") == 0 { + continue + } + + user, err := GetUserByActivateEmail(contributor.Email) + if err == nil { + value, ok := contributorDistinctDict[user.Email] + if !ok { + contributorDistinctDict[user.Email] = contributor.CommitCnt + } else { + contributorDistinctDict[user.Email] = value + contributor.CommitCnt + } + setKeyContributerDict(contributorDistinctDict, user.Email, keyContributorsDict) + + } else { + value, ok := contributorDistinctDict[contributor.Email] + if !ok { + contributorDistinctDict[contributor.Email] = contributor.CommitCnt + } else { + contributorDistinctDict[contributor.Email] = value + contributor.CommitCnt + } + setKeyContributerDict(contributorDistinctDict, contributor.Email, keyContributorsDict) + } + + } + + if recentlyContributors != nil { + for _, recentlyContributor := range recentlyContributors { + + user, err := GetUserByActivateEmail(recentlyContributor.Email) + var ok bool + if err == nil { + _, ok = contributorDistinctDict[user.Email] + } else { + _, ok = contributorDistinctDict[recentlyContributor.Email] + } + + if !ok { + stats.ContributorsAdded++ + newContributersDict[recentlyContributor.Email] = struct{}{} + } + + } + } + + stats.Contributors = int64(len(contributorDistinctDict)) + stats.KeyContributors = int64(len(keyContributorsDict)) + + } + + err = git.SetDevelopAge(repoPath, stats) + if err != nil { + return nil, fmt.Errorf("FillFromGit: %v", err) + } + err = git.SetRepoKPIStats(repoPath, fourMonthAgo, stats, newContributersDict) + + if err != nil { + return nil, fmt.Errorf("FillFromGit: %v", err) + } + + git.SetWikiPages(wikiPath, stats) + return stats, nil + +} + +func setKeyContributerDict(contributorDistinctDict map[string]int, email string, keyContributorsDict map[string]struct{}) { + if contributorDistinctDict[email] >= 3 { + _, ok := keyContributorsDict[email] + if !ok { + 
keyContributorsDict[email] = struct{}{} + + } + + } } func GetAllUserKPIStats() (map[string]*git.UserKPIStats, error) { diff --git a/models/repo_statistic.go b/models/repo_statistic.go index b987f4f46..adef672e0 100755 --- a/models/repo_statistic.go +++ b/models/repo_statistic.go @@ -1,38 +1,64 @@ package models import ( - "code.gitea.io/gitea/modules/timeutil" "fmt" + "time" + + "code.gitea.io/gitea/modules/timeutil" ) // RepoStatistic statistic info of all repository type RepoStatistic struct { - ID int64 `xorm:"pk autoincr"` - RepoID int64 `xorm:"unique(s) NOT NULL"` - Date string `xorm:"unique(s) NOT NULL"` - NumWatches int64 `xorm:"NOT NULL DEFAULT 0"` - NumStars int64 `xorm:"NOT NULL DEFAULT 0"` - NumForks int64 `xorm:"NOT NULL DEFAULT 0"` - NumDownloads int64 `xorm:"NOT NULL DEFAULT 0"` - NumComments int64 `xorm:"NOT NULL DEFAULT 0"` - NumVisits int64 `xorm:"NOT NULL DEFAULT 0"` - NumClosedIssues int64 `xorm:"NOT NULL DEFAULT 0"` - NumVersions int64 `xorm:"NOT NULL DEFAULT 0"` - //develop months - NumDevMonths int64 `xorm:"NOT NULL DEFAULT 0"` - RepoSize int64 `xorm:"NOT NULL DEFAULT 0"` - DatasetSize int64 `xorm:"NOT NULL DEFAULT 0"` - NumModels int64 `xorm:"NOT NULL DEFAULT 0"` - NumWikiViews int64 `xorm:"NOT NULL DEFAULT 0"` - NumCommits int64 `xorm:"NOT NULL DEFAULT 0"` - NumIssues int64 `xorm:"NOT NULL DEFAULT 0"` - NumPulls int64 `xorm:"NOT NULL DEFAULT 0"` - IssueFixedRate float32 `xorm:"NOT NULL"` - NumContributor int64 `xorm:"NOT NULL DEFAULT 0"` - NumKeyContributor int64 `xorm:"NOT NULL DEFAULT 0"` - - CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"` - UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"` + ID int64 `xorm:"pk autoincr"` + RepoID int64 `xorm:"unique(s) NOT NULL"` + Name string `xorm:"INDEX"` + IsPrivate bool + Date string `xorm:"unique(s) NOT NULL"` + NumWatches int64 `xorm:"NOT NULL DEFAULT 0"` + NumWatchesAdded int64 `xorm:"NOT NULL DEFAULT 0"` + NumStars int64 `xorm:"NOT NULL DEFAULT 0"` + NumStarsAdded int64 `xorm:"NOT NULL 
DEFAULT 0"` + NumForks int64 `xorm:"NOT NULL DEFAULT 0"` + NumForksAdded int64 `xorm:"NOT NULL DEFAULT 0"` + NumDownloads int64 `xorm:"NOT NULL DEFAULT 0"` + NumDownloadsAdded int64 `xorm:"NOT NULL DEFAULT 0"` + NumComments int64 `xorm:"NOT NULL DEFAULT 0"` + NumCommentsAdded int64 `xorm:"NOT NULL DEFAULT 0"` + NumVisits int64 `xorm:"NOT NULL DEFAULT 0"` + NumClosedIssues int64 `xorm:"NOT NULL DEFAULT 0"` + NumClosedIssuesAdded int64 `xorm:"NOT NULL DEFAULT 0"` + NumVersions int64 `xorm:"NOT NULL DEFAULT 0"` + NumDevMonths int64 `xorm:"NOT NULL DEFAULT 0"` + RepoSize int64 `xorm:"NOT NULL DEFAULT 0"` + DatasetSize int64 `xorm:"NOT NULL DEFAULT 0"` + NumModels int64 `xorm:"NOT NULL DEFAULT 0"` + NumWikiViews int64 `xorm:"NOT NULL DEFAULT 0"` + NumCommits int64 `xorm:"NOT NULL DEFAULT 0"` + NumCommitsAdded int64 `xorm:"NOT NULL DEFAULT 0"` + NumIssues int64 `xorm:"NOT NULL DEFAULT 0"` + NumIssuesAdded int64 `xorm:"NOT NULL DEFAULT 0"` + NumPulls int64 `xorm:"NOT NULL DEFAULT 0"` + NumPullsAdded int64 `xorm:"NOT NULL DEFAULT 0"` + IssueFixedRate float32 `xorm:"NOT NULL"` + NumContributor int64 `xorm:"NOT NULL DEFAULT 0"` + NumContributorAdded int64 `xorm:"NOT NULL DEFAULT 0"` + NumKeyContributor int64 `xorm:"NOT NULL DEFAULT 0"` + + NumContributorsGrowth int64 `xorm:"NOT NULL DEFAULT 0"` + NumCommitsGrowth int64 `xorm:"NOT NULL DEFAULT 0"` + NumCommitLinesGrowth int64 `xorm:"NOT NULL DEFAULT 0"` + NumIssuesGrowth int64 `xorm:"NOT NULL DEFAULT 0"` + NumCommentsGrowth int64 `xorm:"NOT NULL DEFAULT 0"` + + Impact float64 `xorm:"NOT NULL DEFAULT 0"` + Completeness float64 `xorm:"NOT NULL DEFAULT 0"` + Liveness float64 `xorm:"NOT NULL DEFAULT 0"` + ProjectHealth float64 `xorm:"NOT NULL DEFAULT 0"` + TeamHealth float64 `xorm:"NOT NULL DEFAULT 0"` + Growth float64 `xorm:"NOT NULL DEFAULT 0"` + RadarTotal float64 `xorm:"NOT NULL DEFAULT 0"` + CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"` + UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"` } func 
DeleteRepoStatDaily(date string) error { @@ -55,6 +81,35 @@ func DeleteRepoStatDaily(date string) error { return nil } +func GetRepoStatisticByDate(date string) ([]*RepoStatistic, error) { + repoStatistics := make([]*RepoStatistic, 0) + err := xStatistic.Where("date = ?", date).Find(&repoStatistics) + return repoStatistics, err + +} + +func GetOneRepoStatisticBeforeTime(time time.Time) (*RepoStatistic, error) { + repoStatistics := make([]*RepoStatistic, 0) + err := xStatistic.Where("created_unix >= ?", time.Unix()).OrderBy("created_unix").Limit(1).Find(&repoStatistics) + if err != nil { + return nil, err + } else { + if len(repoStatistics) == 0 { + return nil, fmt.Errorf("the repo statistic record count is 0") + } else { + return repoStatistics[0], nil + } + } + +} + func InsertRepoStat(repoStat *RepoStatistic) (int64, error) { return xStatistic.Insert(repoStat) } + +func UpdateRepoStat(repoStat *RepoStatistic) error { + sql := "update repo_statistic set impact=?,completeness=?,liveness=?,project_health=?,team_health=?,growth=?,radar_total=? where repo_id=? and date=?" 
+ + _, err := xStatistic.Exec(sql, repoStat.Impact, repoStat.Completeness, repoStat.Liveness, repoStat.ProjectHealth, repoStat.TeamHealth, repoStat.Growth, repoStat.RadarTotal, repoStat.RepoID, repoStat.Date) + return err +} diff --git a/models/summary_statistic.go b/models/summary_statistic.go new file mode 100644 index 000000000..0addd472b --- /dev/null +++ b/models/summary_statistic.go @@ -0,0 +1,69 @@ +package models + +import ( + "fmt" + + "code.gitea.io/gitea/modules/timeutil" +) + +var DomainMap = map[string]int{ + "大模型": 0, + "ai开发工具": 1, + "计算机视觉": 2, + "自然语言处理": 3, + "机器学习": 4, + "神经网络": 5, + "自动驾驶": 6, + "机器人": 7, + "联邦学习": 8, + "数据挖掘": 9, + "risc-v开发": 10, +} + +type SummaryStatistic struct { + ID int64 `xorm:"pk autoincr"` + Date string `xorm:"unique(s) NOT NULL"` + NumUsers int64 `xorm:"NOT NULL DEFAULT 0"` + RepoSize int64 `xorm:"NOT NULL DEFAULT 0"` + DatasetSize int64 `xorm:"NOT NULL DEFAULT 0"` + NumOrganizations int64 `xorm:"NOT NULL DEFAULT 0"` + NumModels int64 `xorm:"NOT NULL DEFAULT 0"` + NumRepos int64 `xorm:"NOT NULL DEFAULT 0"` + NumRepoBigModel int `xorm:"NOT NULL DEFAULT 0"` + NumRepoAI int `xorm:"NOT NULL DEFAULT 0"` + NumRepoVision int `xorm:"NOT NULL DEFAULT 0"` + NumRepoNLP int `xorm:"NOT NULL DEFAULT 0"` + NumRepoML int `xorm:"NOT NULL DEFAULT 0"` + NumRepoNN int `xorm:"NOT NULL DEFAULT 0"` + NumRepoAutoDrive int `xorm:"NOT NULL DEFAULT 0"` + NumRepoRobot int `xorm:"NOT NULL DEFAULT 0"` + NumRepoLeagueLearn int `xorm:"NOT NULL DEFAULT 0"` + NumRepoDataMining int `xorm:"NOT NULL DEFAULT 0"` + NumRepoRISC int `xorm:"NOT NULL DEFAULT 0"` + CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"` + UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"` +} + +func DeleteSummaryStatisticDaily(date string) error { + sess := xStatistic.NewSession() + defer sess.Close() + if err := sess.Begin(); err != nil { + return fmt.Errorf("Begin: %v", err) + } + + if _, err := sess.Where("date = ?", date).Delete(&SummaryStatistic{}); err != nil { + return 
fmt.Errorf("Delete: %v", err) + } + + if err := sess.Commit(); err != nil { + sess.Close() + return fmt.Errorf("Commit: %v", err) + } + + sess.Close() + return nil +} + +func InsertSummaryStatistic(summaryStatistic *SummaryStatistic) (int64, error) { + return xStatistic.Insert(summaryStatistic) +} diff --git a/models/topic.go b/models/topic.go index b8d3d9d85..5533da7bc 100644 --- a/models/topic.go +++ b/models/topic.go @@ -98,6 +98,13 @@ func GetTopicByName(name string) (*Topic, error) { return &topic, nil } +func GetAllUsedTopics() ([]*Topic, error) { + topics := make([]*Topic, 0) + err := x.Where("repo_count > ?", 0).Find(&topics) + return topics, err + +} + // addTopicByNameToRepo adds a topic name to a repo and increments the topic count. // Returns topic after the addition func addTopicByNameToRepo(e Engine, repoID int64, topicName string) (*Topic, error) { @@ -178,7 +185,7 @@ func (opts *FindTopicOptions) toConds() builder.Cond { } if opts.Keyword != "" { - cond = cond.And(builder.Like{"topic.name", opts.Keyword}) + cond = cond.And(builder.Like{"topic.name", strings.ToLower(opts.Keyword)}) } return cond diff --git a/models/user.go b/models/user.go index 78ab4627a..1ee20d74c 100755 --- a/models/user.go +++ b/models/user.go @@ -2071,6 +2071,18 @@ func SyncExternalUsers(ctx context.Context, updateExisting bool) error { return nil } +func GetUsersCount() (int64, error) { + user := new(User) + return x.Where("type=0").Count(user) + +} + +func GetOrganizationsCount() (int64, error) { + user := new(User) + return x.Where("type=1").Count(user) + +} + func GetBlockChainUnSuccessUsers() ([]*User, error) { users := make([]*User, 0, 10) err := x.Where("public_key = ''"). 
diff --git a/models/user_business_analysis.go b/models/user_business_analysis.go index a7d549cd4..bb6726a2c 100644 --- a/models/user_business_analysis.go +++ b/models/user_business_analysis.go @@ -71,7 +71,50 @@ type UserBusinessAnalysis struct { Name string `xorm:"NOT NULL"` } -func CountData(wikiCountMap map[string]int) { +func QueryUserStaticData(startTime int64, endTime int64) []*UserBusinessAnalysis { + log.Info("query startTime =" + fmt.Sprint(startTime) + " endTime=" + fmt.Sprint(endTime)) + statictisSess := xStatistic.NewSession() + defer statictisSess.Close() + + statictisSess.Select("*").Table("user_business_analysis").Where(" count_date>=" + fmt.Sprint(startTime) + " and count_date<=" + fmt.Sprint(endTime)).OrderBy("count_date desc") + + userBusinessAnalysisList := make([]*UserBusinessAnalysis, 0) + statictisSess.Find(&userBusinessAnalysisList) + + resultMap := make(map[int64]*UserBusinessAnalysis) + log.Info("query result size=" + fmt.Sprint(len(userBusinessAnalysisList))) + for _, userRecord := range userBusinessAnalysisList { + if _, ok := resultMap[userRecord.ID]; !ok { + resultMap[userRecord.ID] = userRecord + } else { + resultMap[userRecord.ID].CodeMergeCount += userRecord.CodeMergeCount + resultMap[userRecord.ID].CommitCount += userRecord.CommitCount + resultMap[userRecord.ID].IssueCount += userRecord.IssueCount + resultMap[userRecord.ID].CommentCount += userRecord.CommentCount + resultMap[userRecord.ID].FocusRepoCount += userRecord.FocusRepoCount + resultMap[userRecord.ID].StarRepoCount += userRecord.StarRepoCount + resultMap[userRecord.ID].WatchedCount += userRecord.WatchedCount + resultMap[userRecord.ID].CommitCodeSize += userRecord.CommitCodeSize + resultMap[userRecord.ID].CommitDatasetSize += userRecord.CommitDatasetSize + resultMap[userRecord.ID].CommitModelCount += userRecord.CommitModelCount + resultMap[userRecord.ID].SolveIssueCount += userRecord.SolveIssueCount + resultMap[userRecord.ID].EncyclopediasCount += 
userRecord.EncyclopediasCount + resultMap[userRecord.ID].CreateRepoCount += userRecord.CreateRepoCount + resultMap[userRecord.ID].LoginCount += userRecord.LoginCount + } + } + + userBusinessAnalysisReturnList := make([]*UserBusinessAnalysis, len(resultMap)) + index := 0 + for _, v := range resultMap { + userBusinessAnalysisReturnList[index] = v + index += 1 + } + log.Info("return size=" + fmt.Sprint(len(userBusinessAnalysisReturnList))) + return userBusinessAnalysisReturnList +} + +func CounDataByDate(wikiCountMap map[string]int, startTime time.Time, endTime time.Time) { log.Info("start to count other user info data") sess := x.NewSession() defer sess.Close() @@ -82,17 +125,17 @@ func CountData(wikiCountMap map[string]int) { currentTimeNow := time.Now() log.Info("current time:" + currentTimeNow.Format("2006-01-02 15:04:05")) - yesterday := currentTimeNow.AddDate(0, 0, -1) - startTime := time.Date(yesterday.Year(), yesterday.Month(), yesterday.Day(), 0, 0, 0, 0, yesterday.Location()) + //yesterday := currentTimeNow.AddDate(0, 0, -1) + //startTime := time.Date(yesterday.Year(), yesterday.Month(), yesterday.Day(), 0, 0, 0, 0, yesterday.Location()) start_unix := startTime.Unix() log.Info("DB query time:" + startTime.Format("2006-01-02 15:04:05")) - endTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 0, 0, 0, currentTimeNow.Location()) + //endTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 0, 0, 0, currentTimeNow.Location()) end_unix := endTime.Unix() CountDate := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 1, 0, 0, currentTimeNow.Location()) - CodeMergeCountMap := queryAction(start_unix, end_unix, 11) + CodeMergeCountMap := queryPullRequest(start_unix, end_unix) CommitCountMap := queryAction(start_unix, end_unix, 5) IssueCountMap := queryAction(start_unix, end_unix, 10) @@ -110,12 +153,19 @@ func CountData(wikiCountMap map[string]int) { 
CommitDatasetSizeMap := queryDatasetSize(start_unix, end_unix) SolveIssueCountMap := querySolveIssue(start_unix, end_unix) CreateRepoCountMap := queryUserCreateRepo(start_unix, end_unix) + LoginCountMap := queryLoginCount(start_unix, end_unix) + + statictisSess := xStatistic.NewSession() + defer statictisSess.Close() for i, userRecord := range userList { var dateRecord UserBusinessAnalysis dateRecord.ID = userRecord.ID log.Info("i=" + fmt.Sprint(i) + " userName=" + userRecord.Name) dateRecord.CountDate = CountDate.Unix() + + statictisSess.Delete(&dateRecord) + dateRecord.Email = userRecord.Email dateRecord.RegistDate = userRecord.CreatedUnix dateRecord.Name = userRecord.Name @@ -192,10 +242,14 @@ func CountData(wikiCountMap map[string]int) { dateRecord.CreateRepoCount = CreateRepoCountMap[dateRecord.ID] } + if _, ok := LoginCountMap[dateRecord.ID]; !ok { + dateRecord.LoginCount = 0 + } else { + dateRecord.LoginCount = LoginCountMap[dateRecord.ID] + } + dateRecord.CommitModelCount = 0 - statictisSess := xStatistic.NewSession() - defer statictisSess.Close() statictisSess.Insert(&dateRecord) } @@ -223,6 +277,28 @@ func querySolveIssue(start_unix int64, end_unix int64) map[int64]int { } +func queryPullRequest(start_unix int64, end_unix int64) map[int64]int { + sess := x.NewSession() + defer sess.Close() + + sess.Select("issue.*").Table("issue"). + Join("inner", "pull_request", "issue.id=pull_request.issue_id"). 
+ Where("pull_request.merged_unix>=" + fmt.Sprint(start_unix) + " and pull_request.merged_unix<=" + fmt.Sprint(end_unix)) + + issueList := make([]*Issue, 0) + sess.Find(&issueList) + resultMap := make(map[int64]int) + log.Info("query issue(PR) size=" + fmt.Sprint(len(issueList))) + for _, issueRecord := range issueList { + if _, ok := resultMap[issueRecord.PosterID]; !ok { + resultMap[issueRecord.PosterID] = 1 + } else { + resultMap[issueRecord.PosterID] += 1 + } + } + return resultMap +} + func queryAction(start_unix int64, end_unix int64, actionType int64) map[int64]int { sess := x.NewSession() defer sess.Close() @@ -341,7 +417,7 @@ func queryDatasetSize(start_unix int64, end_unix int64) map[int64]int { func queryUserCreateRepo(start_unix int64, end_unix int64) map[int64]int { sess := x.NewSession() defer sess.Close() - sess.Select("id,owner_id,name").Table("repository").Where(" created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)) + sess.Select("id,owner_id,name").Table("repository").Where("is_fork=false and created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)) repoList := make([]*Repository, 0) sess.Find(&repoList) resultMap := make(map[int64]int) @@ -354,7 +430,24 @@ func queryUserCreateRepo(start_unix int64, end_unix int64) map[int64]int { } } return resultMap +} +func queryLoginCount(start_unix int64, end_unix int64) map[int64]int { + statictisSess := xStatistic.NewSession() + defer statictisSess.Close() + statictisSess.Select("id,u_id").Table("user_login_log").Where("created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)) + userLoginLogList := make([]*UserLoginLog, 0) + statictisSess.Find(&userLoginLogList) + resultMap := make(map[int64]int) + log.Info("query user login size=" + fmt.Sprint(len(userLoginLogList))) + for _, loginRecord := range userLoginLogList { + if _, ok := resultMap[loginRecord.UId]; !ok { + resultMap[loginRecord.UId] = 1 + } else { + 
resultMap[loginRecord.UId] += 1 + } + } + return resultMap } func subMonth(t1, t2 time.Time) (month int) { diff --git a/models/user_login_log.go b/models/user_login_log.go new file mode 100644 index 000000000..4a499d527 --- /dev/null +++ b/models/user_login_log.go @@ -0,0 +1,34 @@ +package models + +import ( + "net/http" + + "code.gitea.io/gitea/modules/timeutil" +) + +type UserLoginLog struct { + ID int64 `xorm:"pk autoincr"` + UId int64 `xorm:"NOT NULL"` + IpAddr string `xorm:"default NULL"` + CreatedUnix timeutil.TimeStamp `xorm:"created"` +} + +func SaveLoginInfoToDb(r *http.Request, u *User) { + statictisSess := xStatistic.NewSession() + defer statictisSess.Close() + + var dateRecord UserLoginLog + + dateRecord.UId = u.ID + dateRecord.IpAddr = getIP(r) + + statictisSess.Insert(&dateRecord) +} + +func getIP(r *http.Request) string { + forwarded := r.Header.Get("X-FORWARDED-FOR") + if forwarded != "" { + return forwarded + } + return r.RemoteAddr +} diff --git a/modules/auth/modelarts.go b/modules/auth/modelarts.go index 0be3e3882..f2e5aeed5 100755 --- a/modules/auth/modelarts.go +++ b/modules/auth/modelarts.go @@ -14,3 +14,32 @@ type CreateModelArtsForm struct { func (f *CreateModelArtsForm) Validate(ctx *macaron.Context, errs binding.Errors) binding.Errors { return validate(errs, ctx.Data, f, ctx.Locale) } + +type CreateModelArtsNotebookForm struct { + JobName string `form:"job_name" binding:"Required"` + Attachment string `form:"attachment"` + Description string `form:"description"` +} + +func (f *CreateModelArtsNotebookForm) Validate(ctx *macaron.Context, errs binding.Errors) binding.Errors { + return validate(errs, ctx.Data, f, ctx.Locale) +} + +type CreateModelArtsTrainJobForm struct { + JobName string `form:"job_name" binding:"Required"` + Attachment string `form:"attachment" binding:"Required"` + BootFile string `form:"boot_file" binding:"Required"` + WorkServerNumber int `form:"work_server_number" binding:"Required"` + EngineID int `form:"engine_id" 
binding:"Required"` + PoolID string `form:"pool_id" binding:"Required"` + Flavor string `form:"flavor" binding:"Required"` + Params string `form:"run_para_list" binding:"Required"` + Description string `form:"description"` + IsSaveParam string `form:"is_save_para"` + ParameterTemplateName string `form:"parameter_template_name"` + PrameterDescription string `form:"parameter_description"` +} + +func (f *CreateModelArtsTrainJobForm) Validate(ctx *macaron.Context, errs binding.Errors) binding.Errors { + return validate(errs, ctx.Data, f, ctx.Locale) +} diff --git a/modules/base/tool.go b/modules/base/tool.go index 8145522e2..cf2972990 100644 --- a/modules/base/tool.go +++ b/modules/base/tool.go @@ -224,7 +224,7 @@ func SizedAvatarLinkWithDomain(email string, size int) string { // FileSize calculates the file size and generate user-friendly string. func FileSize(s int64) string { - return humanize.IBytes(uint64(s)) + return humanize.Bytes(uint64(s)) } // PrettyNumber produces a string form of the given number in base 10 with diff --git a/modules/cloudbrain/cloudbrain.go b/modules/cloudbrain/cloudbrain.go index 0de1db9a6..8f6bf4e17 100755 --- a/modules/cloudbrain/cloudbrain.go +++ b/modules/cloudbrain/cloudbrain.go @@ -16,6 +16,7 @@ const ( ModelMountPath = "/model" BenchMarkMountPath = "/benchmark" Snn4imagenetMountPath = "/snn4imagenet" + BrainScoreMountPath = "/brainscore" TaskInfoName = "/taskInfo" SubTaskName = "task1" @@ -27,7 +28,7 @@ var ( ResourceSpecs *models.ResourceSpecs ) -func GenerateTask(ctx *context.Context, jobName, image, command, uuid, codePath, modelPath, benchmarkPath, snn4imagenetPath, jobType, gpuQueue string, resourceSpecId int) error { +func GenerateTask(ctx *context.Context, jobName, image, command, uuid, codePath, modelPath, benchmarkPath, snn4imagenetPath, brainScorePath, jobType, gpuQueue string, resourceSpecId int) error { dataActualPath := setting.Attachment.Minio.RealPath + setting.Attachment.Minio.Bucket + "/" + 
setting.Attachment.Minio.BasePath + @@ -103,6 +104,13 @@ func GenerateTask(ctx *context.Context, jobName, image, command, uuid, codePath, ReadOnly: true, }, }, + { + HostPath: models.StHostPath{ + Path: brainScorePath, + MountPath: BrainScoreMountPath, + ReadOnly: true, + }, + }, }, }) if err != nil { @@ -123,7 +131,8 @@ func GenerateTask(ctx *context.Context, jobName, image, command, uuid, codePath, JobName: jobName, SubTaskName: SubTaskName, JobType: jobType, - Type: models.TypeCloudBrainOne, + Type: models.TypeCloudBrainOne, + Uuid: uuid, }) if err != nil { diff --git a/modules/context/context.go b/modules/context/context.go old mode 100644 new mode 100755 index 71c8986fb..6877780e3 --- a/modules/context/context.go +++ b/modules/context/context.go @@ -310,9 +310,11 @@ func Contexter() macaron.Handler { ctx.Data["SignedUserID"] = ctx.User.ID ctx.Data["SignedUserName"] = ctx.User.Name ctx.Data["IsAdmin"] = ctx.User.IsAdmin + c.Data["SignedUserName"] = ctx.User.Name } else { ctx.Data["SignedUserID"] = int64(0) ctx.Data["SignedUserName"] = "" + c.Data["SignedUserName"] = "" } // If request sends files, parse them here otherwise the Query() can't be parsed and the CsrfToken will be invalid. 
diff --git a/modules/cron/tasks_basic.go b/modules/cron/tasks_basic.go index 26cd16778..ed9829cef 100755 --- a/modules/cron/tasks_basic.go +++ b/modules/cron/tasks_basic.go @@ -174,6 +174,16 @@ func registerHandleRepoStatistic() { }) } +func registerHandleSummaryStatistic() { + RegisterTaskFatal("handle_summary_statistic", &BaseConfig{ + Enabled: true, + RunAtStart: false, + Schedule: "@daily", + }, func(ctx context.Context, _ *models.User, _ Config) error { + repo.SummaryStatistic() + return nil + }) +} func registerHandleUserStatistic() { RegisterTaskFatal("handle_user_statistic", &BaseConfig{ Enabled: true, @@ -202,4 +212,5 @@ func initBasicTasks() { registerHandleRepoStatistic() registerHandleUserStatistic() + registerHandleSummaryStatistic() } diff --git a/modules/git/repo_stats_custom.go b/modules/git/repo_stats_custom.go index f7556d5c2..5d99bd8af 100644 --- a/modules/git/repo_stats_custom.go +++ b/modules/git/repo_stats_custom.go @@ -35,58 +35,7 @@ type UserKPITypeStats struct { isNewContributor bool //是否是4个月内的新增贡献者 } -func GetRepoKPIStats(repoPath string, wikiPath string) (*RepoKPIStats, error) { - stats := &RepoKPIStats{} - - contributors, err := GetContributors(repoPath) - if err != nil { - return nil, err - } - timeUntil := time.Now() - fourMonthAgo := timeUntil.AddDate(0, -4, 0) - recentlyContributors, err := getContributors(repoPath, fourMonthAgo) - newContributersDict := make(map[string]struct{}) - if err != nil { - return nil, err - } - - if contributors != nil { - stats.Contributors = int64(len(contributors)) - for _, contributor := range contributors { - if contributor.CommitCnt >= 3 { - stats.KeyContributors++ - } - - if recentlyContributors != nil { - for _, recentlyContributor := range recentlyContributors { - if recentlyContributor.Email == contributor.Email && recentlyContributor.CommitCnt == contributor.CommitCnt { - stats.ContributorsAdded++ - newContributersDict[recentlyContributor.Email] = struct{}{} - } - - } - } - - } - - } - - err = 
setDevelopAge(repoPath, stats) - if err != nil { - return nil, fmt.Errorf("FillFromGit: %v", err) - } - err = setRepoKPIStats(repoPath, fourMonthAgo, stats, newContributersDict) - - if err != nil { - return nil, fmt.Errorf("FillFromGit: %v", err) - } - - setWikiPages(wikiPath, stats) - return stats, nil - -} - -func setDevelopAge(repoPath string, stats *RepoKPIStats) error { +func SetDevelopAge(repoPath string, stats *RepoKPIStats) error { args := []string{"log", "--no-merges", "--branches=*", "--format=%cd", "--date=short"} stdout, err := NewCommand(args...).RunInDirBytes(repoPath) if err != nil { @@ -173,7 +122,7 @@ func GetUserKPIStats(repoPath string) (map[string]*UserKPIStats, error) { } -func setRepoKPIStats(repoPath string, fromTime time.Time, stats *RepoKPIStats, newContributers map[string]struct{}) error { +func SetRepoKPIStats(repoPath string, fromTime time.Time, stats *RepoKPIStats, newContributers map[string]struct{}) error { since := fromTime.Format(time.RFC3339) args := []string{"log", "--numstat", "--no-merges", "--branches=*", "--pretty=format:---%n%h%n%an%n%ae%n", "--date=iso", fmt.Sprintf("--since='%s'", since)} @@ -259,7 +208,7 @@ func setRepoKPIStats(repoPath string, fromTime time.Time, stats *RepoKPIStats, n } -func getContributors(repoPath string, fromTime time.Time) ([]Contributor, error) { +func GetContributorsDetail(repoPath string, fromTime time.Time) ([]Contributor, error) { since := fromTime.Format(time.RFC3339) cmd := NewCommand("shortlog", "-sne", "--all", fmt.Sprintf("--since='%s'", since)) stdout, err := cmd.RunInDir(repoPath) @@ -289,7 +238,7 @@ func getContributors(repoPath string, fromTime time.Time) ([]Contributor, error) return nil, nil } -func setWikiPages(wikiPath string, stats *RepoKPIStats) { +func SetWikiPages(wikiPath string, stats *RepoKPIStats) { wikiPages := 0 if wikiPath == "" { diff --git a/modules/modelarts/modelarts.go b/modules/modelarts/modelarts.go index edd9d5d6b..e1dbe9f5a 100755 --- 
a/modules/modelarts/modelarts.go +++ b/modules/modelarts/modelarts.go @@ -1,22 +1,53 @@ package modelarts import ( + "encoding/json" + "path" + "strconv" + "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/storage" - "encoding/json" - "path" ) const ( + //notebook storageTypeOBS = "obs" autoStopDuration = 4 * 60 * 60 DataSetMountPath = "/home/ma-user/work" NotebookEnv = "Python3" NotebookType = "Ascend" + FlavorInfo = "Ascend: 1*Ascend 910 CPU: 24 核 96GiB (modelarts.kat1.xlarge)" + + //train-job + // ResourcePools = "{\"resource_pool\":[{\"id\":\"pool1328035d\", \"value\":\"专属资源池\"}]}" + // Engines = "{\"engine\":[{\"id\":1, \"value\":\"Ascend-Powered-Engine\"}]}" + // EngineVersions = "{\"version\":[{\"id\":118,\"value\":\"MindSpore-1.0.0-c75-python3.7-euleros2.8-aarch64\"}," + + // "{\"id\":119,\"value\":\"MindSpore-1.1.1-c76-python3.7-euleros2.8-aarch64\"}," + + // "{\"id\":120,\"value\":\"MindSpore-1.1.1-c76-tr5-python3.7-euleros2.8-aarch64\"}," + + // "{\"id\":117,\"value\":\"TF-1.15-c75-python3.7-euleros2.8-aarch64\"}" + + // "]}" + // TrainJobFlavorInfo = "{\"flavor\":[{\"code\":\"modelarts.bm.910.arm.public.2\",\"value\":\"Ascend : 2 * Ascend 910 CPU:48 核 512GiB\"}," + + // "{\"code\":\"modelarts.bm.910.arm.public.8\",\"value\":\"Ascend : 8 * Ascend 910 CPU:192 核 2048GiB\"}," + + // "{\"code\":\"modelarts.bm.910.arm.public.4\",\"value\":\"Ascend : 4 * Ascend 910 CPU:96 核 1024GiB\"}," + + // "{\"code\":\"modelarts.bm.910.arm.public.1\",\"value\":\"Ascend : 1 * Ascend 910 CPU:24 核 256GiB\"}" + + // "]}" + CodePath = "/code/" + OutputPath = "/output/" + LogPath = "/log/" + JobPath = "/job/" + OrderDesc = "desc" //向下查询 + OrderAsc = "asc" //向上查询 + Lines = 20 + TrainUrl = "train_url" + DataUrl = "data_url" + PerPage = 10 + + SortByCreateTime = "create_time" + ConfigTypeCustom = "custom" ) var ( @@ -24,6 +55,50 @@ var ( FlavorInfos 
*models.FlavorInfos ) +type GenerateTrainJobReq struct { + JobName string + Uuid string + Description string + CodeObsPath string + BootFile string + DataUrl string + TrainUrl string + FlavorCode string + LogUrl string + PoolID string + WorkServerNumber int + EngineID int64 + Parameters []models.Parameter +} + +type VersionInfo struct { + Version []struct { + ID int `json:"id"` + Value string `json:"value"` + } `json:"version"` +} + +type Flavor struct { + Info []struct { + Code string `json:"code"` + Value string `json:"value"` + } `json:"flavor"` +} + +type Engine struct { + Info []struct { + ID int `json:"id"` + Value string `json:"value"` + } `json:"engine"` +} + +type ResourcePool struct { + Info []struct { + ID string `json:"id"` + Value string `json:"value"` + } `json:"resource_pool"` +} + func GenerateTask(ctx *context.Context, jobName, uuid, description string) error { var dataActualPath string if uuid != "" { @@ -76,8 +151,8 @@ func GenerateTask(ctx *context.Context, jobName, uuid, description string) error log.Error("CreateJob failed: %v", err.Error()) return err } - err = models.CreateCloudbrain(&models.Cloudbrain{ + Status: string(models.JobWaiting), UserID: ctx.User.ID, RepoID: ctx.Repo.Repository.ID, @@ -85,6 +160,7 @@ func GenerateTask(ctx *context.Context, jobName, uuid, description string) error JobName: jobName, JobType: string(models.JobTypeDebug), Type: models.TypeCloudBrainTwo, + Uuid: uuid, }) if err != nil { @@ -93,3 +169,110 @@ func GenerateTask(ctx *context.Context, jobName, uuid, description string) error return nil } + +func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) error { + jobResult, err := createTrainJob(models.CreateTrainJobParams{ + JobName: req.JobName, + Description: req.Description, + Config: models.Config{ + WorkServerNum: req.WorkServerNumber, + AppUrl: req.CodeObsPath, + BootFileUrl: req.BootFile, + DataUrl: req.DataUrl, + EngineID: req.EngineID, + TrainUrl: req.TrainUrl, + LogUrl: req.LogUrl, + PoolID: 
req.PoolID, + CreateVersion: true, + Flavor: models.Flavor{ + Code: req.FlavorCode, + }, + Parameter: req.Parameters, + }, + }) + if err != nil { + log.Error("CreateJob failed: %v", err.Error()) + return err + } + + attach, err := models.GetAttachmentByUUID(req.Uuid) + if err != nil { + log.Error("GetAttachmentByUUID(%s) failed:%v", strconv.FormatInt(jobResult.JobID, 10), err.Error()) + return nil + } + + err = models.CreateCloudbrain(&models.Cloudbrain{ + Status: TransTrainJobStatus(jobResult.Status), + UserID: ctx.User.ID, + RepoID: ctx.Repo.Repository.ID, + JobID: strconv.FormatInt(jobResult.JobID, 10), + JobName: req.JobName, + JobType: string(models.JobTypeTrain), + Type: models.TypeCloudBrainTwo, + VersionID: jobResult.VersionID, + VersionName: jobResult.VersionName, + Uuid: req.Uuid, + DatasetName: attach.Name, + }) + + if err != nil { + log.Error("CreateCloudbrain(%s) failed:%v", req.JobName, err.Error()) + return err + } + + return nil +} + +func TransTrainJobStatus(status int) string { + switch status { + case 0: + return "UNKNOWN" + case 1: + return "INIT" + case 2: + return "IMAGE_CREATING" + case 3: + return "IMAGE_FAILED" + case 4: + return "SUBMIT_TRYING" + case 5: + return "SUBMIT_FAILED" + case 6: + return "DELETE_FAILED" + case 7: + return "WAITING" + case 8: + return "RUNNING" + case 9: + return "KILLING" + case 10: + return "COMPLETED" + case 11: + return "FAILED" + case 12: + return "KILLED" + case 13: + return "CANCELED" + case 14: + return "LOST" + case 15: + return "SCALING" + case 16: + return "SUBMIT_MODEL_FAILED" + case 17: + return "DEPLOY_SERVICE_FAILED" + case 18: + return "CHECK_INIT" + case 19: + return "CHECK_RUNNING" + case 20: + return "CHECK_RUNNING_COMPLETED" + case 21: + return "CHECK_FAILED" + + default: + return strconv.Itoa(status) + } + + return "" +} diff --git a/modules/modelarts/resty.go b/modules/modelarts/resty.go index f91be5e31..d17478c94 100755 --- a/modules/modelarts/resty.go +++ b/modules/modelarts/resty.go @@ 
-1,13 +1,14 @@ package modelarts import ( - "code.gitea.io/gitea/modules/log" "crypto/tls" "encoding/json" "fmt" "net/http" + "strconv" "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" "github.com/go-resty/resty/v2" ) @@ -23,6 +24,9 @@ const ( urlGetToken = "/v3/auth/tokens" urlNotebook = "/demanager/instances" + urlTrainJob = "/training-jobs" + urlResourceSpecs = "/job/resource-specs" + urlTrainJobConfig = "/training-job-configs" errorCodeExceedLimit = "ModelArts.0118" ) @@ -104,7 +108,7 @@ sendjob: Post(HOST + "/v1/" + setting.ProjectID + urlNotebook) if err != nil { - return nil, fmt.Errorf("resty create job: %s", err) + return nil, fmt.Errorf("resty create notebook: %s", err) } if res.StatusCode() == http.StatusUnauthorized && retry < 1 { @@ -121,11 +125,11 @@ sendjob: } if len(response.ErrorCode) != 0 { - log.Error("CreateJob failed(%s): %s", response.ErrorCode, response.ErrorMsg) + log.Error("createNotebook failed(%s): %s", response.ErrorCode, response.ErrorMsg) if response.ErrorCode == errorCodeExceedLimit { response.ErrorMsg = "所选规格使用数量已超过最大配额限制。" } - return &result, fmt.Errorf("CreateJob failed(%s): %s", response.ErrorCode, response.ErrorMsg) + return &result, fmt.Errorf("createNotebook failed(%s): %s", response.ErrorCode, response.ErrorMsg) } return &result, nil @@ -210,6 +214,45 @@ sendjob: return &result, nil } +func DelNotebook(jobID string) (*models.NotebookDelResult, error) { + checkSetting() + client := getRestyClient() + var result models.NotebookDelResult + + retry := 0 + +sendjob: + res, err := client.R(). + SetHeader("Content-Type", "application/json"). + SetAuthToken(TOKEN). + SetResult(&result). 
+ Delete(HOST + "/v1/" + setting.ProjectID + urlNotebook + "/" + jobID) + + if err != nil { + return &result, fmt.Errorf("resty DelJob: %v", err) + } + + if res.StatusCode() == http.StatusUnauthorized && retry < 1 { + retry++ + _ = getToken() + goto sendjob + } + + var response models.NotebookResult + err = json.Unmarshal(res.Body(), &response) + if err != nil { + log.Error("json.Unmarshal failed: %s", err.Error()) + return &result, fmt.Errorf("son.Unmarshal failed: %s", err.Error()) + } + + if len(response.ErrorCode) != 0 { + log.Error("DelJob failed(%s): %s", response.ErrorCode, response.ErrorMsg) + return &result, fmt.Errorf("DelJob failed(%s): %s", response.ErrorCode, response.ErrorMsg) + } + + return &result, nil +} + func DelJob(jobID string) (*models.NotebookDelResult, error) { checkSetting() client := getRestyClient() @@ -287,3 +330,441 @@ sendjob: return &result, nil } + +func createTrainJob(createJobParams models.CreateTrainJobParams) (*models.CreateTrainJobResult, error) { + checkSetting() + client := getRestyClient() + var result models.CreateTrainJobResult + + retry := 0 + +sendjob: + res, err := client.R(). + SetHeader("Content-Type", "application/json"). + SetAuthToken(TOKEN). + SetBody(createJobParams). + SetResult(&result). 
+ Post(HOST + "/v1/" + setting.ProjectID + urlTrainJob) + + if err != nil { + return nil, fmt.Errorf("resty create train-job: %s", err) + } + + req, _ := json.Marshal(createJobParams) + log.Info("%s", req) + + if res.StatusCode() == http.StatusUnauthorized && retry < 1 { + retry++ + _ = getToken() + goto sendjob + } + + if res.StatusCode() != http.StatusOK { + var temp models.ErrorResult + if err = json.Unmarshal([]byte(res.String()), &temp); err != nil { + log.Error("json.Unmarshal failed(%s): %v", res.String(), err.Error()) + return &result, fmt.Errorf("json.Unmarshal failed(%s): %v", res.String(), err.Error()) + } + log.Error("createTrainJob failed(%d):%s(%s)", res.StatusCode(), temp.ErrorCode, temp.ErrorMsg) + return &result, fmt.Errorf("createTrainJob failed(%d):%s(%s)", res.StatusCode(), temp.ErrorCode, temp.ErrorMsg) + } + + if !result.IsSuccess { + log.Error("createTrainJob failed(%s): %s", result.ErrorCode, result.ErrorMsg) + return &result, fmt.Errorf("createTrainJob failed(%s): %s", result.ErrorCode, result.ErrorMsg) + } + + return &result, nil +} + +func GetResourceSpecs() (*models.GetResourceSpecsResult, error) { + checkSetting() + client := getRestyClient() + var result models.GetResourceSpecsResult + + retry := 0 + +sendjob: + res, err := client.R(). + SetHeader("Content-Type", "application/json"). + SetAuthToken(TOKEN). + SetResult(&result). 
+ Get(HOST + "/v1/" + setting.ProjectID + urlResourceSpecs) + + if err != nil { + return nil, fmt.Errorf("resty GetResourceSpecs: %v", err) + } + + if res.StatusCode() == http.StatusUnauthorized && retry < 1 { + retry++ + _ = getToken() + goto sendjob + } + + if res.StatusCode() != http.StatusOK { + var temp models.ErrorResult + if err = json.Unmarshal([]byte(res.String()), &temp); err != nil { + log.Error("json.Unmarshal failed(%s): %v", res.String(), err.Error()) + return &result, fmt.Errorf("json.Unmarshal failed(%s): %v", res.String(), err.Error()) + } + log.Error("GetResourceSpecs failed(%d):%s(%s)", res.StatusCode(), temp.ErrorCode, temp.ErrorMsg) + return &result, fmt.Errorf("GetResourceSpecs failed(%d):%s(%s)", res.StatusCode(), temp.ErrorCode, temp.ErrorMsg) + } + + if !result.IsSuccess { + log.Error("GetResourceSpecs failed(%s): %s", result.ErrorCode, result.ErrorMsg) + return &result, fmt.Errorf("GetResourceSpecs failed(%s): %s", result.ErrorCode, result.ErrorMsg) + } + + return &result, nil +} + +func CreateTrainJobConfig(req models.CreateConfigParams) (*models.CreateTrainJobConfigResult, error) { + checkSetting() + client := getRestyClient() + var result models.CreateTrainJobConfigResult + + retry := 0 + +sendjob: + res, err := client.R(). + SetHeader("Content-Type", "application/json"). + SetAuthToken(TOKEN). + SetBody(req). + SetResult(&result). 
+ Post(HOST + "/v1/" + setting.ProjectID + urlTrainJobConfig) + + if err != nil { + return nil, fmt.Errorf("resty CreateTrainJobConfig: %s", err) + } + + if res.StatusCode() == http.StatusUnauthorized && retry < 1 { + retry++ + _ = getToken() + goto sendjob + } + + //temp, _ := json.Marshal(req) + //log.Info("%s", temp) + + if res.StatusCode() != http.StatusOK { + var temp models.ErrorResult + if err = json.Unmarshal([]byte(res.String()), &temp); err != nil { + log.Error("json.Unmarshal failed(%s): %v", res.String(), err.Error()) + return &result, fmt.Errorf("json.Unmarshal failed(%s): %v", res.String(), err.Error()) + } + log.Error("CreateTrainJobConfig failed(%d):%s(%s)", res.StatusCode(), temp.ErrorCode, temp.ErrorMsg) + return &result, fmt.Errorf("CreateTrainJobConfig failed(%d):%s(%s)", res.StatusCode(), temp.ErrorCode, temp.ErrorMsg) + } + + if !result.IsSuccess { + log.Error("CreateTrainJobConfig failed(%s): %s", result.ErrorCode, result.ErrorMsg) + return &result, fmt.Errorf("CreateTrainJobConfig failed(%s): %s", result.ErrorCode, result.ErrorMsg) + } + + return &result, nil +} + +func GetConfigList(perPage, page int, sortBy, order, searchContent, configType string) (*models.GetConfigListResult, error) { + checkSetting() + client := getRestyClient() + var result models.GetConfigListResult + + retry := 0 + +sendjob: + res, err := client.R(). + SetQueryParams(map[string]string{ + "per_page": strconv.Itoa(perPage), + "page": strconv.Itoa(page), + "sortBy": sortBy, + "order": order, + "search_content": searchContent, + "config_type": configType, + }). + SetAuthToken(TOKEN). + SetResult(&result). 
+ Get(HOST + "/v1/" + setting.ProjectID + urlTrainJobConfig) + + if err != nil { + return nil, fmt.Errorf("resty GetConfigList: %v", err) + } + + if res.StatusCode() == http.StatusUnauthorized && retry < 1 { + retry++ + _ = getToken() + goto sendjob + } + + if res.StatusCode() != http.StatusOK { + var temp models.ErrorResult + if err = json.Unmarshal([]byte(res.String()), &temp); err != nil { + log.Error("json.Unmarshal failed(%s): %v", res.String(), err.Error()) + return &result, fmt.Errorf("json.Unmarshal failed(%s): %v", res.String(), err.Error()) + } + log.Error("GetConfigList failed(%d):%s(%s)", res.StatusCode(), temp.ErrorCode, temp.ErrorMsg) + return &result, fmt.Errorf("获取参数配置列表失败(%d):%s(%s)", res.StatusCode(), temp.ErrorCode, temp.ErrorMsg) + } + + if !result.IsSuccess { + log.Error("GetConfigList failed(%s): %s", result.ErrorCode, result.ErrorMsg) + return &result, fmt.Errorf("获取参数配置列表失败(%s): %s", result.ErrorCode, result.ErrorMsg) + } + + return &result, nil +} + +func GetParaConfig(configName, configType string) (models.GetConfigResult, error) { + checkSetting() + client := getRestyClient() + var result models.GetConfigResult + + retry := 0 + +sendjob: + res, err := client.R(). + SetQueryParams(map[string]string{ + "config_type": configType, + }). + SetAuthToken(TOKEN). + SetResult(&result). 
+ Get(HOST + "/v1/" + setting.ProjectID + urlTrainJobConfig + "/" + configName) + + if err != nil { + return result, fmt.Errorf("resty GetParaConfig: %v", err) + } + + if res.StatusCode() == http.StatusUnauthorized && retry < 1 { + retry++ + _ = getToken() + goto sendjob + } + + if res.StatusCode() != http.StatusOK { + var temp models.ErrorResult + if err = json.Unmarshal([]byte(res.String()), &temp); err != nil { + log.Error("json.Unmarshal failed(%s): %v", res.String(), err.Error()) + return result, fmt.Errorf("json.Unmarshal failed(%s): %v", res.String(), err.Error()) + } + log.Error("GetParaConfig failed(%d):%s(%s)", res.StatusCode(), temp.ErrorCode, temp.ErrorMsg) + return result, fmt.Errorf("获取参数配置详情失败(%d):%s(%s)", res.StatusCode(), temp.ErrorCode, temp.ErrorMsg) + } + + if !result.IsSuccess { + log.Error("GetParaConfig failed(%s): %s", result.ErrorCode, result.ErrorMsg) + return result, fmt.Errorf("获取参数配置详情失败(%s): %s", result.ErrorCode, result.ErrorMsg) + } + + return result, nil +} + +func GetTrainJob(jobID, versionID string) (*models.GetTrainJobResult, error) { + checkSetting() + client := getRestyClient() + var result models.GetTrainJobResult + + retry := 0 + +sendjob: + res, err := client.R(). + SetAuthToken(TOKEN). + SetResult(&result). 
+ Get(HOST + "/v1/" + setting.ProjectID + urlTrainJob + "/" + jobID + "/versions/" + versionID) + + if err != nil { + return nil, fmt.Errorf("resty GetTrainJob: %v", err) + } + + if res.StatusCode() == http.StatusUnauthorized && retry < 1 { + retry++ + _ = getToken() + goto sendjob + } + + if res.StatusCode() != http.StatusOK { + var temp models.ErrorResult + if err = json.Unmarshal([]byte(res.String()), &temp); err != nil { + log.Error("json.Unmarshal failed(%s): %v", res.String(), err.Error()) + return &result, fmt.Errorf("json.Unmarshal failed(%s): %v", res.String(), err.Error()) + } + log.Error("GetTrainJob failed(%d):%s(%s)", res.StatusCode(), temp.ErrorCode, temp.ErrorMsg) + return &result, fmt.Errorf("获取作业详情失败(%d):%s(%s)", res.StatusCode(), temp.ErrorCode, temp.ErrorMsg) + } + + if !result.IsSuccess { + log.Error("GetTrainJob(%s) failed", jobID) + return &result, fmt.Errorf("获取作业详情失败") + } + + return &result, nil +} + +func GetTrainJobLog(jobID, versionID, baseLine, logFile, order string, lines int) (*models.GetTrainJobLogResult, error) { + checkSetting() + client := getRestyClient() + var result models.GetTrainJobLogResult + + retry := 0 + +sendjob: + res, err := client.R(). + SetQueryParams(map[string]string{ + "base_line": baseLine, + "lines": strconv.Itoa(lines), + "log_file": logFile, + "order": order, + }). + SetAuthToken(TOKEN). + SetResult(&result). 
+ Get(HOST + "/v1/" + setting.ProjectID + urlTrainJob + "/" + jobID + "/versions/" + versionID + "/aom-log") + + if err != nil { + return nil, fmt.Errorf("resty GetTrainJobLog: %v", err) + } + + if res.StatusCode() == http.StatusUnauthorized && retry < 1 { + retry++ + _ = getToken() + goto sendjob + } + + if res.StatusCode() != http.StatusOK { + var temp models.ErrorResult + if err = json.Unmarshal([]byte(res.String()), &temp); err != nil { + log.Error("json.Unmarshal failed(%s): %v", res.String(), err.Error()) + return &result, fmt.Errorf("json.Unmarshal failed(%s): %v", res.String(), err.Error()) + } + log.Error("GetTrainJobLog failed(%d):%s(%s)", res.StatusCode(), temp.ErrorCode, temp.ErrorMsg) + return &result, fmt.Errorf("获取作业日志失败(%d):%s(%s)", res.StatusCode(), temp.ErrorCode, temp.ErrorMsg) + } + + if !result.IsSuccess { + log.Error("GetTrainJobLog(%s) failed", jobID) + return &result, fmt.Errorf("获取作业日志失败:%s", result.ErrorMsg) + } + + return &result, nil +} + +func GetTrainJobLogFileNames(jobID, versionID string) (*models.GetTrainJobLogFileNamesResult, error) { + checkSetting() + client := getRestyClient() + var result models.GetTrainJobLogFileNamesResult + + retry := 0 + +sendjob: + res, err := client.R(). + SetAuthToken(TOKEN). + SetResult(&result). 
+ Get(HOST + "/v1/" + setting.ProjectID + urlTrainJob + "/" + jobID + "/versions/" + versionID + "/log/file-names") + + if err != nil { + return nil, fmt.Errorf("resty GetTrainJobLogFileNames: %v", err) + } + + if res.StatusCode() == http.StatusUnauthorized && retry < 1 { + retry++ + _ = getToken() + goto sendjob + } + + if res.StatusCode() != http.StatusOK { + var temp models.ErrorResult + if err = json.Unmarshal([]byte(res.String()), &temp); err != nil { + log.Error("json.Unmarshal failed(%s): %v", res.String(), err.Error()) + return &result, fmt.Errorf("json.Unmarshal failed(%s): %v", res.String(), err.Error()) + } + log.Error("GetTrainJobLogFileNames failed(%d):%s(%s)", res.StatusCode(), temp.ErrorCode, temp.ErrorMsg) + return &result, fmt.Errorf("GetTrainJobLogFileNames failed(%d):%s(%s)", res.StatusCode(), temp.ErrorCode, temp.ErrorMsg) + } + + if !result.IsSuccess { + log.Error("GetTrainJobLogFileNames(%s) failed", jobID) + return &result, fmt.Errorf("获取作业日志文件失败:%s", result.ErrorMsg) + } + + return &result, nil +} + +func DelTrainJob(jobID string) (*models.TrainJobResult, error) { + checkSetting() + client := getRestyClient() + var result models.TrainJobResult + + retry := 0 + +sendjob: + res, err := client.R(). + SetAuthToken(TOKEN). + SetResult(&result). 
+ Delete(HOST + "/v1/" + setting.ProjectID + urlTrainJob + "/" + jobID) + + if err != nil { + return &result, fmt.Errorf("resty DelTrainJob: %v", err) + } + + if res.StatusCode() == http.StatusUnauthorized && retry < 1 { + retry++ + _ = getToken() + goto sendjob + } + + if res.StatusCode() != http.StatusOK { + var temp models.ErrorResult + if err = json.Unmarshal([]byte(res.String()), &temp); err != nil { + log.Error("json.Unmarshal failed(%s): %v", res.String(), err.Error()) + return &result, fmt.Errorf("json.Unmarshal failed(%s): %v", res.String(), err.Error()) + } + log.Error("DelTrainJob failed(%d):%s(%s)", res.StatusCode(), temp.ErrorCode, temp.ErrorMsg) + return &result, fmt.Errorf("删除训练作业失败(%d):%s(%s)", res.StatusCode(), temp.ErrorCode, temp.ErrorMsg) + } + + if !result.IsSuccess { + log.Error("DelTrainJob(%s) failed", jobID) + return &result, fmt.Errorf("删除训练作业失败:%s", result.ErrorMsg) + } + + return &result, nil +} + +func StopTrainJob(jobID, versionID string) (*models.TrainJobResult, error) { + checkSetting() + client := getRestyClient() + var result models.TrainJobResult + + retry := 0 + +sendjob: + res, err := client.R(). + SetAuthToken(TOKEN). + SetResult(&result). 
+ Post(HOST + "/v1/" + setting.ProjectID + urlTrainJob + "/" + jobID + "/versions/" + versionID + "/stop") + + if err != nil { + return &result, fmt.Errorf("resty StopTrainJob: %v", err) + } + + if res.StatusCode() == http.StatusUnauthorized && retry < 1 { + retry++ + _ = getToken() + goto sendjob + } + + if res.StatusCode() != http.StatusOK { + var temp models.ErrorResult + if err = json.Unmarshal([]byte(res.String()), &temp); err != nil { + log.Error("json.Unmarshal failed(%s): %v", res.String(), err.Error()) + return &result, fmt.Errorf("json.Unmarshal failed(%s): %v", res.String(), err.Error()) + } + log.Error("StopTrainJob failed(%d):%s(%s)", res.StatusCode(), temp.ErrorCode, temp.ErrorMsg) + return &result, fmt.Errorf("停止训练作业失败(%d):%s(%s)", res.StatusCode(), temp.ErrorCode, temp.ErrorMsg) + } + + if !result.IsSuccess { + log.Error("StopTrainJob(%s) failed", jobID) + return &result, fmt.Errorf("停止训练作业失败:%s", result.ErrorMsg) + } + + return &result, nil +} diff --git a/modules/normalization/normalization.go b/modules/normalization/normalization.go new file mode 100644 index 000000000..ce616d7f8 --- /dev/null +++ b/modules/normalization/normalization.go @@ -0,0 +1,83 @@ +package normalization + +import ( + "code.gitea.io/gitea/modules/setting" +) + +func Normalization(value float64, minValue float64, maxValue float64) float64 { + + min := int64(minValue * 100) + max := int64(maxValue * 100) + + if min == max { + return 100.0 + } else { + return 100 * (value - minValue) / (maxValue - minValue) + } + +} + +func GetRadarValue(impactValue float64, completeValue float64, livenessValue float64, projectHealthValue float64, teamHealthValue float64, growthValue float64) float64 { + return setting.RadarMap.Impact*impactValue + + setting.RadarMap.Completeness*completeValue + + setting.RadarMap.Liveness*livenessValue + + setting.RadarMap.ProjectHealth*projectHealthValue + + setting.RadarMap.TeamHealth*teamHealthValue + + setting.RadarMap.Growth*growthValue + +} + +func 
GetImpactInitValue(watch int64, star int64, fork int64, download int64, comments int64, browser int64) float64 { + + return setting.RadarMap.ImpactWatch*float64(watch) + + setting.RadarMap.ImpactStar*float64(star) + + setting.RadarMap.ImpactFork*float64(fork) + + setting.RadarMap.ImpactCodeDownload*float64(download)*0.001 + + setting.RadarMap.ImpactComments*float64(comments) + + setting.RadarMap.ImpactBrowser*float64(browser)*0.001 + +} + +func GetCompleteInitValue(issuesClosed int64, releases int64, developAge int64, dataset int64, model int64, wiki int64) float64 { + + return setting.RadarMap.CompletenessIssuesClosed*float64(issuesClosed) + + setting.RadarMap.CompletenessReleases*float64(releases) + + setting.RadarMap.CompletenessDevelopAge*float64(developAge) + + setting.RadarMap.CompletenessDataset*(float64(dataset)/(1024*1024)) + + setting.RadarMap.CompletenessModel*float64(model) + + setting.RadarMap.CompletenessWiki*float64(wiki) + +} + +func GetLivenessInitValue(commits int64, issues int64, pr int64, release int64) float64 { + + return setting.RadarMap.LivenessCommit*float64(commits) + + setting.RadarMap.LivenessIssue*float64(issues) + + setting.RadarMap.LivenessPR*float64(pr) + + setting.RadarMap.LivenessRelease*float64(release) + +} + +func GetProjectHealthInitValue(issueClosedRatio float32) float64 { + + return setting.RadarMap.ProjectHealthIssueCompleteRatio * float64(issueClosedRatio) + +} + +func GetTeamHealthInitValue(contributors int64, keyContributors int64, newContributors int64) float64 { + + return setting.RadarMap.TeamHealthContributors*float64(contributors) + + setting.RadarMap.TeamHealthKeyContributors*float64(keyContributors) + + setting.RadarMap.TeamHealthContributorsAdded*float64(newContributors) + +} + +func GetRepoGrowthInitValue(codelinesGrowth int64, issueGrowth int64, commitsGrowth int64, newContributors int64, commentsGrowth int64) float64 { + + return setting.RadarMap.GrowthCodeLines*float64(codelinesGrowth) + + 
setting.RadarMap.GrowthIssue*float64(issueGrowth) + + setting.RadarMap.GrowthCommit*float64(commitsGrowth) + + setting.RadarMap.GrowthContributors*float64(newContributors) + + setting.RadarMap.GrowthComments*float64(commentsGrowth) + +} diff --git a/modules/setting/radarmap.go b/modules/setting/radarmap.go new file mode 100644 index 000000000..26624d143 --- /dev/null +++ b/modules/setting/radarmap.go @@ -0,0 +1,7 @@ +package setting + +func UpdateRadarMap() { + Cfg.DeleteSection("radar_map") + Cfg.Reload() + SetRadarMapConfig() +} diff --git a/modules/setting/setting.go b/modules/setting/setting.go index e1e7b7902..eb0e41c90 100755 --- a/modules/setting/setting.go +++ b/modules/setting/setting.go @@ -457,18 +457,26 @@ var ( Snn4imagenetCode string Snn4imagenetServerHost string + //snn4imagenet config + IsBrainScoreEnabled bool + BrainScoreCode string + BrainScoreServerHost string + //blockchain config BlockChainHost string CommitValidDate string //obs config - Endpoint string - AccessKeyID string - SecretAccessKey string - Bucket string - Location string - BasePath string - UserBasePath string + Endpoint string + AccessKeyID string + SecretAccessKey string + Bucket string + Location string + BasePath string + OutPutPath string + TrainJobModelPath string + CodePathPrefix string + UserBasePath string //modelarts config ModelArtsHost string @@ -478,10 +486,16 @@ var ( ModelArtsUsername string ModelArtsPassword string ModelArtsDomain string + AllowedOrg string ProfileID string PoolInfos string Flavor string - FlavorInfos string + //train-job + ResourcePools string + Engines string + EngineVersions string + FlavorInfos string + TrainJobFLAVORINFOS string //elk config ElkUrl string @@ -490,6 +504,47 @@ var ( Index string TimeField string ElkTimeFormat string + + //nginx proxy + PROXYURL string + RadarMap = struct { + Impact float64 + ImpactWatch float64 + ImpactStar float64 + ImpactFork float64 + ImpactCodeDownload float64 + ImpactComments float64 + ImpactBrowser float64 
+ + Completeness float64 + CompletenessIssuesClosed float64 + CompletenessReleases float64 + CompletenessDevelopAge float64 + CompletenessDataset float64 + CompletenessModel float64 + CompletenessWiki float64 + + Liveness float64 + LivenessCommit float64 + LivenessIssue float64 + LivenessPR float64 + LivenessRelease float64 + + ProjectHealth float64 + ProjectHealthIssueCompleteRatio float64 + + TeamHealth float64 + TeamHealthContributors float64 + TeamHealthKeyContributors float64 + TeamHealthContributorsAdded float64 + + Growth float64 + GrowthCodeLines float64 + GrowthIssue float64 + GrowthContributors float64 + GrowthCommit float64 + GrowthComments float64 + }{} ) // DateLang transforms standard language locale name to corresponding value in datetime plugin. @@ -1181,8 +1236,13 @@ func NewContext() { sec = Cfg.Section("snn4imagenet") IsSnn4imagenetEnabled = sec.Key("ENABLED").MustBool(false) - Snn4imagenetCode = sec.Key("SNN4IMAGENETCODE").MustString("https://yangzhx:justfortest123@git.openi.org.cn/yangzhx/detection_benchmark_script.git") - Snn4imagenetServerHost = sec.Key("HOST").MustString("http://192.168.202.90:3366/") + Snn4imagenetCode = sec.Key("SNN4IMAGENETCODE").MustString("https://yult:19910821ylt@git.openi.org.cn/yult/snn4imagenet_script.git") + Snn4imagenetServerHost = sec.Key("HOST").MustString("http://192.168.207.76:8080/") + + sec = Cfg.Section("brainscore") + IsBrainScoreEnabled = sec.Key("ENABLED").MustBool(false) + BrainScoreCode = sec.Key("BRAINSCORECODE").MustString("https://yult:19910821ylt@git.openi.org.cn/yult/brainscore_script.git") + BrainScoreServerHost = sec.Key("HOST").MustString("http://192.168.207.76:8080/") sec = Cfg.Section("blockchain") BlockChainHost = sec.Key("HOST").MustString("http://192.168.136.66:3302/") @@ -1195,7 +1255,11 @@ func NewContext() { Bucket = sec.Key("BUCKET").MustString("testopendata") Location = sec.Key("LOCATION").MustString("cn-south-222") BasePath = sec.Key("BASE_PATH").MustString("attachment/") + 
TrainJobModelPath = sec.Key("TrainJobModel_Path").MustString("job/") + OutPutPath = sec.Key("Output_Path").MustString("output/") + CodePathPrefix = sec.Key("CODE_PATH_PREFIX").MustString("code/") UserBasePath = sec.Key("BASE_PATH_USER").MustString("users/") + PROXYURL = sec.Key("PROXY_URL").MustString("") sec = Cfg.Section("modelarts") ModelArtsHost = sec.Key("ENDPOINT").MustString("112.95.163.80") @@ -1205,10 +1269,15 @@ func NewContext() { ModelArtsUsername = sec.Key("USERNAME").MustString("") ModelArtsPassword = sec.Key("PASSWORD").MustString("") ModelArtsDomain = sec.Key("DOMAIN").MustString("cn-south-222") + AllowedOrg = sec.Key("ORGANIZATION").MustString("") ProfileID = sec.Key("PROFILE_ID").MustString("") PoolInfos = sec.Key("POOL_INFOS").MustString("") - Flavor = sec.Key("FLAVOR").MustString("") + Flavor = sec.Key("FLAVOR").MustString("modelarts.bm.910.arm.public.2") + ResourcePools = sec.Key("Resource_Pools").MustString("") + Engines = sec.Key("Engines").MustString("") + EngineVersions = sec.Key("Engine_Versions").MustString("") FlavorInfos = sec.Key("FLAVOR_INFOS").MustString("") + TrainJobFLAVORINFOS = sec.Key("TrainJob_FLAVOR_INFOS").MustString("") sec = Cfg.Section("elk") ElkUrl = sec.Key("ELKURL").MustString("http://192.168.207.35:5601/internal/bsearch") @@ -1217,6 +1286,45 @@ func NewContext() { Index = sec.Key("INDEX").MustString("filebeat-7.3.2*") TimeField = sec.Key("TIMEFIELD").MustString(" @timestamptest") ElkTimeFormat = sec.Key("ELKTIMEFORMAT").MustString("date_time") + + SetRadarMapConfig() +} + +func SetRadarMapConfig() { + sec := Cfg.Section("radar_map") + + RadarMap.Impact = sec.Key("impact").MustFloat64(0.3) + RadarMap.ImpactWatch = sec.Key("impact_watch").MustFloat64(0.1) + RadarMap.ImpactStar = sec.Key("impact_star").MustFloat64(0.2) + RadarMap.ImpactFork = sec.Key("impact_fork").MustFloat64(0.3) + RadarMap.ImpactCodeDownload = sec.Key("impact_code_download").MustFloat64(0.2) + RadarMap.ImpactComments = 
sec.Key("impact_comments").MustFloat64(0.1) + RadarMap.ImpactBrowser = sec.Key("impact_browser").MustFloat64(0.1) + RadarMap.Completeness = sec.Key("completeness").MustFloat64(0.1) + RadarMap.CompletenessIssuesClosed = sec.Key("completeness_issues_closed").MustFloat64(0.2) + RadarMap.CompletenessReleases = sec.Key("completeness_releases").MustFloat64(0.3) + RadarMap.CompletenessDevelopAge = sec.Key("completeness_develop_age").MustFloat64(0.1) + RadarMap.CompletenessDataset = sec.Key("completeness_dataset").MustFloat64(0.1) + RadarMap.CompletenessModel = sec.Key("completeness_model").MustFloat64(0.1) + RadarMap.CompletenessWiki = sec.Key("completeness_wiki").MustFloat64(0.1) + RadarMap.Liveness = sec.Key("liveness").MustFloat64(0.3) + RadarMap.LivenessCommit = sec.Key("liveness_commit").MustFloat64(0.2) + RadarMap.LivenessIssue = sec.Key("liveness_issue").MustFloat64(0.2) + RadarMap.LivenessPR = sec.Key("liveness_pr").MustFloat64(0.2) + RadarMap.LivenessRelease = sec.Key("liveness_release").MustFloat64(0.4) + RadarMap.ProjectHealth = sec.Key("project_health").MustFloat64(0.1) + RadarMap.ProjectHealthIssueCompleteRatio = sec.Key("project_health_issue_complete_ratio").MustFloat64(100) + RadarMap.TeamHealth = sec.Key("team_health").MustFloat64(0.1) + RadarMap.TeamHealthContributors = sec.Key("team_health_contributors").MustFloat64(0.2) + RadarMap.TeamHealthKeyContributors = sec.Key("team_health_key_contributors").MustFloat64(0.6) + RadarMap.TeamHealthContributorsAdded = sec.Key("team_health_contributors_added").MustFloat64(0.2) + RadarMap.Growth = sec.Key("growth").MustFloat64(0.1) + RadarMap.GrowthCodeLines = sec.Key("growth_code_lines").MustFloat64(0.2) + RadarMap.GrowthIssue = sec.Key("growth_issue").MustFloat64(0.2) + RadarMap.GrowthContributors = sec.Key("growth_contributors").MustFloat64(0.2) + RadarMap.GrowthCommit = sec.Key("growth_commit").MustFloat64(0.2) + RadarMap.GrowthComments = sec.Key("growth_comments").MustFloat64(0.2) + } func loadInternalToken(sec 
*ini.Section) string { diff --git a/modules/storage/obs.go b/modules/storage/obs.go index 77fe49b4e..bd73281d0 100755 --- a/modules/storage/obs.go +++ b/modules/storage/obs.go @@ -5,16 +5,27 @@ package storage import ( - "github.com/unknwon/com" + "io" "path" "strconv" "strings" + "github.com/unknwon/com" + "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/obs" "code.gitea.io/gitea/modules/setting" ) +type FileInfo struct { + FileName string `json:"FileName"` + ModTime string `json:"ModTime"` + IsDir bool `json:"IsDir"` + Size int64 `json:"Size"` + ParenDir string `json:"ParenDir"` + UUID string `json:"UUID"` +} + //check if has the object //todo:修改查询方式 func ObsHasObject(path string) (bool, error) { @@ -102,6 +113,108 @@ func CompleteObsMultiPartUpload(uuid, uploadID, fileName string) error { return nil } +func ObsMultiPartUpload(uuid string, uploadId string, partNumber int, fileName string, putBody io.ReadCloser) error { + input := &obs.UploadPartInput{} + input.Bucket = setting.Bucket + input.Key = strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/") + input.UploadId = uploadId + input.PartNumber = partNumber + input.Body = putBody + output, err := ObsCli.UploadPart(input) + if err == nil { + log.Info("RequestId:%s\n", output.RequestId) + log.Info("ETag:%s\n", output.ETag) + return nil + } else { + if obsError, ok := err.(obs.ObsError); ok { + log.Info(obsError.Code) + log.Info(obsError.Message) + return obsError + } else { + log.Error("error:", err.Error()) + return err + } + } + +} + +func ObsDownload(uuid string, fileName string) (io.ReadCloser, error) { + input := &obs.GetObjectInput{} + input.Bucket = setting.Bucket + input.Key = strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/") + // input.Key = strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid)), "/") + output, err := ObsCli.GetObject(input) + if err == nil { + 
log.Info("StorageClass:%s, ETag:%s, ContentType:%s, ContentLength:%d, LastModified:%s\n", + output.StorageClass, output.ETag, output.ContentType, output.ContentLength, output.LastModified) + return output.Body, nil + } else if obsError, ok := err.(obs.ObsError); ok { + log.Error("Code:%s, Message:%s", obsError.Code, obsError.Message) + return nil, obsError + } else { + return nil, err + } +} + +func ObsModelDownload(JobName string, fileName string) (io.ReadCloser, error) { + input := &obs.GetObjectInput{} + input.Bucket = setting.Bucket + input.Key = strings.TrimPrefix(path.Join(setting.TrainJobModelPath, JobName, setting.OutPutPath, fileName), "/") + // input.Key = strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid)), "/") + output, err := ObsCli.GetObject(input) + if err == nil { + log.Info("StorageClass:%s, ETag:%s, ContentType:%s, ContentLength:%d, LastModified:%s\n", + output.StorageClass, output.ETag, output.ContentType, output.ContentLength, output.LastModified) + return output.Body, nil + } else if obsError, ok := err.(obs.ObsError); ok { + log.Error("Code:%s, Message:%s", obsError.Code, obsError.Message) + return nil, obsError + } else { + return nil, err + } +} + +func GetObsListObject(jobName, parentDir string) ([]FileInfo, error) { + input := &obs.ListObjectsInput{} + input.Bucket = setting.Bucket + input.Prefix = strings.TrimPrefix(path.Join(setting.TrainJobModelPath, jobName, setting.OutPutPath, parentDir), "/") + output, err := ObsCli.ListObjects(input) + fileInfos := make([]FileInfo, 0) + if err == nil { + for _, val := range output.Contents { + str1 := strings.Split(val.Key, "/") + var isDir bool + var fileName,nextParentDir string + if strings.HasSuffix(val.Key, "/") { + fileName = str1[len(str1)-2] + isDir = true + nextParentDir = fileName + if fileName == parentDir || (fileName + "/") == setting.OutPutPath { + continue + } + } else { + fileName = str1[len(str1)-1] + isDir = false + } + + fileInfo := FileInfo{ + 
ModTime: val.LastModified.Format("2006-01-02 15:04:05"), + FileName: fileName, + Size: val.Size, + IsDir:isDir, + ParenDir: nextParentDir, + } + fileInfos = append(fileInfos, fileInfo) + } + return fileInfos, err + } else { + if obsError, ok := err.(obs.ObsError); ok { + log.Error("Code:%s, Message:%s", obsError.Code, obsError.Message) + } + return nil, err + } +} + func ObsGenMultiPartSignedUrl(uuid string, uploadId string, partNumber int, fileName string) (string, error) { input := &obs.CreateSignedUrlInput{} @@ -125,6 +238,26 @@ func ObsGenMultiPartSignedUrl(uuid string, uploadId string, partNumber int, file return output.SignedUrl, nil } +func GetObsCreateSignedUrl(jobName, parentDir, fileName string) (string, error) { + input := &obs.CreateSignedUrlInput{} + input.Bucket = setting.Bucket + input.Key = strings.TrimPrefix(path.Join(setting.TrainJobModelPath, jobName, setting.OutPutPath, parentDir, fileName), "/") + + input.Expires = 60 * 60 + input.Method = obs.HttpMethodGet + + reqParams := make(map[string]string) + reqParams["response-content-disposition"] = "attachment; filename=\"" + fileName + "\"" + input.QueryParams = reqParams + output, err := ObsCli.CreateSignedUrl(input) + if err != nil { + log.Error("CreateSignedUrl failed:", err.Error()) + return "", err + } + + return output.SignedUrl, nil +} + func ObsGetPreSignedUrl(uuid, fileName string) (string, error) { input := &obs.CreateSignedUrlInput{} input.Method = obs.HttpMethodGet diff --git a/options/locale/locale_en-US.ini b/options/locale/locale_en-US.ini index 746e46463..7fa8c7a4f 100644 --- a/options/locale/locale_en-US.ini +++ b/options/locale/locale_en-US.ini @@ -755,15 +755,24 @@ unit_disabled = The site administrator has disabled this repository section. 
 language_other = Other
 datasets = Datasets
 datasets.desc = Enable Dataset
-cloudbrain=cloudbrain
+
+debug=Debug
+stop=Stop
+delete=Delete
+model_download=Model Download
+submit_image=Submit Image
+download=Download
+
+
+cloudbrain=Cloudbrain
 cloudbrain.new=New cloudbrain
-cloudbrain.desc=cloudbrain
+cloudbrain.desc=Cloudbrain
 cloudbrain.cancel=Cancel
-cloudbrain.commit_image = submit
-clone_cnt=download
-balance = balance
-balance.total_view = total balance
-balance.available = available balance:
+cloudbrain.commit_image = Submit
+clone_cnt=Download
+balance = Balance
+balance.total_view = Total Balance
+balance.available = Available Balance:
 cloudbrain1 = cloudbrain1
 cloudbrain2 = cloudbrain2
 cloudbrain_selection = select cloudbrain
@@ -774,6 +783,60 @@ cloudbrain_creator=Creator
 cloudbrain_task = Task Name
 cloudbrain_operate = Operate
 cloudbrain_status_createtime = Status/Createtime
+cloudbrain_status_runtime = Running Time
+
+
+modelarts.notebook=Debug Task
+modelarts.train_job=Train Task
+modelarts.train_job.new_debug= New Debug Task
+modelarts.train_job.new_train=New Train Task
+modelarts.train_job.config=Configuration information
+modelarts.train_job.new=New Train Task
+modelarts.train_job.new_place=The description should not exceed 256 characters
+
+
+
+modelarts.train_job.basic_info=Basic Info
+modelarts.train_job.job_status=Job Status
+modelarts.train_job.job_name=Job Name
+modelarts.train_job.version=Job Version
+modelarts.train_job.start_time=Start Time
+modelarts.train_job.dura_time=Running Time
+modelarts.train_job.description=Description
+modelarts.train_job.parameter_setting=Parameter setting
+modelarts.train_job.parameter_setting_info=Parameter Info
+modelarts.train_job.fast_parameter_setting=fast_parameter_setting
+modelarts.train_job.fast_parameter_setting_config=fast_parameter_setting_config
+modelarts.train_job.fast_parameter_setting_config_link=fast_parameter_setting_config_link
+modelarts.train_job.frames=frames
+modelarts.train_job.algorithm_origin=Algorithm Origin +modelarts.train_job.AI_driver=AI Engine +modelarts.train_job.start_file=Start File +modelarts.train_job.boot_file_helper=The startup file is the entry file that your program executes, and it must be a file ending in .py +modelarts.train_job.dataset=Dataset +modelarts.train_job.run_parameter=Run Parameter +modelarts.train_job.add_run_parameter=Add Run Parameter +modelarts.train_job.parameter_name=Parameter Name +modelarts.train_job.parameter_value=Parameter Value +modelarts.train_job.resource_setting=resource_setting +modelarts.train_job.resource_setting_info=resource_setting_info +modelarts.train_job.resource_pool=resource_pool +modelarts.train_job.resource_type=resource_type +modelarts.train_job.standard=Standard +modelarts.train_job.NAS_address=NAS Address +modelarts.train_job.NAS_mount_path=NAS Mount Path +modelarts.train_job.query_whether_save_parameter=query_whether_save_parameter +modelarts.train_job.save_helper=save_helper +modelarts.train_job.common_frame=common_frame +modelarts.train_job.amount_of_compute_node=Amount of Compute Node +modelarts.train_job.job_parameter_name=job_parameter_name +modelarts.train_job.parameter_description=parameter_description +modelarts.log=Log +modelarts.version_manage=Version Manage +modelarts.back=Back +modelarts.train_job_para_admin=train_job_para_admin +modelarts.train_job_para.edit=train_job_para.edit +modelarts.train_job_para.connfirm=train_job_para.connfirm template.items = Template Items template.git_content = Git Content (Default Branch) diff --git a/options/locale/locale_zh-CN.ini b/options/locale/locale_zh-CN.ini index 6dc0d410c..81b4a8459 100755 --- a/options/locale/locale_zh-CN.ini +++ b/options/locale/locale_zh-CN.ini @@ -757,6 +757,14 @@ unit_disabled=站点管理员已禁用此项目单元。 language_other=其它 datasets=数据集 datasets.desc=数据集功能 + +debug=调试 +stop=停止 +delete=删除 +model_download=模型下载 +submit_image=提交镜像 +download=模型下载 + cloudbrain=云脑 cloudbrain.new=新建任务 cloudbrain.desc=云脑功能 
@@ -776,6 +784,62 @@ cloudbrain_creator=创建者 cloudbrain_task=任务名称 cloudbrain_operate=操作 cloudbrain_status_createtime=状态/创建时间 +cloudbrain_status_runtime = 运行时长 +cloudbrain_jobname_err=只能以小写字母或数字开头且只包含小写字母、数字、_和-,不能以_结尾,最长36个字符。 + +modelarts.notebook=调试任务 +modelarts.train_job=训练任务 +modelarts.train_job.new_debug=新建调试任务 +modelarts.train_job.new_train=新建训练任务 +modelarts.train_job.config=配置信息 +modelarts.train_job.new=新建训练任务 +modelarts.train_job.new_place=描述字数不超过256个字符 + + + +modelarts.train_job.basic_info=基本信息 +modelarts.train_job.job_status=任务状态 +modelarts.train_job.job_name=任务名称 +modelarts.train_job.version=任务版本 +modelarts.train_job.start_time=开始时间 +modelarts.train_job.dura_time=运行时长 +modelarts.train_job.description=任务描述 +modelarts.train_job.parameter_setting=参数设置 +modelarts.train_job.parameter_setting_info=参数信息 +modelarts.train_job.fast_parameter_setting=一键式参数配置 +modelarts.train_job.fast_parameter_setting_config=如您已保存过参数配置,可单击 +modelarts.train_job.fast_parameter_setting_config_link=这里 +modelarts.train_job.frames=常用框架 +modelarts.train_job.algorithm_origin=算法来源 +modelarts.train_job.AI_driver=AI引擎 +modelarts.train_job.start_file=启动文件 +modelarts.train_job.boot_file_helper=启动文件是您程序执行的入口文件,必须是以.py结尾的文件。 +modelarts.train_job.boot_file_place=填写启动文件路径,默认为train.py +modelarts.train_job.dataset=数据集 +modelarts.train_job.run_parameter=运行参数 +modelarts.train_job.add_run_parameter=增加运行参数 +modelarts.train_job.parameter_name=参数名 +modelarts.train_job.parameter_value=参数值 +modelarts.train_job.resource_setting=资源设置 +modelarts.train_job.resource_setting_info=资源信息 +modelarts.train_job.resource_pool=资源池 +modelarts.train_job.resource_type=资源类型 +modelarts.train_job.standard=规格 +modelarts.train_job.NAS_address=NAS地址 +modelarts.train_job.NAS_mount_path=NAS挂载路径 +modelarts.train_job.query_whether_save_parameter=保存作业参数 +modelarts.train_job.save_helper=保存当前作业的配置参数,后续您可以使用已保存的配置参数快速创建训练作业。 +modelarts.train_job.common_frame=常用框架 +modelarts.train_job.amount_of_compute_node=计算节点个数 
+modelarts.train_job.job_parameter_name=任务参数名称 +modelarts.train_job.parameter_description=任务参数描述 +modelarts.log=日志 +modelarts.version_manage=版本管理 +modelarts.back=返回 +modelarts.train_job_para_admin=任务参数管理 +modelarts.train_job_para.edit=编辑 +modelarts.train_job_para.connfirm=确定 + template.items=模板选项 template.git_content=Git数据(默认分支) diff --git a/public/img/org-jd@2x-80.jpg b/public/img/org-jd@2x-80.jpg new file mode 100644 index 000000000..4c99c8acc Binary files /dev/null and b/public/img/org-jd@2x-80.jpg differ diff --git a/public/self/labelTaskPage.js b/public/self/labelTaskPage.js index a160c3961..a68dfaadc 100644 --- a/public/self/labelTaskPage.js +++ b/public/self/labelTaskPage.js @@ -41,7 +41,7 @@ function setDataSetTask(){ //dislpayUser(); getLabelPropertyTask(); displayLabelPropertyTask(); - + dataset_sele_Change(""); $(".ui.dataset.modal").modal("show"); } @@ -132,6 +132,7 @@ function setPredictTask(){ get_model_list(); displayModelTask(); + dataset_auto_sele_Change(""); $(".ui.predict.modal").modal("show"); } @@ -197,13 +198,13 @@ function sele_export_Change(sele){ function dataset_sele_Change(sele){ var dataset_listName = $('#dataset_list option:selected').text(); console.log("select dataset_list =" + dataset_listName); - $("#datasetlabeltaskname").attr({value:dataset_listName+"-人工标注"}); + $("#datasetlabeltaskname").val(dataset_listName+"-人工标注"); } function dataset_auto_sele_Change(sele){ var dataset_listName = $('#dataset_list_auto option:selected').text(); console.log("select dataset_list_auto =" + dataset_listName); - $("#autolabeltaskname").attr({value:dataset_listName+"-自动标注"}); + $("#autolabeltaskname").val(dataset_listName+"-自动标注"); } @@ -309,11 +310,11 @@ function label_task_create(task_name, relate_task_id, taskType,assign_user_id,la success:function(res){ console.log(res); if(res.code == 0){ - alert("自动标注任务创建成功!"); + alert("标注任务创建成功!"); createsucced = true; } else{ - alert("创建自动标注任务失败," + res.message); + alert("创建标注任务失败," + res.message); 
createsucced = false; } }, diff --git a/public/self/test.js b/public/self/test.js new file mode 100644 index 000000000..2839c76ab --- /dev/null +++ b/public/self/test.js @@ -0,0 +1,28 @@ + +function displayDir(uuid){ + console.log('uuid 1=' + uuid); + + var html="