diff --git a/custom/conf/app.ini.sample b/custom/conf/app.ini.sample
index d294c8823..7a4298f6b 100755
--- a/custom/conf/app.ini.sample
+++ b/custom/conf/app.ini.sample
@@ -1141,3 +1141,9 @@ growth_issue=0.2
growth_contributors=0.2
growth_commit=0.2
growth_comments=0.2
+
+
+[grampus]
+USERNAME =
+PASSWORD =
+SERVER_HOST =
diff --git a/models/action.go b/models/action.go
index 9b92b4192..288186b2c 100755
--- a/models/action.go
+++ b/models/action.go
@@ -50,14 +50,16 @@ const (
ActionRejectPullRequest // 22
ActionCommentPull // 23
- ActionUploadAttachment //24
- ActionCreateDebugGPUTask //25
- ActionCreateDebugNPUTask //26
- ActionCreateTrainTask //27
- ActionCreateInferenceTask // 28
- ActionCreateBenchMarkTask //29
- ActionCreateNewModelTask //30
- ActionCreateGPUTrainTask //31
+ ActionUploadAttachment //24
+ ActionCreateDebugGPUTask //25
+ ActionCreateDebugNPUTask //26
+ ActionCreateTrainTask //27
+ ActionCreateInferenceTask // 28
+ ActionCreateBenchMarkTask //29
+ ActionCreateNewModelTask //30
+ ActionCreateGPUTrainTask //31
+ ActionCreateGrampusNPUTrainTask //32
+ ActionCreateGrampusGPUTrainTask //33
)
// Action represents user operation type and other information to
diff --git a/models/attachment.go b/models/attachment.go
index 6fb98a07d..3503dcb73 100755
--- a/models/attachment.go
+++ b/models/attachment.go
@@ -110,8 +110,15 @@ func (a *Attachment) IncreaseDownloadCount() error {
}
func IncreaseAttachmentUseNumber(uuid string) error {
+
+	uuidArray := strings.Split(uuid, ";")
+	// bind each uuid as a query parameter to avoid SQL injection
+	args := []interface{}{"UPDATE `attachment` SET use_number=use_number+1 WHERE uuid IN (?" + strings.Repeat(",?", len(uuidArray)-1) + ")"}
+	for _, id := range uuidArray {
+		args = append(args, id)
+	}
 	// Update use number.
-	if _, err := x.Exec("UPDATE `attachment` SET use_number=use_number+1 WHERE uuid=?", uuid); err != nil {
+	if _, err := x.Exec(args...); err != nil {
return fmt.Errorf("increase attachment use count: %v", err)
}
@@ -560,6 +567,36 @@ func GetAttachmentSizeByDatasetID(datasetID int64) (int64, error) {
return total, nil
}
+func AttachmentsByDatasetOption(datasets []int64, opts *SearchDatasetOptions) ([]*Attachment, error) {
+ sess := x.NewSession()
+ defer sess.Close()
+ var cond = builder.NewCond()
+ cond = cond.And(builder.In("attachment.dataset_id", datasets))
+ if opts.JustNeedZipFile {
+ cond = cond.And(builder.Gt{"attachment.decompress_state": 0})
+ }
+ if opts.PublicOnly {
+ cond = cond.And(builder.Eq{"attachment.is_private": false})
+ }
+ if opts.CloudBrainType >= 0 {
+ cond = cond.And(builder.Eq{"attachment.type": opts.CloudBrainType})
+ }
+ if opts.UploadAttachmentByMe {
+ cond = cond.And(
+ builder.Eq{"attachment.uploader_id": opts.User.ID},
+ )
+ }
+
+
+ attachments := make([]*Attachment, 0)
+ if err := sess.Table(&Attachment{}).Where(cond).Desc("id").
+ Find(&attachments); err != nil {
+ return nil, fmt.Errorf("Find: %v", err)
+ }
+ return attachments, nil
+
+}
+
func GetAllAttachmentSize() (int64, error) {
return x.SumInt(&Attachment{}, "size")
}
diff --git a/models/cloudbrain.go b/models/cloudbrain.go
index 4b4c2c099..8e1b94a97 100755
--- a/models/cloudbrain.go
+++ b/models/cloudbrain.go
@@ -2,6 +2,7 @@ package models
import (
"encoding/json"
+ "errors"
"fmt"
"strconv"
"strings"
@@ -24,7 +25,7 @@ type ModelArtsJobStatus string
const (
TypeCloudBrainOne int = iota
TypeCloudBrainTwo
- TypeIntelligentNet
+	TypeC2Net //intelligent computing network (C2Net)
TypeCloudBrainAll = -1
)
@@ -99,6 +100,15 @@ const (
ModelArtsTrainJobCheckFailed ModelArtsJobStatus = "CHECK_FAILED" //审核作业失败
DURATION_STR_ZERO = "00:00:00"
+
+ //grampus
+ GrampusStatusPending = "pending"
+ GrampusStatusRunning = "RUNNING"
+ GrampusStatusFailed = "FAILED"
+ GrampusStatusSucceeded = "SUCCEEDED"
+ GrampusStatusStopped = "STOPPED"
+ GrampusStatusUnknown = "UNKNOWN"
+ GrampusStatusWaiting = "WAITING"
)
type Cloudbrain struct {
@@ -138,6 +148,8 @@ type Cloudbrain struct {
PreVersionName string //父版本名称
ComputeResource string //计算资源,例如npu
EngineID int64 //引擎id
+ ImageID string //grampus image_id
+ AiCenter string //grampus ai center: center_id+center_name
TrainUrl string //输出模型的obs路径
BranchName string //分支名称
@@ -206,7 +218,7 @@ func ConvertDurationToStr(duration int64) string {
}
func IsTrainJobTerminal(status string) bool {
- return status == string(ModelArtsTrainJobCompleted) || status == string(ModelArtsTrainJobFailed) || status == string(ModelArtsTrainJobKilled)
+ return status == string(ModelArtsTrainJobCompleted) || status == string(ModelArtsTrainJobFailed) || status == string(ModelArtsTrainJobKilled) || status == GrampusStatusFailed || status == GrampusStatusStopped || status == GrampusStatusSucceeded
}
func IsModelArtsDebugJobTerminal(status string) bool {
@@ -554,6 +566,17 @@ type FlavorInfo struct {
Desc string `json:"desc"`
}
+type SpecialPools struct {
+ Pools []*SpecialPool `json:"pools"`
+}
+type SpecialPool struct {
+ Org string `json:"org"`
+ Type string `json:"type"`
+ IsExclusive bool `json:"isExclusive"`
+ Pool []*GpuInfo `json:"pool"`
+ JobType []string `json:"jobType"`
+}
+
type ImageInfosModelArts struct {
ImageInfo []*ImageInfoModelArts `json:"image_info"`
}
@@ -977,6 +1000,16 @@ type Parameter struct {
type Parameters struct {
Parameter []Parameter `json:"parameter"`
}
+type Datasurl struct {
+ DatasetUrl string `json:"dataset_url"`
+ DatasetName string `json:"dataset_name"`
+}
+
+type DatasetDownload struct {
+ DatasetName string `json:"dataset_name"`
+ DatasetDownloadLink string `json:"dataset_download_link"`
+ RepositoryLink string `json:"repository_link"`
+}
type DataSource struct {
DatasetID string `json:"dataset_id"`
@@ -1156,6 +1189,88 @@ type LogFile struct {
Name string
}
+//Grampus
+type GrampusResult struct {
+ ErrorCode int `json:"errorCode"`
+ ErrorMsg string `json:"errorMsg"`
+}
+
+type GrampusJobInfo struct {
+ StartedAt int64 `json:"startedAt"`
+ RunSec int64 `json:"runSec"`
+ CompletedAt int64 `json:"completedAt"`
+ CreatedAt int64 `json:"createdAt"`
+ UpdatedAt int64 `json:"updatedAt"`
+ Desc string `json:"desc"`
+ JobID string `json:"id"`
+ Name string `json:"name"`
+ Status string `json:"status"`
+ UserID string `json:"userId"`
+ Tasks []GrampusTasks `json:"tasks"`
+}
+type Center struct {
+ ID string `json:"id"`
+ Name string `json:"name"`
+}
+type GrampusSpec struct {
+ CreatedAt int64 `json:"createdAt"`
+ UpdatedAt int64 `json:"updatedAt"`
+ ID string `json:"id"`
+ Name string `json:"name"`
+ ProcessorType string `json:"processorType"`
+ Centers []Center `json:"centers"`
+}
+
+type GetGrampusResourceSpecsResult struct {
+ GrampusResult
+ Infos []GrampusSpec `json:"resourceSpecs"`
+}
+
+type GrampusImage struct {
+ CreatedAt int64 `json:"createdAt"`
+ UpdatedAt int64 `json:"updatedAt"`
+ ID string `json:"id"`
+ Name string `json:"name"`
+ ProcessorType string `json:"processorType"`
+}
+
+type GetGrampusImagesResult struct {
+ GrampusResult
+ TotalSize int `json:"totalSize"`
+ Infos []GrampusImage `json:"images"`
+}
+
+type CreateGrampusJobResponse struct {
+ GrampusResult
+ JobInfo GrampusJobInfo `json:"otJob"`
+}
+
+type GetGrampusJobResponse struct {
+ GrampusResult
+ JobInfo GrampusJobInfo `json:"otJob"`
+}
+
+type GrampusStopJobResponse struct {
+ GrampusResult
+ StoppedAt int64 `json:"stoppedAt"`
+}
+
+type GrampusTasks struct {
+ Command string `json:"command"`
+ Name string `json:"name"`
+ ImageId string `json:"imageId"`
+ ResourceSpecId string `json:"resourceSpecId"`
+ ImageUrl string `json:"imageUrl"`
+ CenterID []string `json:"centerID"`
+ CenterName []string `json:"centerName"`
+ ReplicaNum int `json:"replicaNum"`
+}
+
+type CreateGrampusJobRequest struct {
+ Name string `json:"name"`
+ Tasks []GrampusTasks `json:"tasks"`
+}
+
type GetTrainJobMetricStatisticResult struct {
TrainJobResult
Interval int `json:"interval"` //查询的时间间隔,单位为分钟
@@ -1201,6 +1316,12 @@ func Cloudbrains(opts *CloudbrainsOptions) ([]*CloudbrainInfo, int64, error) {
)
}
+ if len(opts.ComputeResource) > 0 {
+ cond = cond.And(
+ builder.Eq{"cloudbrain.compute_resource": opts.ComputeResource},
+ )
+ }
+
if len(opts.JobTypes) > 0 {
if opts.JobTypeNot {
cond = cond.And(
@@ -1456,6 +1577,11 @@ func GetCloudbrainByJobID(jobID string) (*Cloudbrain, error) {
return getRepoCloudBrain(cb)
}
+func GetCloudbrainByJobIDWithDeleted(jobID string) (*Cloudbrain, error) {
+ cb := &Cloudbrain{JobID: jobID}
+ return getRepoCloudBrainWithDeleted(cb)
+}
+
func GetCloudbrainByID(id string) (*Cloudbrain, error) {
idInt64, _ := strconv.ParseInt(id, 10, 64)
cb := &Cloudbrain{ID: idInt64}
@@ -1634,6 +1760,11 @@ func GetCloudbrainInferenceJobCountByUserID(userID int64) (int, error) {
return int(count), err
}
+func GetGrampusCountByUserID(userID int64, jobType, computeResource string) (int, error) {
+ count, err := x.In("status", GrampusStatusWaiting, GrampusStatusRunning).And("job_type = ? and user_id = ? and type = ?", jobType, userID, TypeC2Net).And("compute_resource = ?", computeResource).Count(new(Cloudbrain))
+ return int(count), err
+}
+
func UpdateInferenceJob(job *Cloudbrain) error {
return updateInferenceJob(x, job)
}
@@ -1839,3 +1970,51 @@ func CloudbrainAllStatic(opts *CloudbrainsOptions) ([]*CloudbrainInfo, int64, er
}
return cloudbrains, count, nil
}
+
+type DatasetInfo struct {
+ DataLocalPath string
+ Name string
+}
+
+func GetDatasetInfo(uuidStr string) (map[string]DatasetInfo, string, error) {
+ var datasetNames string
+ uuids := strings.Split(uuidStr, ";")
+ if len(uuids) > setting.MaxDatasetNum {
+ log.Error("the dataset count(%d) exceed the limit", len(uuids))
+ return nil, datasetNames, errors.New("the dataset count exceed the limit")
+ }
+
+ datasetInfos := make(map[string]DatasetInfo)
+ attachs, err := GetAttachmentsByUUIDs(uuids)
+ if err != nil {
+ log.Error("GetAttachmentsByUUIDs failed: %v", err)
+ return nil, datasetNames, err
+ }
+ for i, attach := range attachs {
+ fileName := strings.TrimSuffix(strings.TrimSuffix(strings.TrimSuffix(attach.Name, ".zip"), ".tar.gz"), ".tgz")
+ for _, datasetInfo := range datasetInfos {
+ if fileName == datasetInfo.Name {
+ log.Error("the dataset name is same: %v", attach.Name)
+ return nil, datasetNames, errors.New("the dataset name is same")
+ }
+ }
+
+ dataLocalPath := setting.Attachment.Minio.RealPath +
+ setting.Attachment.Minio.Bucket + "/" +
+ setting.Attachment.Minio.BasePath +
+ AttachmentRelativePath(attach.UUID) +
+ attach.UUID
+
+ datasetInfos[attach.UUID] = DatasetInfo{
+ DataLocalPath: dataLocalPath,
+ Name: fileName,
+ }
+ if i == 0 {
+ datasetNames = attach.Name
+ } else {
+ datasetNames += ";" + attach.Name
+ }
+ }
+
+ return datasetInfos, datasetNames, nil
+}
diff --git a/models/cloudbrain_image.go b/models/cloudbrain_image.go
old mode 100644
new mode 100755
index f72c6a27c..71f0c2c94
--- a/models/cloudbrain_image.go
+++ b/models/cloudbrain_image.go
@@ -68,6 +68,7 @@ type SearchImageOptions struct {
IncludeCustom bool
IncludeOwnerOnly bool
Topics string
+ CloudbrainType int
ListOptions
SearchOrderBy
}
@@ -411,6 +412,10 @@ func SearchImageCondition(opts *SearchImageOptions) builder.Cond {
}
+ if opts.CloudbrainType > 0 {
+ cond = cond.And(builder.Eq{"cloudbrain_type": opts.CloudbrainType})
+ }
+
return cond
}
diff --git a/models/custom_migrations.go b/models/custom_migrations.go
old mode 100644
new mode 100755
index 412bedce1..65b53f0f4
--- a/models/custom_migrations.go
+++ b/models/custom_migrations.go
@@ -15,13 +15,9 @@ type CustomMigrationStatic struct {
Migrate func(*xorm.Engine, *xorm.Engine) error
}
-var customMigrations = []CustomMigration{
- {"Custom v1 Topic struct change to support chinese", syncTopicStruct},
-}
+var customMigrations []CustomMigration
-var customMigrationsStatic = []CustomMigrationStatic{
- {"update issue_fixed_rate to 1 if num_issues is 0 ", updateIssueFixedRate},
-}
+var customMigrationsStatic []CustomMigrationStatic
func MigrateCustom(x *xorm.Engine) {
diff --git a/models/dataset.go b/models/dataset.go
index b7186ac0b..7f049f068 100755
--- a/models/dataset.go
+++ b/models/dataset.go
@@ -81,12 +81,14 @@ func (datasets DatasetList) loadAttributes(e Engine) error {
if err := e.
Where("id > 0").
In("id", keysInt64(userIdSet)).
+ Cols("id", "lower_name", "name", "full_name", "email").
Find(&users); err != nil {
return fmt.Errorf("find users: %v", err)
}
if err := e.
Where("id > 0").
In("id", keysInt64(set)).
+ Cols("id", "owner_id", "owner_name", "lower_name", "name", "description", "alias", "lower_alias","is_private").
Find(&repos); err != nil {
return fmt.Errorf("find repos: %v", err)
}
@@ -98,19 +100,94 @@ func (datasets DatasetList) loadAttributes(e Engine) error {
return nil
}
+func (datasets DatasetList) loadAttachmentAttributes(opts *SearchDatasetOptions) error {
+ if len(datasets) == 0 {
+ return nil
+ }
+ datasetIDs := make([]int64, len(datasets))
+ for i := range datasets {
+ datasetIDs[i] = datasets[i].ID
+ }
+ attachments, err := AttachmentsByDatasetOption(datasetIDs, opts)
+ if err != nil {
+ return fmt.Errorf("GetAttachmentsByDatasetIds failed error: %v", err)
+ }
+
+ permissionMap := make(map[int64]bool, len(datasets))
+
+ for _, attachment := range attachments {
+
+ for i := range datasets {
+ if attachment.DatasetID == datasets[i].ID {
+ if opts.StarByMe {
+
+				permission, ok := permissionMap[datasets[i].ID]
+ if !ok {
+
+ permission = false
+ datasets[i].Repo.GetOwner()
+ if datasets[i].Repo.Owner.IsOrganization() {
+ if datasets[i].Repo.Owner.IsUserPartOfOrg(opts.User.ID) {
+ log.Info("user is member of org.")
+ permission = true
+ }
+ }
+ if !permission {
+ isCollaborator, _ := datasets[i].Repo.IsCollaborator(opts.User.ID)
+ if isCollaborator {
+ log.Info("Collaborator user may visit the attach.")
+ permission = true
+ }
+ }
+
+ permissionMap[datasets[i].ID]=permission
+ }
+
+ if permission{
+ datasets[i].Attachments = append(datasets[i].Attachments, attachment)
+ } else if !attachment.IsPrivate {
+ datasets[i].Attachments = append(datasets[i].Attachments, attachment)
+ }
+ } else {
+ datasets[i].Attachments = append(datasets[i].Attachments, attachment)
+ }
+
+ }
+
+ }
+
+ }
+
+ for i := range datasets {
+ if datasets[i].Attachments==nil{
+ datasets[i].Attachments=[]*Attachment{}
+ }
+ datasets[i].Repo.Owner = nil
+ }
+ return nil
+
+}
+
type SearchDatasetOptions struct {
Keyword string
OwnerID int64
+ User *User
RepoID int64
IncludePublic bool
RecommendOnly bool
Category string
Task string
License string
- DatasetIDs []int64
+	DatasetIDs []int64 // currently only takes effect when StarByMe is true
ListOptions
SearchOrderBy
- IsOwner bool
+ IsOwner bool
+ StarByMe bool
+ CloudBrainType int //0 cloudbrain 1 modelarts -1 all
+ PublicOnly bool
+ JustNeedZipFile bool
+ NeedAttachment bool
+ UploadAttachmentByMe bool
}
func CreateDataset(dataset *Dataset) (err error) {
@@ -159,29 +236,40 @@ func SearchDatasetCondition(opts *SearchDatasetOptions) builder.Cond {
if opts.RepoID > 0 {
cond = cond.And(builder.Eq{"dataset.repo_id": opts.RepoID})
}
- if opts.IncludePublic {
+
+ if opts.PublicOnly {
+ cond = cond.And(builder.Eq{"dataset.status": DatasetStatusPublic})
+ cond = cond.And(builder.Eq{"attachment.is_private": false})
+ } else if opts.IncludePublic {
cond = cond.And(builder.Eq{"dataset.status": DatasetStatusPublic})
cond = cond.And(builder.Eq{"attachment.is_private": false})
if opts.OwnerID > 0 {
-
subCon := builder.NewCond()
subCon = subCon.And(builder.Eq{"repository.owner_id": opts.OwnerID})
subCon = generateFilterCond(opts, subCon)
cond = cond.Or(subCon)
}
- } else if opts.OwnerID > 0 {
+ } else if opts.OwnerID > 0 && !opts.StarByMe && !opts.UploadAttachmentByMe {
cond = cond.And(builder.Eq{"repository.owner_id": opts.OwnerID})
if !opts.IsOwner {
cond = cond.And(builder.Eq{"dataset.status": DatasetStatusPublic})
cond = cond.And(builder.Eq{"attachment.is_private": false})
}
}
-
if len(opts.DatasetIDs) > 0 {
- subCon := builder.NewCond()
- subCon = subCon.And(builder.In("dataset.id", opts.DatasetIDs))
- cond = cond.Or(subCon)
+ if opts.StarByMe {
+ cond = cond.And(builder.In("dataset.id", opts.DatasetIDs))
+ } else {
+ subCon := builder.NewCond()
+ subCon = subCon.And(builder.In("dataset.id", opts.DatasetIDs))
+ subCon = generateFilterCond(opts, subCon)
+ cond = cond.Or(subCon)
+ }
+ } else {
+ if opts.StarByMe {
+ cond = cond.And(builder.Eq{"dataset.id": -1})
+ }
}
return cond
@@ -207,6 +295,17 @@ func generateFilterCond(opts *SearchDatasetOptions, cond builder.Cond) builder.C
cond = cond.And(builder.Eq{"dataset.recommend": opts.RecommendOnly})
}
+ if opts.JustNeedZipFile {
+ cond = cond.And(builder.Gt{"attachment.decompress_state": 0})
+ }
+
+ if opts.CloudBrainType >= 0 {
+ cond = cond.And(builder.Eq{"attachment.type": opts.CloudBrainType})
+ }
+ if opts.UploadAttachmentByMe {
+ cond = cond.And(builder.Eq{"attachment.uploader_id": opts.User.ID})
+ }
+
return cond
}
@@ -233,7 +332,6 @@ func SearchDatasetByCondition(opts *SearchDatasetOptions, cond builder.Cond) (Da
sess.Select(selectColumnsSql).Join("INNER", "repository", "repository.id = dataset.repo_id").
Join("INNER", "attachment", "attachment.dataset_id=dataset.id").
Where(cond).OrderBy(opts.SearchOrderBy.String())
-
if opts.PageSize > 0 {
sess.Limit(opts.PageSize, (opts.Page-1)*opts.PageSize)
}
@@ -245,6 +343,12 @@ func SearchDatasetByCondition(opts *SearchDatasetOptions, cond builder.Cond) (Da
return nil, 0, fmt.Errorf("LoadAttributes: %v", err)
}
+ if opts.NeedAttachment {
+ if err = datasets.loadAttachmentAttributes(opts); err != nil {
+ return nil, 0, fmt.Errorf("LoadAttributes: %v", err)
+ }
+ }
+
return datasets, count, nil
}
@@ -361,10 +465,22 @@ func UpdateDataset(ctx DBContext, rel *Dataset) error {
func IncreaseDatasetUseCount(uuid string) {
IncreaseAttachmentUseNumber(uuid)
+ attachments, _ := GetAttachmentsByUUIDs(strings.Split(uuid, ";"))
+
+ countMap := make(map[int64]int)
+
+ for _, attachment := range attachments {
+ value, ok := countMap[attachment.DatasetID]
+ if ok {
+ countMap[attachment.DatasetID] = value + 1
+ } else {
+ countMap[attachment.DatasetID] = 1
+ }
+
+ }
- attachment, _ := GetAttachmentByUUID(uuid)
- if attachment != nil {
- x.Exec("UPDATE `dataset` SET use_count=use_count+1 WHERE id=?", attachment.DatasetID)
+ for key, value := range countMap {
+ x.Exec("UPDATE `dataset` SET use_count=use_count+? WHERE id=?", value, key)
}
}
@@ -460,5 +576,12 @@ func GetCollaboratorDatasetIdsByUserID(userID int64) []int64 {
_ = x.Table("dataset").Join("INNER", "collaboration", "dataset.repo_id = collaboration.repo_id and collaboration.mode>0 and collaboration.user_id=?", userID).
Cols("dataset.id").Find(&datasets)
return datasets
+}
+func GetTeamDatasetIdsByUserID(userID int64) []int64 {
+ var datasets []int64
+ _ = x.Table("dataset").Join("INNER", "team_repo", "dataset.repo_id = team_repo.repo_id").
+ Join("INNER", "team_user", "team_repo.team_id=team_user.team_id and team_user.uid=?", userID).
+ Cols("dataset.id").Find(&datasets)
+ return datasets
}
diff --git a/models/dataset_star.go b/models/dataset_star.go
index 4b22c2855..2cbd9dc8d 100644
--- a/models/dataset_star.go
+++ b/models/dataset_star.go
@@ -68,3 +68,10 @@ func isDatasetStaring(e Engine, userID, datasetID int64) bool {
has, _ := e.Get(&DatasetStar{0, userID, datasetID, 0})
return has
}
+
+func GetDatasetIdsStarByUser(userID int64) []int64 {
+ var datasets []int64
+ _ = x.Table("dataset_star").Where("uid=?", userID).
+ Cols("dataset_star.dataset_id").Find(&datasets)
+ return datasets
+}
diff --git a/models/repo.go b/models/repo.go
index db2694617..4770e5415 100755
--- a/models/repo.go
+++ b/models/repo.go
@@ -2749,15 +2749,10 @@ func ReadLatestFileInRepo(userName, repoName, refName, treePath string) (*RepoFi
log.Error("ReadLatestFileInRepo: Close: %v", err)
}
}()
-
- buf := make([]byte, 1024)
- n, _ := reader.Read(buf)
- if n >= 0 {
- buf = buf[:n]
- }
+ d, _ := ioutil.ReadAll(reader)
commitId := ""
if blob != nil {
commitId = fmt.Sprint(blob.ID)
}
- return &RepoFile{CommitId: commitId, Content: buf}, nil
+ return &RepoFile{CommitId: commitId, Content: d}, nil
}
diff --git a/models/user_business_analysis.go b/models/user_business_analysis.go
index 4cd3539d7..e058c0df8 100644
--- a/models/user_business_analysis.go
+++ b/models/user_business_analysis.go
@@ -955,6 +955,8 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time,
return err
}
userNewAddActivity := make(map[int64]map[int64]int64)
+ userAcitvateJsonMap := make(map[int64]map[int64]int64)
+ userCurrentDayRegistMap := make(map[int64]map[int64]int64)
ParaWeight := getParaWeight()
userMetrics := make(map[string]int)
var indexTotal int64
@@ -1028,7 +1030,10 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time,
log.Info("has activity." + userRecord.Name)
addUserToMap(userNewAddActivity, userRecord.CreatedUnix, dateRecord.ID)
}
-
+ if userRecord.IsActive {
+ addUserToMap(userAcitvateJsonMap, userRecord.CreatedUnix, dateRecord.ID)
+ }
+ addUserToMap(userCurrentDayRegistMap, userRecord.CreatedUnix, dateRecord.ID)
}
indexTotal += PAGE_SIZE
@@ -1064,36 +1069,61 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time,
}
statictisSess.Insert(&useMetrics)
//update new user activity
- updateNewUserAcitivity(userNewAddActivity, statictisSess)
+ updateNewUserAcitivity(userNewAddActivity, userAcitvateJsonMap, userCurrentDayRegistMap, statictisSess)
return nil
}
-func updateNewUserAcitivity(currentUserActivity map[int64]map[int64]int64, statictisSess *xorm.Session) {
- for key, value := range currentUserActivity {
+func updateNewUserAcitivity(currentUserActivity map[int64]map[int64]int64, userAcitvateJsonMap map[int64]map[int64]int64, userCurrentDayRegistMap map[int64]map[int64]int64, statictisSess *xorm.Session) {
+ for key, value := range userCurrentDayRegistMap {
useMetrics := &UserMetrics{CountDate: key}
+ userAcitvateValue := userAcitvateJsonMap[key]
+ HuodongValue := currentUserActivity[key]
has, err := statictisSess.Get(useMetrics)
if err == nil && has {
- userIdArrays := strings.Split(useMetrics.HasActivityUserJson, ",")
- for _, userIdStr := range userIdArrays {
- userIdInt, err := strconv.ParseInt(userIdStr, 10, 64)
- if err == nil {
- value[userIdInt] = userIdInt
- }
- }
- userIdArray := ""
- for _, tmpValue := range value {
- userIdArray += fmt.Sprint(tmpValue) + ","
- }
- useMetrics.HasActivityUser = len(value)
- if len(userIdArray) > 0 {
- useMetrics.HasActivityUserJson = userIdArray[0 : len(userIdArray)-1]
- }
- updateSql := "update public.user_metrics set has_activity_user_json='" + useMetrics.HasActivityUserJson + "',regist_activity_user=" + fmt.Sprint(useMetrics.HasActivityUser) + " where count_date=" + fmt.Sprint(key)
+ ActivityUserArray, HuodongTotal := setUniqueUserId(useMetrics.HasActivityUserJson, HuodongValue)
+ useMetrics.HasActivityUser = HuodongTotal
+ useMetrics.HasActivityUserJson = ActivityUserArray
+
+ useMetrics.CurrentDayRegistUser = len(value)
+
+ RegistUserArray, lenRegistUser := setUniqueUserId(useMetrics.ActivityUserJson, userAcitvateValue)
+ useMetrics.ActivityUserJson = RegistUserArray
+ useMetrics.ActivateRegistUser = lenRegistUser
+
+ updateSql := "update public.user_metrics set has_activity_user_json='" + useMetrics.HasActivityUserJson +
+ "',regist_activity_user=" + fmt.Sprint(useMetrics.HasActivityUser) +
+ ",activity_user_json='" + useMetrics.ActivityUserJson + "'" +
+ ",activate_regist_user=" + fmt.Sprint(useMetrics.ActivateRegistUser) +
+ ",not_activate_regist_user=" + fmt.Sprint(useMetrics.CurrentDayRegistUser-useMetrics.ActivateRegistUser) +
+ ",current_day_regist_user=" + fmt.Sprint(useMetrics.CurrentDayRegistUser) +
+ " where count_date=" + fmt.Sprint(key)
+
statictisSess.Exec(updateSql)
}
}
}
+func setUniqueUserId(jsonString string, value map[int64]int64) (string, int) {
+ if value == nil {
+ value = make(map[int64]int64, 0)
+ }
+ userIdArrays := strings.Split(jsonString, ",")
+ for _, userIdStr := range userIdArrays {
+ userIdInt, err := strconv.ParseInt(userIdStr, 10, 64)
+ if err == nil {
+ value[userIdInt] = userIdInt
+ }
+ }
+ userIdArray := ""
+ for _, tmpValue := range value {
+ userIdArray += fmt.Sprint(tmpValue) + ","
+ }
+ if len(userIdArray) > 0 {
+ return userIdArray[0 : len(userIdArray)-1], len(value)
+ }
+ return userIdArray, len(value)
+}
+
func addUserToMap(currentUserActivity map[int64]map[int64]int64, registDate timeutil.TimeStamp, userId int64) {
CountDateTime := time.Date(registDate.Year(), registDate.AsTime().Month(), registDate.AsTime().Day(), 0, 1, 0, 0, registDate.AsTime().Location())
CountDate := CountDateTime.Unix()
@@ -1104,7 +1134,6 @@ func addUserToMap(currentUserActivity map[int64]map[int64]int64, registDate time
} else {
currentUserActivity[CountDate][userId] = userId
}
-
}
func setUserMetrics(userMetrics map[string]int, user *User, start_time int64, end_time int64, dateRecord UserBusinessAnalysis) {
diff --git a/models/user_business_struct.go b/models/user_business_struct.go
index fec361bca..870a64bc7 100644
--- a/models/user_business_struct.go
+++ b/models/user_business_struct.go
@@ -467,11 +467,11 @@ type UserAnalysisPara struct {
type UserMetrics struct {
CountDate int64 `xorm:"pk"`
- ActivateRegistUser int `xorm:"NOT NULL DEFAULT 0"`
- NotActivateRegistUser int `xorm:"NOT NULL DEFAULT 0"`
- ActivateIndex float64 `xorm:"NOT NULL DEFAULT 0"`
- RegistActivityUser int `xorm:"NOT NULL DEFAULT 0"`
- HasActivityUser int `xorm:"NOT NULL DEFAULT 0"`
+	ActivateRegistUser int `xorm:"NOT NULL DEFAULT 0"` // users who registered and activated that day
+	NotActivateRegistUser int `xorm:"NOT NULL DEFAULT 0"` // users who registered that day but did not activate
+	ActivateIndex float64 `xorm:"NOT NULL DEFAULT 0"` // activation ratio
+	RegistActivityUser int `xorm:"NOT NULL DEFAULT 0"` // of that day's activated registrants, those with contribution activity
+	HasActivityUser int `xorm:"NOT NULL DEFAULT 0"` // users with contribution activity that day
TotalUser int `xorm:"NOT NULL DEFAULT 0"`
TotalRegistUser int `xorm:"-"`
TotalActivateRegistUser int `xorm:"NOT NULL DEFAULT 0"`
@@ -480,5 +480,7 @@ type UserMetrics struct {
DisplayDate string `xorm:"-"`
DataDate string `xorm:"NULL"`
DaysForMonth int `xorm:"NOT NULL DEFAULT 0"`
- HasActivityUserJson string `xorm:"text NULL"`
+	HasActivityUserJson string `xorm:"text NULL"` // list of user ids with contribution activity
+	ActivityUserJson string `xorm:"text NULL"` // list of activated user ids
+	CurrentDayRegistUser int `xorm:"NOT NULL DEFAULT 0"` // users registered that day
}
diff --git a/modules/auth/grampus.go b/modules/auth/grampus.go
new file mode 100755
index 000000000..ebf0defde
--- /dev/null
+++ b/modules/auth/grampus.go
@@ -0,0 +1,26 @@
+package auth
+
+import (
+ "gitea.com/macaron/binding"
+ "gitea.com/macaron/macaron"
+)
+
+type CreateGrampusTrainJobForm struct {
+ DisplayJobName string `form:"display_job_name" binding:"Required"`
+ JobName string `form:"job_name" binding:"Required"`
+ Attachment string `form:"attachment" binding:"Required"`
+ BootFile string `form:"boot_file" binding:"Required"`
+ ImageID string `form:"image_id" binding:"Required"`
+ FlavorID string `form:"flavor" binding:"Required"`
+ Params string `form:"run_para_list" binding:"Required"`
+ Description string `form:"description"`
+ BranchName string `form:"branch_name" binding:"Required"`
+ FlavorName string `form:"flavor_name" binding:"Required"`
+ EngineName string `form:"engine_name" binding:"Required"`
+ WorkServerNumber int `form:"work_server_number" binding:"Required"`
+ Image string `form:"image"`
+}
+
+func (f *CreateGrampusTrainJobForm) Validate(ctx *macaron.Context, errs binding.Errors) binding.Errors {
+ return validate(errs, ctx.Data, f, ctx.Locale)
+}
diff --git a/modules/auth/wechat/auto_reply.go b/modules/auth/wechat/auto_reply.go
new file mode 100644
index 000000000..440f6de6a
--- /dev/null
+++ b/modules/auth/wechat/auto_reply.go
@@ -0,0 +1,139 @@
+package wechat
+
+import (
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "encoding/json"
+ "github.com/patrickmn/go-cache"
+ "strings"
+ "time"
+)
+
+var WechatReplyCache = cache.New(2*time.Minute, 1*time.Minute)
+
+const (
+ WECHAT_REPLY_CACHE_KEY = "wechat_response"
+)
+
+const (
+ ReplyTypeText = "text"
+ ReplyTypeImage = "image"
+ ReplyTypeVoice = "voice"
+ ReplyTypeVideo = "video"
+ ReplyTypeMusic = "music"
+ ReplyTypeNews = "news"
+)
+
+type ReplyConfigType string
+
+const (
+ SubscribeReply ReplyConfigType = "subscribe"
+ AutoMsgReply ReplyConfigType = "autoMsg"
+)
+
+func (r ReplyConfigType) Name() string {
+ switch r {
+ case SubscribeReply:
+ return "subscribe"
+ case AutoMsgReply:
+ return "autoMsg"
+ }
+ return ""
+}
+
+func (r ReplyConfigType) TreePath() string {
+ switch r {
+ case SubscribeReply:
+ return setting.TreePathOfSubscribe
+ case AutoMsgReply:
+ return setting.TreePathOfAutoMsgReply
+ }
+ return ""
+}
+
+type WechatReplyContent struct {
+ Reply *ReplyContent
+ ReplyType string
+ KeyWords []string
+ IsFullMatch int
+}
+
+type ReplyContent struct {
+ Content string
+ MediaId string
+ Title string
+ Description string
+ MusicUrl string
+ HQMusicUrl string
+ ThumbMediaId string
+ Articles []ArticlesContent
+}
+
+func GetAutomaticReply(msg string) *WechatReplyContent {
+ r, err := LoadReplyFromCacheAndDisk(AutoMsgReply)
+ if err != nil {
+ return nil
+ }
+ if r == nil || len(r) == 0 {
+ return nil
+ }
+ for i := 0; i < len(r); i++ {
+ if r[i].IsFullMatch == 0 {
+ for _, v := range r[i].KeyWords {
+ if strings.Contains(msg, v) {
+ return r[i]
+ }
+ }
+ } else if r[i].IsFullMatch > 0 {
+ for _, v := range r[i].KeyWords {
+ if msg == v {
+ return r[i]
+ }
+ }
+ }
+ }
+ return nil
+
+}
+
+func loadReplyFromDisk(replyConfig ReplyConfigType) ([]*WechatReplyContent, error) {
+ log.Info("LoadReply from disk")
+ repo, err := models.GetRepositoryByOwnerAndAlias(setting.UserNameOfWechatReply, setting.RepoNameOfWechatReply)
+ if err != nil {
+ log.Error("get AutomaticReply repo failed, error=%v", err)
+ return nil, err
+ }
+ repoFile, err := models.ReadLatestFileInRepo(setting.UserNameOfWechatReply, repo.Name, setting.RefNameOfWechatReply, replyConfig.TreePath())
+ if err != nil {
+ log.Error("get AutomaticReply failed, error=%v", err)
+ return nil, err
+ }
+ res := make([]*WechatReplyContent, 0)
+ json.Unmarshal(repoFile.Content, &res)
+ if res == nil || len(res) == 0 {
+ return nil, err
+ }
+ return res, nil
+}
+
+func LoadReplyFromCacheAndDisk(replyConfig ReplyConfigType) ([]*WechatReplyContent, error) {
+ v, success := WechatReplyCache.Get(replyConfig.Name())
+ if success {
+ log.Info("LoadReply from cache,value = %v", v)
+ if v == nil {
+ return nil, nil
+ }
+ n := v.([]*WechatReplyContent)
+ return n, nil
+ }
+
+ content, err := loadReplyFromDisk(replyConfig)
+ if err != nil {
+ log.Error("LoadReply failed, error=%v", err)
+ WechatReplyCache.Set(replyConfig.Name(), nil, 30*time.Second)
+ return nil, err
+ }
+ WechatReplyCache.Set(replyConfig.Name(), content, 60*time.Second)
+ return content, nil
+}
diff --git a/modules/auth/wechat/client.go b/modules/auth/wechat/client.go
index 6734977a1..9ed4b543f 100644
--- a/modules/auth/wechat/client.go
+++ b/modules/auth/wechat/client.go
@@ -17,7 +17,8 @@ var (
const (
GRANT_TYPE = "client_credential"
ACCESS_TOKEN_PATH = "/cgi-bin/token"
- QR_CODE_Path = "/cgi-bin/qrcode/create"
+ QR_CODE_PATH = "/cgi-bin/qrcode/create"
+ GET_MATERIAL_PATH = "/cgi-bin/material/batchget_material"
ACTION_QR_STR_SCENE = "QR_STR_SCENE"
ERR_CODE_ACCESSTOKEN_EXPIRE = 42001
@@ -40,6 +41,11 @@ type QRCodeRequest struct {
Action_info ActionInfo `json:"action_info"`
Expire_seconds int `json:"expire_seconds"`
}
+type MaterialRequest struct {
+ Type string `json:"type"`
+ Offset int `json:"offset"`
+ Count int `json:"count"`
+}
type ActionInfo struct {
Scene Scene `json:"scene"`
@@ -97,7 +103,7 @@ func callQRCodeCreate(sceneStr string) (*QRCodeResponse, bool) {
SetQueryParam("access_token", GetWechatAccessToken()).
SetBody(bodyJson).
SetResult(&result).
- Post(setting.WechatApiHost + QR_CODE_Path)
+ Post(setting.WechatApiHost + QR_CODE_PATH)
if err != nil {
log.Error("create QR code failed,e=%v", err)
return nil, false
@@ -113,6 +119,37 @@ func callQRCodeCreate(sceneStr string) (*QRCodeResponse, bool) {
return &result, false
}
+//getMaterial
+// api doc: https://developers.weixin.qq.com/doc/offiaccount/Asset_Management/Get_materials_list.html
+func getMaterial(mType string, offset, count int) (interface{}, bool) {
+ client := getWechatRestyClient()
+
+ body := &MaterialRequest{
+ Type: mType,
+ Offset: offset,
+ Count: count,
+ }
+ bodyJson, _ := json.Marshal(body)
+ r, err := client.R().
+ SetHeader("Content-Type", "application/json").
+ SetQueryParam("access_token", GetWechatAccessToken()).
+ SetBody(bodyJson).
+ Post(setting.WechatApiHost + GET_MATERIAL_PATH)
+ if err != nil {
+		log.Error("get material failed,e=%v", err)
+ return nil, false
+ }
+ a := r.Body()
+ resultMap := make(map[string]interface{}, 0)
+ json.Unmarshal(a, &resultMap)
+	errcode := fmt.Sprint(resultMap["errcode"]) // errcode decodes as float64; normalize both sides to string
+	if errcode == fmt.Sprint(ERR_CODE_ACCESSTOKEN_EXPIRE) || errcode == fmt.Sprint(ERR_CODE_ACCESSTOKEN_INVALID) {
+ return nil, true
+ }
+ log.Info("%v", r)
+ return &resultMap, false
+}
+
func getErrorCodeFromResponse(r *resty.Response) int {
a := r.Body()
resultMap := make(map[string]interface{}, 0)
diff --git a/modules/auth/wechat/event_handle.go b/modules/auth/wechat/event_handle.go
index b40ab3101..27edf7343 100644
--- a/modules/auth/wechat/event_handle.go
+++ b/modules/auth/wechat/event_handle.go
@@ -18,7 +18,7 @@ import (
//
+ {{$.i18n.Tr "repo.cloudbrain_task"}} + | +
+
+ {{.DisplayJobName}}
+
+ |
+
+ {{$.i18n.Tr "repo.modelarts.status"}} + | + +
+
+ {{.Status}}
+
+ |
+
+ {{$.i18n.Tr "repo.modelarts.run_version"}} + | + +
+
+ {{.VersionName}}
+
+ |
+
+ {{$.i18n.Tr "repo.modelarts.train_job.start_time"}} + | + +
+
+
+ {{if not (eq .StartTime 0)}}
+ {{TimeSinceUnix1 .StartTime}}
+ {{else}}
+ {{TimeSinceUnix1 .CreatedUnix}}
+ {{end}}
+
+ |
+
+ {{$.i18n.Tr "repo.modelarts.train_job.dura_time"}} + | + +
+
+ {{.TrainJobDuration}}
+
+ |
+
+ {{$.i18n.Tr "repo.modelarts.train_job.standard"}} + | + +
+
+ {{.FlavorName}}
+
+ |
+
+ {{$.i18n.Tr "repo.modelarts.train_job.compute_node"}} + | +
+
+ {{.WorkServerNumber}}
+
+ |
+
+ {{$.i18n.Tr "repo.modelarts.train_job.AI_driver"}} + | +
+
+ {{.EngineName}}
+
+ |
+
+ {{$.i18n.Tr "repo.modelarts.code_version"}} + | + +
+
+ {{.BranchName}}
+
+ |
+
+ {{$.i18n.Tr "repo.modelarts.train_job.start_file"}} + | + +
+
+ {{.BootFile}}
+
+ |
+
+ {{$.i18n.Tr "repo.modelarts.train_job.train_dataset"}} + | + +
+
+ {{.DatasetName}}
+
+ |
+
+ {{$.i18n.Tr "repo.modelarts.train_job.run_parameter"}} + | + +
+
+ {{.Parameters}}
+
+ |
+
+ {{$.i18n.Tr "repo.grampus.train_job.ai_center"}} + | + +
+
+ {{$.ai_center}}
+
+ |
+
+ {{$.i18n.Tr "repo.modelarts.train_job.description"}} + | + +
+
+ {{.Description}}
+
+ |
+
{{.i18n.Tr "cloudbrain.task_delete_confirm"}}
+