
Merge branch 'V20211101' of git.openi.org.cn:OpenI/aiforge into brainscore

pull/611/head
lewis 3 years ago
commit 40878360ca
34 changed files with 1009 additions and 142 deletions
  1. +38   -0    custom/conf/app.ini.sample
  2. +4    -0    models/attachment.go
  3. +42   -18   models/cloudbrain.go
  4. +8    -1    models/dataset.go
  5. +1    -0    models/models.go
  6. +9    -0    models/repo.go
  7. +80   -27   models/repo_statistic.go
  8. +69   -0    models/summary_statistic.go
  9. +8    -1    models/topic.go
  10. +12   -0    models/user.go
  11. +67   -3    models/user_business_analysis.go
  12. +1    -1    modules/base/tool.go
  13. +2    -0    modules/context/context.go
  14. +11   -0    modules/cron/tasks_basic.go
  15. +83   -0    modules/normalization/normalization.go
  16. +77   -0    modules/setting/setting.go
  17. +47   -1    modules/storage/obs.go
  18. +48   -0    options/locale/locale_zh-CN.ini
  19. BIN         public/img/org-jd@2x-80.jpg
  20. +2    -2    public/self/labelTaskPage.js
  21. +1    -1    routers/home.go
  22. +66   -10   routers/repo/attachment.go
  23. +31   -11   routers/repo/cloudbrain.go
  24. +5    -2    routers/repo/modelarts.go
  25. +167  -39   routers/repo/repo_statistic.go
  26. +94   -0    routers/repo/repo_summary_statistic.go
  27. +9    -5    routers/routes/routes.go
  28. +6    -2    templates/explore/dataset_list.tmpl
  29. +2    -2    templates/repo/cloudbrain/index.tmpl
  30. +3    -3    templates/repo/cloudbrain/new.tmpl
  31. +1    -1    templates/repo/datasets/dataset_list.tmpl
  32. +1    -1    templates/repo/datasets/label/index.tmpl
  33. +1    -1    templates/repo/modelarts/new.tmpl
  34. +13   -10   web_src/js/components/EditTopics.vue

+38 -0  custom/conf/app.ini.sample

@@ -1102,3 +1102,41 @@ PROJECT_NAME = cn-south-222_test
USERNAME = test1
PASSWORD = Qizhi@test.
DOMAIN = cn-south-222

[radar_map]
impact=0.3
impact_watch=0.1
impact_star=0.3
impact_fork=0.3
impact_code_download=0.2
impact_comments=0.1
impact_browser=0.1

completeness=0.1
completeness_issues_closed=0.2
completeness_releases=0.3
completeness_develop_age=0.1
completeness_dataset=0.1
completeness_model=0.1
completeness_wiki=0.1

liveness=0.3
liveness_commit=0.2
liveness_issue=0.2
liveness_pr=0.2
liveness_release=0.4

project_health=0.1
project_health_issue_complete_ratio=100

team_health=0.1
team_health_contributors=0.2
team_health_key_contributors=0.6
team_health_contributors_added=0.2

growth=0.1
growth_code_lines=0.2
growth_issue=0.2
growth_contributors=0.2
growth_commit=0.2
growth_comments=0.2
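
These weights drive the new project "radar" score: each impact_*/completeness_*/... sub-key weights one raw counter inside its dimension (see modules/normalization/normalization.go below), and the six top-level keys weight the normalized dimensions into the overall total. As a rough sketch with the defaults above (illustrative only, not code from this commit):

// Sketch: final radar score as combined by normalization.GetRadarValue, after each
// dimension has been min-max normalized to 0..100 across all repositories.
func radarTotal(impact, completeness, liveness, projectHealth, teamHealth, growth float64) float64 {
	return 0.3*impact + 0.1*completeness + 0.3*liveness +
		0.1*projectHealth + 0.1*teamHealth + 0.1*growth
}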

+4 -0  models/attachment.go

@@ -473,3 +473,7 @@ func GetAttachmentSizeByDatasetID(datasetID int64) (int64, error) {

return total, nil
}

func GetAllAttachmentSize() (int64, error) {
return x.SumInt(&Attachment{}, "size")
}

+42 -18  models/cloudbrain.go

@@ -151,23 +151,42 @@ type TaskPod struct {
TaskRoleStatus struct {
Name string `json:"name"`
} `json:"taskRoleStatus"`
TaskStatuses []struct {
TaskIndex int `json:"taskIndex"`
PodUID string `json:"podUid"`
PodIP string `json:"podIp"`
PodName string `json:"podName"`
ContainerID string `json:"containerId"`
ContainerIP string `json:"containerIp"`
ContainerGpus string `json:"containerGpus"`
State string `json:"state"`
StartAt time.Time `json:"startAt"`
FinishedAt time.Time `json:"finishedAt"`
ExitCode int `json:"exitCode"`
ExitDiagnostics string `json:"exitDiagnostics"`
RetriedCount int `json:"retriedCount"`
StartTime string
FinishedTime string
} `json:"taskStatuses"`
//TaskStatuses []struct {
// TaskIndex int `json:"taskIndex"`
// PodUID string `json:"podUid"`
// PodIP string `json:"podIp"`
// PodName string `json:"podName"`
// ContainerID string `json:"containerId"`
// ContainerIP string `json:"containerIp"`
// ContainerGpus string `json:"containerGpus"`
// State string `json:"state"`
// StartAt time.Time `json:"startAt"`
// FinishedAt time.Time `json:"finishedAt"`
// ExitCode int `json:"exitCode"`
// ExitDiagnostics string `json:"exitDiagnostics"`
// RetriedCount int `json:"retriedCount"`
// StartTime string
// FinishedTime string
//} `json:"taskStatuses"`
TaskStatuses []TaskStatuses `json:"taskStatuses"`
}

type TaskStatuses struct {
TaskIndex int `json:"taskIndex"`
PodUID string `json:"podUid"`
PodIP string `json:"podIp"`
PodName string `json:"podName"`
ContainerID string `json:"containerId"`
ContainerIP string `json:"containerIp"`
ContainerGpus string `json:"containerGpus"`
State string `json:"state"`
StartAt time.Time `json:"startAt"`
FinishedAt time.Time `json:"finishedAt"`
ExitCode int `json:"exitCode"`
ExitDiagnostics string `json:"exitDiagnostics"`
RetriedCount int `json:"retriedCount"`
StartTime string
FinishedTime string
}

type TaskInfo struct {
@@ -255,6 +274,11 @@ func ConvertToJobResultPayload(input map[string]interface{}) (JobResultPayload,
err := json.Unmarshal(data, &jobResultPayload)
jobResultPayload.JobStatus.StartTime = time.Unix(jobResultPayload.JobStatus.CreatedTime/1000, 0).Format("2006-01-02 15:04:05")
jobResultPayload.JobStatus.EndTime = time.Unix(jobResultPayload.JobStatus.CompletedTime/1000, 0).Format("2006-01-02 15:04:05")

if jobResultPayload.JobStatus.State == string(JobWaiting) {
jobResultPayload.JobStatus.StartTime = "-"
jobResultPayload.JobStatus.EndTime = "-"
}
return jobResultPayload, err
}

@@ -674,7 +698,7 @@ func GetCloudbrainByName(jobName string) (*Cloudbrain, error) {
}

func CanDelJob(isSigned bool, user *User, job *CloudbrainInfo) bool {
if !isSigned || job.Status != string(JobStopped) {
if !isSigned || (job.Status != string(JobStopped) && job.Status != string(JobFailed) && job.Status != string(ModelArtsStartFailed) && job.Status != string(ModelArtsCreateFailed)){
return false
}
repo, err := GetRepositoryByID(job.RepoID)


+8 -1  models/dataset.go

@@ -139,7 +139,14 @@ func SearchDatasetCondition(opts *SearchDatasetOptions) builder.Cond {
if opts.IncludePublic {
cond = cond.And(builder.Eq{"dataset.status": DatasetStatusPublic})
if opts.OwnerID > 0 {
cond = cond.Or(builder.Eq{"repository.owner_id": opts.OwnerID})
if len(opts.Keyword) == 0 {
cond = cond.Or(builder.Eq{"repository.owner_id": opts.OwnerID})
} else {
subCon := builder.NewCond()
subCon = subCon.And(builder.Eq{"repository.owner_id": opts.OwnerID}, builder.Like{"dataset.title", opts.Keyword})
cond = cond.Or(subCon)

}
}
} else if opts.OwnerID > 0 {
cond = cond.And(builder.Eq{"repository.owner_id": opts.OwnerID})


+1 -0  models/models.go

@@ -137,6 +137,7 @@ func init() {

tablesStatistic = append(tablesStatistic,
new(RepoStatistic),
new(SummaryStatistic),
new(UserBusinessAnalysis),
)



+9 -0  models/repo.go

@@ -1430,6 +1430,15 @@ func GetAllRepositoriesByFilterCols(columns ...string) ([]*Repository, error) {

}

func GetAllRepositoriesCount() (int64, error) {
repo := new(Repository)
return x.Count(repo)
}

func GetAllRepositoriesSize() (int64, error) {
return x.SumInt(&Repository{}, "size")
}

func updateRepository(e Engine, repo *Repository, visibilityChanged bool) (err error) {
repo.LowerName = strings.ToLower(repo.Name)



+80 -27  models/repo_statistic.go

@@ -1,38 +1,62 @@
package models

import (
"code.gitea.io/gitea/modules/timeutil"
"fmt"
"time"

"code.gitea.io/gitea/modules/timeutil"
)

// RepoStatistic statistic info of all repository
type RepoStatistic struct {
ID int64 `xorm:"pk autoincr"`
RepoID int64 `xorm:"unique(s) NOT NULL"`
Date string `xorm:"unique(s) NOT NULL"`
NumWatches int64 `xorm:"NOT NULL DEFAULT 0"`
NumStars int64 `xorm:"NOT NULL DEFAULT 0"`
NumForks int64 `xorm:"NOT NULL DEFAULT 0"`
NumDownloads int64 `xorm:"NOT NULL DEFAULT 0"`
NumComments int64 `xorm:"NOT NULL DEFAULT 0"`
NumVisits int64 `xorm:"NOT NULL DEFAULT 0"`
NumClosedIssues int64 `xorm:"NOT NULL DEFAULT 0"`
NumVersions int64 `xorm:"NOT NULL DEFAULT 0"`
//develop months
NumDevMonths int64 `xorm:"NOT NULL DEFAULT 0"`
RepoSize int64 `xorm:"NOT NULL DEFAULT 0"`
DatasetSize int64 `xorm:"NOT NULL DEFAULT 0"`
NumModels int64 `xorm:"NOT NULL DEFAULT 0"`
NumWikiViews int64 `xorm:"NOT NULL DEFAULT 0"`
NumCommits int64 `xorm:"NOT NULL DEFAULT 0"`
NumIssues int64 `xorm:"NOT NULL DEFAULT 0"`
NumPulls int64 `xorm:"NOT NULL DEFAULT 0"`
IssueFixedRate float32 `xorm:"NOT NULL"`
NumContributor int64 `xorm:"NOT NULL DEFAULT 0"`
NumKeyContributor int64 `xorm:"NOT NULL DEFAULT 0"`

CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`
ID int64 `xorm:"pk autoincr"`
RepoID int64 `xorm:"unique(s) NOT NULL"`
Date string `xorm:"unique(s) NOT NULL"`
NumWatches int64 `xorm:"NOT NULL DEFAULT 0"`
NumWatchesAdded int64 `xorm:"NOT NULL DEFAULT 0"`
NumStars int64 `xorm:"NOT NULL DEFAULT 0"`
NumStarsAdded int64 `xorm:"NOT NULL DEFAULT 0"`
NumForks int64 `xorm:"NOT NULL DEFAULT 0"`
NumForksAdded int64 `xorm:"NOT NULL DEFAULT 0"`
NumDownloads int64 `xorm:"NOT NULL DEFAULT 0"`
NumDownloadsAdded int64 `xorm:"NOT NULL DEFAULT 0"`
NumComments int64 `xorm:"NOT NULL DEFAULT 0"`
NumCommentsAdded int64 `xorm:"NOT NULL DEFAULT 0"`
NumVisits int64 `xorm:"NOT NULL DEFAULT 0"`
NumClosedIssues int64 `xorm:"NOT NULL DEFAULT 0"`
NumClosedIssuesAdded int64 `xorm:"NOT NULL DEFAULT 0"`
NumVersions int64 `xorm:"NOT NULL DEFAULT 0"`
NumDevMonths int64 `xorm:"NOT NULL DEFAULT 0"`
RepoSize int64 `xorm:"NOT NULL DEFAULT 0"`
DatasetSize int64 `xorm:"NOT NULL DEFAULT 0"`
NumModels int64 `xorm:"NOT NULL DEFAULT 0"`
NumWikiViews int64 `xorm:"NOT NULL DEFAULT 0"`
NumCommits int64 `xorm:"NOT NULL DEFAULT 0"`
NumCommitsAdded int64 `xorm:"NOT NULL DEFAULT 0"`
NumIssues int64 `xorm:"NOT NULL DEFAULT 0"`
NumIssuesAdded int64 `xorm:"NOT NULL DEFAULT 0"`
NumPulls int64 `xorm:"NOT NULL DEFAULT 0"`
NumPullsAdded int64 `xorm:"NOT NULL DEFAULT 0"`
IssueFixedRate float32 `xorm:"NOT NULL"`
NumContributor int64 `xorm:"NOT NULL DEFAULT 0"`
NumContributorAdded int64 `xorm:"NOT NULL DEFAULT 0"`
NumKeyContributor int64 `xorm:"NOT NULL DEFAULT 0"`

NumContributorsGrowth int64 `xorm:"NOT NULL DEFAULT 0"`
NumCommitsGrowth int64 `xorm:"NOT NULL DEFAULT 0"`
NumCommitLinesGrowth int64 `xorm:"NOT NULL DEFAULT 0"`
NumIssuesGrowth int64 `xorm:"NOT NULL DEFAULT 0"`
NumCommentsGrowth int64 `xorm:"NOT NULL DEFAULT 0"`

Impact float64 `xorm:"NOT NULL DEFAULT 0"`
Completeness float64 `xorm:"NOT NULL DEFAULT 0"`
Liveness float64 `xorm:"NOT NULL DEFAULT 0"`
ProjectHealth float64 `xorm:"NOT NULL DEFAULT 0"`
TeamHealth float64 `xorm:"NOT NULL DEFAULT 0"`
Growth float64 `xorm:"NOT NULL DEFAULT 0"`
RadarTotal float64 `xorm:"NOT NULL DEFAULT 0"`
CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`
}

func DeleteRepoStatDaily(date string) error {
@@ -55,6 +79,35 @@ func DeleteRepoStatDaily(date string) error {
return nil
}

func GetRepoStatisticByDate(date string) ([]*RepoStatistic, error) {
repoStatistics := make([]*RepoStatistic, 0)
err := xStatistic.Where("date = ?", date).Find(&repoStatistics)
return repoStatistics, err

}

func GetOneRepoStatisticBeforeTime(time time.Time) (*RepoStatistic, error) {
repoStatistics := make([]*RepoStatistic, 0)
err := xStatistic.Where("created_unix >= ?", time.Unix()).OrderBy("created_unix").Limit(1).Find(&repoStatistics)
if err != nil {
return nil, err
} else {
if len(repoStatistics) == 0 {
return nil, fmt.Errorf("the repo statistic record count is 0")
} else {
return repoStatistics[0], nil
}
}

}

func InsertRepoStat(repoStat *RepoStatistic) (int64, error) {
return xStatistic.Insert(repoStat)
}

func UpdateRepoStat(repoStat *RepoStatistic) error {
sql := "update repo_statistic set impact=?,completeness=?,liveness=?,project_health=?,team_health=?,growth=?,radar_total=? where repo_id=? and date=?"

_, err := xStatistic.Exec(sql, repoStat.Impact, repoStat.Completeness, repoStat.Liveness, repoStat.ProjectHealth, repoStat.TeamHealth, repoStat.Growth, repoStat.RadarTotal, repoStat.RepoID, repoStat.Date)
return err
}

+69 -0  models/summary_statistic.go

@@ -0,0 +1,69 @@
package models

import (
"fmt"

"code.gitea.io/gitea/modules/timeutil"
)

var DomainMap = map[string]int{
"大模型": 0,
"ai开发工具": 1,
"计算机视觉": 2,
"自然语言处理": 3,
"机器学习": 4,
"神经网络": 5,
"自动驾驶": 6,
"机器人": 7,
"联邦学习": 8,
"数据挖掘": 9,
"risc-v开发": 10,
}

type SummaryStatistic struct {
ID int64 `xorm:"pk autoincr"`
Date string `xorm:"unique(s) NOT NULL"`
NumUsers int64 `xorm:"NOT NULL DEFAULT 0"`
RepoSize int64 `xorm:"NOT NULL DEFAULT 0"`
DatasetSize int64 `xorm:"NOT NULL DEFAULT 0"`
NumOrganizations int64 `xorm:"NOT NULL DEFAULT 0"`
NumModels int64 `xorm:"NOT NULL DEFAULT 0"`
NumRepos int64 `xorm:"NOT NULL DEFAULT 0"`
NumRepoBigModel int `xorm:"NOT NULL DEFAULT 0"`
NumRepoAI int `xorm:"NOT NULL DEFAULT 0"`
NumRepoVision int `xorm:"NOT NULL DEFAULT 0"`
NumRepoNLP int `xorm:"NOT NULL DEFAULT 0"`
NumRepoML int `xorm:"NOT NULL DEFAULT 0"`
NumRepoNN int `xorm:"NOT NULL DEFAULT 0"`
NumRepoAutoDrive int `xorm:"NOT NULL DEFAULT 0"`
NumRepoRobot int `xorm:"NOT NULL DEFAULT 0"`
NumRepoLeagueLearn int `xorm:"NOT NULL DEFAULT 0"`
NumRepoDataMining int `xorm:"NOT NULL DEFAULT 0"`
NumRepoRISC int `xorm:"NOT NULL DEFAULT 0"`
CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`
}

func DeleteSummaryStatisticDaily(date string) error {
sess := xStatistic.NewSession()
defer sess.Close()
if err := sess.Begin(); err != nil {
return fmt.Errorf("Begin: %v", err)
}

if _, err := sess.Where("date = ?", date).Delete(&SummaryStatistic{}); err != nil {
return fmt.Errorf("Delete: %v", err)
}

if err := sess.Commit(); err != nil {
sess.Close()
return fmt.Errorf("Commit: %v", err)
}

sess.Close()
return nil
}

func InsertSummaryStatistic(summaryStatistic *SummaryStatistic) (int64, error) {
return xStatistic.Insert(summaryStatistic)
}

+8 -1  models/topic.go

@@ -98,6 +98,13 @@ func GetTopicByName(name string) (*Topic, error) {
return &topic, nil
}

func GetAllUsedTopics() ([]*Topic, error) {
topics := make([]*Topic, 0)
err := x.Where("repo_count > ?", 0).Find(&topics)
return topics, err

}

// addTopicByNameToRepo adds a topic name to a repo and increments the topic count.
// Returns topic after the addition
func addTopicByNameToRepo(e Engine, repoID int64, topicName string) (*Topic, error) {
@@ -178,7 +185,7 @@ func (opts *FindTopicOptions) toConds() builder.Cond {
}

if opts.Keyword != "" {
cond = cond.And(builder.Like{"topic.name", opts.Keyword})
cond = cond.And(builder.Like{"topic.name", strings.ToLower(opts.Keyword)})
}

return cond


+12 -0  models/user.go

@@ -2071,6 +2071,18 @@ func SyncExternalUsers(ctx context.Context, updateExisting bool) error {
return nil
}

func GetUsersCount() (int64, error) {
user := new(User)
return x.Where("type=0").Count(user)

}

func GetOrganizationsCount() (int64, error) {
user := new(User)
return x.Where("type=1").Count(user)

}

func GetBlockChainUnSuccessUsers() ([]*User, error) {
users := make([]*User, 0, 10)
err := x.Where("public_key = ''").


+67 -3  models/user_business_analysis.go

@@ -71,6 +71,49 @@ type UserBusinessAnalysis struct {
Name string `xorm:"NOT NULL"`
}

func QueryUserStaticData(startTime int64, endTime int64) []*UserBusinessAnalysis {
log.Info("query startTime =" + fmt.Sprint(startTime) + " endTime=" + fmt.Sprint(endTime))
statictisSess := xStatistic.NewSession()
defer statictisSess.Close()

statictisSess.Select("*").Table("user_business_analysis").Where(" count_date>=" + fmt.Sprint(startTime) + " and count_date<=" + fmt.Sprint(endTime)).OrderBy("count_date desc")

userBusinessAnalysisList := make([]*UserBusinessAnalysis, 0)
statictisSess.Find(&userBusinessAnalysisList)

resultMap := make(map[int64]*UserBusinessAnalysis)
log.Info("query result size=" + fmt.Sprint(len(userBusinessAnalysisList)))
for _, userRecord := range userBusinessAnalysisList {
if _, ok := resultMap[userRecord.ID]; !ok {
resultMap[userRecord.ID] = userRecord
} else {
resultMap[userRecord.ID].CodeMergeCount += userRecord.CodeMergeCount
resultMap[userRecord.ID].CommitCount += userRecord.CommitCount
resultMap[userRecord.ID].IssueCount += userRecord.IssueCount
resultMap[userRecord.ID].CommentCount += userRecord.CommentCount
resultMap[userRecord.ID].FocusRepoCount += userRecord.FocusRepoCount
resultMap[userRecord.ID].StarRepoCount += userRecord.StarRepoCount
resultMap[userRecord.ID].WatchedCount += userRecord.WatchedCount
resultMap[userRecord.ID].CommitCodeSize += userRecord.CommitCodeSize
resultMap[userRecord.ID].CommitDatasetSize += userRecord.CommitDatasetSize
resultMap[userRecord.ID].CommitModelCount += userRecord.CommitModelCount
resultMap[userRecord.ID].SolveIssueCount += userRecord.SolveIssueCount
resultMap[userRecord.ID].EncyclopediasCount += userRecord.EncyclopediasCount
resultMap[userRecord.ID].CreateRepoCount += userRecord.CreateRepoCount
resultMap[userRecord.ID].LoginCount += userRecord.LoginCount
}
}

userBusinessAnalysisReturnList := make([]*UserBusinessAnalysis, len(resultMap))
index := 0
for _, v := range resultMap {
userBusinessAnalysisReturnList[index] = v
index += 1
}
log.Info("return size=" + fmt.Sprint(len(userBusinessAnalysisReturnList)))
return userBusinessAnalysisReturnList
}

func CountData(wikiCountMap map[string]int) {
log.Info("start to count other user info data")
sess := x.NewSession()
@@ -92,7 +135,7 @@ func CountData(wikiCountMap map[string]int) {

CountDate := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 1, 0, 0, currentTimeNow.Location())

CodeMergeCountMap := queryAction(start_unix, end_unix, 11)
CodeMergeCountMap := queryPullRequest(start_unix, end_unix)
CommitCountMap := queryAction(start_unix, end_unix, 5)
IssueCountMap := queryAction(start_unix, end_unix, 10)

@@ -223,6 +266,28 @@ func querySolveIssue(start_unix int64, end_unix int64) map[int64]int {

}

func queryPullRequest(start_unix int64, end_unix int64) map[int64]int {
sess := x.NewSession()
defer sess.Close()

sess.Select("issue.*").Table("issue").
Join("inner", "pull_request", "issue.id=pull_request.issue_id").
Where("pull_request.merged_unix>=" + fmt.Sprint(start_unix) + " and pull_request.merged_unix<=" + fmt.Sprint(end_unix))

issueList := make([]*Issue, 0)
sess.Find(&issueList)
resultMap := make(map[int64]int)
log.Info("query issue(PR) size=" + fmt.Sprint(len(issueList)))
for _, issueRecord := range issueList {
if _, ok := resultMap[issueRecord.PosterID]; !ok {
resultMap[issueRecord.PosterID] = 1
} else {
resultMap[issueRecord.PosterID] += 1
}
}
return resultMap
}

func queryAction(start_unix int64, end_unix int64, actionType int64) map[int64]int {
sess := x.NewSession()
defer sess.Close()
@@ -341,7 +406,7 @@ func queryDatasetSize(start_unix int64, end_unix int64) map[int64]int {
func queryUserCreateRepo(start_unix int64, end_unix int64) map[int64]int {
sess := x.NewSession()
defer sess.Close()
sess.Select("id,owner_id,name").Table("repository").Where(" created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix))
sess.Select("id,owner_id,name").Table("repository").Where("is_fork=false and created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix))
repoList := make([]*Repository, 0)
sess.Find(&repoList)
resultMap := make(map[int64]int)
@@ -354,7 +419,6 @@ func queryUserCreateRepo(start_unix int64, end_unix int64) map[int64]int {
}
}
return resultMap

}

func subMonth(t1, t2 time.Time) (month int) {


+1 -1  modules/base/tool.go

@@ -224,7 +224,7 @@ func SizedAvatarLinkWithDomain(email string, size int) string {

// FileSize calculates the file size and generate user-friendly string.
func FileSize(s int64) string {
return humanize.IBytes(uint64(s))
return humanize.Bytes(uint64(s))
}

// PrettyNumber produces a string form of the given number in base 10 with


+2 -0  modules/context/context.go

@@ -310,9 +310,11 @@ func Contexter() macaron.Handler {
ctx.Data["SignedUserID"] = ctx.User.ID
ctx.Data["SignedUserName"] = ctx.User.Name
ctx.Data["IsAdmin"] = ctx.User.IsAdmin
c.Data["SignedUserName"] = ctx.User.Name
} else {
ctx.Data["SignedUserID"] = int64(0)
ctx.Data["SignedUserName"] = ""
c.Data["SignedUserName"] = ""
}

// If request sends files, parse them here otherwise the Query() can't be parsed and the CsrfToken will be invalid.


+11 -0  modules/cron/tasks_basic.go

@@ -174,6 +174,16 @@ func registerHandleRepoStatistic() {
})
}

func registerHandleSummaryStatistic() {
RegisterTaskFatal("handle_summary_statistic", &BaseConfig{
Enabled: true,
RunAtStart: false,
Schedule: "@daily",
}, func(ctx context.Context, _ *models.User, _ Config) error {
repo.SummaryStatistic()
return nil
})
}
func registerHandleUserStatistic() {
RegisterTaskFatal("handle_user_statistic", &BaseConfig{
Enabled: true,
@@ -202,4 +212,5 @@ func initBasicTasks() {

registerHandleRepoStatistic()
registerHandleUserStatistic()
registerHandleSummaryStatistic()
}

+83 -0  modules/normalization/normalization.go

@@ -0,0 +1,83 @@
package normalization

import (
"code.gitea.io/gitea/modules/setting"
)

func Normalization(value float64, minValue float64, maxValue float64) float64 {

min := int64(minValue * 100)
max := int64(maxValue * 100)

if min == max {
return 100.0
} else {
return 100 * (value - minValue) / (maxValue - minValue)
}

}

func GetRadarValue(impactValue float64, completeValue float64, livenessValue float64, projectHealthValue float64, teamHealthValue float64, growthValue float64) float64 {
return setting.RadarMap.Impact*impactValue +
setting.RadarMap.Completeness*completeValue +
setting.RadarMap.Liveness*livenessValue +
setting.RadarMap.ProjectHealth*projectHealthValue +
setting.RadarMap.TeamHealth*teamHealthValue +
setting.RadarMap.Growth*growthValue

}

func GetImpactInitValue(watch int64, star int64, fork int64, download int64, comments int64, browser int64) float64 {

return setting.RadarMap.ImpactWatch*float64(watch) +
setting.RadarMap.ImpactStar*float64(star) +
setting.RadarMap.ImpactFork*float64(fork) +
setting.RadarMap.ImpactCodeDownload*float64(download)*0.001 +
setting.RadarMap.ImpactComments*float64(comments) +
setting.RadarMap.ImpactBrowser*float64(browser)

}

func GetCompleteInitValue(issuesClosed int64, releases int64, developAge int64, dataset int64, model int64, wiki int64) float64 {

return setting.RadarMap.CompletenessIssuesClosed*float64(issuesClosed) +
setting.RadarMap.CompletenessReleases*float64(releases) +
setting.RadarMap.CompletenessDevelopAge*float64(developAge) +
setting.RadarMap.CompletenessDataset*float64(dataset/(1024*1024)) +
setting.RadarMap.CompletenessModel*float64(model) +
setting.RadarMap.CompletenessWiki*float64(wiki)

}

func GetLivenessInitValue(commits int64, issues int64, pr int64, release int64) float64 {

return setting.RadarMap.LivenessCommit*float64(commits) +
setting.RadarMap.LivenessIssue*float64(issues) +
setting.RadarMap.LivenessPR*float64(pr) +
setting.RadarMap.LivenessRelease*float64(release)

}

func GetProjectHealthInitValue(issueClosedRatio float32) float64 {

return setting.RadarMap.ProjectHealthIssueCompleteRatio * float64(issueClosedRatio)

}

func GetTeamHealthInitValue(contributors int64, keyContributors int64, newContributors int64) float64 {

return setting.RadarMap.TeamHealthContributors*float64(contributors) +
setting.RadarMap.TeamHealthKeyContributors*float64(keyContributors) +
setting.RadarMap.TeamHealthContributorsAdded*float64(newContributors)

}

func GetRepoGrowthInitValue(codelinesGrowth int64, issueGrowth int64, commitsGrowth int64, newContributors int64, commentsGrowth int64) float64 {

return setting.RadarMap.GrowthCodeLines*float64(codelinesGrowth) +
setting.RadarMap.GrowthIssue*float64(issueGrowth) +
setting.RadarMap.GrowthCommit*float64(commitsGrowth) +
setting.RadarMap.GrowthContributors*float64(newContributors) +
setting.RadarMap.GrowthComments*float64(commentsGrowth)

}
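
Normalization above is plain min-max scaling onto 0..100, applied per radar dimension across all repositories for a given day (see routers/repo/repo_statistic.go below); when min and max coincide, every repository gets 100. A worked example with made-up values:

// Sketch: if the daily impact values across repositories span 2.0 .. 10.0, a
// repository scoring 6.0 lands in the middle of the scale.
v := normalization.Normalization(6.0, 2.0, 10.0) // 100 * (6.0 - 2.0) / (10.0 - 2.0) = 50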

+77 -0  modules/setting/setting.go

@@ -495,6 +495,47 @@ var (
Index string
TimeField string
ElkTimeFormat string

//nginx proxy
PROXYURL string
RadarMap = struct {
Impact float64
ImpactWatch float64
ImpactStar float64
ImpactFork float64
ImpactCodeDownload float64
ImpactComments float64
ImpactBrowser float64

Completeness float64
CompletenessIssuesClosed float64
CompletenessReleases float64
CompletenessDevelopAge float64
CompletenessDataset float64
CompletenessModel float64
CompletenessWiki float64

Liveness float64
LivenessCommit float64
LivenessIssue float64
LivenessPR float64
LivenessRelease float64

ProjectHealth float64
ProjectHealthIssueCompleteRatio float64

TeamHealth float64
TeamHealthContributors float64
TeamHealthKeyContributors float64
TeamHealthContributorsAdded float64

Growth float64
GrowthCodeLines float64
GrowthIssue float64
GrowthContributors float64
GrowthCommit float64
GrowthComments float64
}{}
)

// DateLang transforms standard language locale name to corresponding value in datetime plugin.
@@ -1206,6 +1247,7 @@ func NewContext() {
Location = sec.Key("LOCATION").MustString("cn-south-222")
BasePath = sec.Key("BASE_PATH").MustString("attachment/")
UserBasePath = sec.Key("BASE_PATH_USER").MustString("users/")
PROXYURL = sec.Key("PROXY_URL").MustString("")

sec = Cfg.Section("modelarts")
ModelArtsHost = sec.Key("ENDPOINT").MustString("112.95.163.80")
@@ -1227,6 +1269,41 @@ func NewContext() {
Index = sec.Key("INDEX").MustString("filebeat-7.3.2*")
TimeField = sec.Key("TIMEFIELD").MustString(" @timestamptest")
ElkTimeFormat = sec.Key("ELKTIMEFORMAT").MustString("date_time")

sec = Cfg.Section("radar_map")

RadarMap.Impact = sec.Key("impact").MustFloat64(0.3)
RadarMap.ImpactWatch = sec.Key("impact_watch").MustFloat64(0.1)
RadarMap.ImpactStar = sec.Key("impact_star").MustFloat64(0.3)
RadarMap.ImpactFork = sec.Key("impact_fork").MustFloat64(0.3)
RadarMap.ImpactCodeDownload = sec.Key("impact_code_download").MustFloat64(0.2)
RadarMap.ImpactComments = sec.Key("impact_comments").MustFloat64(0.1)
RadarMap.ImpactBrowser = sec.Key("impact_browser").MustFloat64(0.1)
RadarMap.Completeness = sec.Key("completeness").MustFloat64(0.1)
RadarMap.CompletenessIssuesClosed = sec.Key("completeness_issues_closed").MustFloat64(0.2)
RadarMap.CompletenessReleases = sec.Key("completeness_releases").MustFloat64(0.3)
RadarMap.CompletenessDevelopAge = sec.Key("completeness_develop_age").MustFloat64(0.1)
RadarMap.CompletenessDataset = sec.Key("completeness_dataset").MustFloat64(0.1)
RadarMap.CompletenessModel = sec.Key("completeness_model").MustFloat64(0.1)
RadarMap.CompletenessWiki = sec.Key("completeness_wiki").MustFloat64(0.1)
RadarMap.Liveness = sec.Key("liveness").MustFloat64(0.3)
RadarMap.LivenessCommit = sec.Key("liveness_commit").MustFloat64(0.2)
RadarMap.LivenessIssue = sec.Key("liveness_issue").MustFloat64(0.2)
RadarMap.LivenessPR = sec.Key("liveness_pr").MustFloat64(0.2)
RadarMap.LivenessRelease = sec.Key("liveness_release").MustFloat64(0.4)
RadarMap.ProjectHealth = sec.Key("project_health").MustFloat64(0.1)
RadarMap.ProjectHealthIssueCompleteRatio = sec.Key("project_health_issue_complete_ratio").MustFloat64(100)
RadarMap.TeamHealth = sec.Key("team_health").MustFloat64(0.1)
RadarMap.TeamHealthContributors = sec.Key("team_health_contributors").MustFloat64(0.2)
RadarMap.TeamHealthKeyContributors = sec.Key("team_health_key_contributors").MustFloat64(0.6)
RadarMap.TeamHealthContributorsAdded = sec.Key("team_health_contributors_added").MustFloat64(0.2)
RadarMap.Growth = sec.Key("growth").MustFloat64(0.1)
RadarMap.GrowthCodeLines = sec.Key("growth_code_lines").MustFloat64(0.2)
RadarMap.GrowthIssue = sec.Key("growth_issue").MustFloat64(0.2)
RadarMap.GrowthContributors = sec.Key("growth_contributors").MustFloat64(0.2)
RadarMap.GrowthCommit = sec.Key("growth_commit").MustFloat64(0.2)
RadarMap.GrowthComments = sec.Key("growth_comments").MustFloat64(0.2)

}

func loadInternalToken(sec *ini.Section) string {


+47 -1  modules/storage/obs.go

@@ -5,11 +5,14 @@
package storage

import (
"github.com/unknwon/com"
"fmt"
"io"
"path"
"strconv"
"strings"

"github.com/unknwon/com"

"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/obs"
"code.gitea.io/gitea/modules/setting"
@@ -102,6 +105,49 @@ func CompleteObsMultiPartUpload(uuid, uploadID, fileName string) error {
return nil
}

func ObsMultiPartUpload(uuid string, uploadId string, partNumber int, fileName string, putBody io.ReadCloser) error {
input := &obs.UploadPartInput{}
input.Bucket = setting.Bucket
input.Key = strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")
input.UploadId = uploadId
input.PartNumber = partNumber
input.Body = putBody
output, err := ObsCli.UploadPart(input)
if err == nil {
log.Info("RequestId:%s\n", output.RequestId)
log.Info("ETag:%s\n", output.ETag)
return nil
} else {
if obsError, ok := err.(obs.ObsError); ok {
log.Info(obsError.Code)
log.Info(obsError.Message)
return obsError
} else {
log.Error("error:", err.Error())
return err
}
}

}

func ObsDownload(uuid string, fileName string) (io.ReadCloser, error) {
input := &obs.GetObjectInput{}
input.Bucket = setting.Bucket
input.Key = strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")
output, err := ObsCli.GetObject(input)
if err == nil {
log.Info("StorageClass:%s, ETag:%s, ContentType:%s, ContentLength:%d, LastModified:%s\n",
output.StorageClass, output.ETag, output.ContentType, output.ContentLength, output.LastModified)
return output.Body, nil
} else if obsError, ok := err.(obs.ObsError); ok {
fmt.Printf("Code:%s\n", obsError.Code)
fmt.Printf("Message:%s\n", obsError.Message)
return nil, obsError
} else {
return nil, err
}
}

func ObsGenMultiPartSignedUrl(uuid string, uploadId string, partNumber int, fileName string) (string, error) {

input := &obs.CreateSignedUrlInput{}


+48 -0  options/locale/locale_zh-CN.ini

@@ -776,6 +776,54 @@ cloudbrain_creator=创建者
cloudbrain_task=任务名称
cloudbrain_operate=操作
cloudbrain_status_createtime=状态/创建时间
cloudbrain_jobname_err=只能以小写字母或数字开头且只包含小写字母、数字、_和-,不能以_结尾,最长36个字符。

modelarts.notebook=调试作业
modelarts.train_job=训练作业
modelarts.train_job.new=新建作业
modelarts.train_job.basic_info=基本信息
modelarts.train_job.job_status=作业状态
modelarts.train_job.job_name=作业名称
modelarts.train_job.version=作业版本
modelarts.train_job.start_time=开始时间
modelarts.train_job.dura_time=持续时间
modelarts.train_job.description=作业描述
modelarts.train_job.parameter_setting=参数设置
modelarts.train_job.parameter_setting_info=参数信息
modelarts.train_job.fast_parameter_setting=一键式参数配置
modelarts.train_job.fast_parameter_setting_config=如您已保存过参数配置,可单击
modelarts.train_job.fast_parameter_setting_config_link=这里
modelarts.train_job.frames=常用框架
modelarts.train_job.algorithm_origin=算法来源
modelarts.train_job.AI_driver=AI引擎
modelarts.train_job.start_file=启动文件
modelarts.train_job.boot_file_helper=启动文件是您程序执行的入口文件,必须是以.py结尾的文件。
modelarts.train_job.boot_file_place=填写启动文件路径,默认为train.py
modelarts.train_job.dataset=数据集
modelarts.train_job.run_parameter=运行参数
modelarts.train_job.add_run_parameter=增加运行参数
modelarts.train_job.parameter_name=参数名
modelarts.train_job.parameter_value=参数值
modelarts.train_job.resource_setting=资源设置
modelarts.train_job.resource_setting_info=资源信息
modelarts.train_job.resource_pool=资源池
modelarts.train_job.resource_type=资源类型
modelarts.train_job.standard=规格
modelarts.train_job.NAS_address=NAS地址
modelarts.train_job.NAS_mount_path=NAS挂载路径
modelarts.train_job.query_whether_save_parameter=保存作业参数
modelarts.train_job.save_helper=保存当前作业的配置参数,后续您可以使用已保存的配置参数快速创建训练作业。
modelarts.train_job.common_frame=常用框架
modelarts.train_job.amount_of_compute_node=计算节点个数
modelarts.train_job.job_parameter_name=作业参数名称
modelarts.train_job.parameter_description=作业参数描述
modelarts.log=日志
modelarts.version_manage=版本管理
modelarts.back=返回
modelarts.train_job_para_admin=作业参数管理
modelarts.train_job_para.edit=编辑
modelarts.train_job_para.connfirm=确定


template.items=模板选项
template.git_content=Git数据(默认分支)


BIN  public/img/org-jd@2x-80.jpg

Before: 401 × 121, 33 kB; After: 201 × 80, 6.5 kB

+2 -2  public/self/labelTaskPage.js

@@ -309,11 +309,11 @@ function label_task_create(task_name, relate_task_id, taskType,assign_user_id,la
success:function(res){
console.log(res);
if(res.code == 0){
alert("自动标注任务创建成功!");
alert("标注任务创建成功!");
createsucced = true;
}
else{
alert("创建自动标注任务失败," + res.message);
alert("创建标注任务失败," + res.message);
createsucced = false;
}
},


+1 -1  routers/home.go

@@ -281,10 +281,10 @@ func ExploreDatasets(ctx *context.Context) {
}

pager := context.NewPagination(int(count), opts.PageSize, page, 5)
ctx.Data["Keyword"] = opts.Keyword
pager.SetDefaultParams(ctx)
ctx.Data["Page"] = pager

ctx.Data["Keyword"] = opts.Keyword
ctx.Data["Datasets"] = datasets
ctx.Data["Total"] = count
ctx.Data["PageIsDatasets"] = true


+66 -10  routers/repo/attachment.go

@@ -262,10 +262,15 @@ func GetAttachment(ctx *context.Context) {
return
}
} else {
url, err = storage.ObsGetPreSignedUrl(attach.UUID, attach.Name)
if err != nil {
ctx.ServerError("ObsGetPreSignedUrl", err)
return
if setting.PROXYURL != "" {
url = setting.PROXYURL + "/obs_proxy_download?uuid=" + attach.UUID + "&file_name=" + attach.Name
log.Info("return url=" + url)
} else {
url, err = storage.ObsGetPreSignedUrl(attach.UUID, attach.Name)
if err != nil {
ctx.ServerError("ObsGetPreSignedUrl", err)
return
}
}
}

@@ -273,7 +278,6 @@ func GetAttachment(ctx *context.Context) {
ctx.ServerError("Update", err)
return
}

http.Redirect(ctx.Resp, ctx.Req.Request, url, http.StatusMovedPermanently)
} else {
fr, err := storage.Attachments.Open(attach.RelativePath())
@@ -282,7 +286,6 @@ func GetAttachment(ctx *context.Context) {
return
}
defer fr.Close()

if err = increaseDownloadCount(attach, dataSet); err != nil {
ctx.ServerError("Update", err)
return
@@ -662,6 +665,53 @@ func NewMultipart(ctx *context.Context) {
}
}

func PutOBSProxyUpload(ctx *context.Context) {
uuid := ctx.Query("uuid")
uploadID := ctx.Query("uploadId")
partNumber := ctx.QueryInt("partNumber")
fileName := ctx.Query("file_name")

RequestBody := ctx.Req.Body()

if RequestBody == nil {
ctx.Error(500, fmt.Sprintf("FormFile: %v", RequestBody))
return
}

err := storage.ObsMultiPartUpload(uuid, uploadID, partNumber, fileName, RequestBody.ReadCloser())
if err != nil {
log.Info("upload error.")
}
}

func GetOBSProxyDownload(ctx *context.Context) {
uuid := ctx.Query("uuid")
fileName := ctx.Query("file_name")

body, err := storage.ObsDownload(uuid, fileName)
if err != nil {
log.Info("upload error.")
} else {
defer body.Close()
ctx.Resp.Header().Set("Content-Disposition", "attachment; filename="+fileName)
ctx.Resp.Header().Set("Content-Type", "application/octet-stream")
p := make([]byte, 1024)
var readErr error
var readCount int
// read the object content
for {
readCount, readErr = body.Read(p)
if readCount > 0 {
ctx.Resp.Write(p[:readCount])
//fmt.Printf("%s", p[:readCount])
}
if readErr != nil {
break
}
}
}
}

func GetMultipartUploadUrl(ctx *context.Context) {
uuid := ctx.Query("uuid")
uploadID := ctx.Query("uploadID")
@@ -689,10 +739,16 @@ func GetMultipartUploadUrl(ctx *context.Context) {
return
}
} else {
url, err = storage.ObsGenMultiPartSignedUrl(uuid, uploadID, partNumber, fileName)
if err != nil {
ctx.Error(500, fmt.Sprintf("ObsGenMultiPartSignedUrl failed: %v", err))
return
if setting.PROXYURL != "" {
url = setting.PROXYURL + "/obs_proxy_multipart?uuid=" + uuid + "&uploadId=" + uploadID + "&partNumber=" + fmt.Sprint(partNumber) + "&file_name=" + fileName
log.Info("return url=" + url)
} else {
url, err = storage.ObsGenMultiPartSignedUrl(uuid, uploadID, partNumber, fileName)
if err != nil {
ctx.Error(500, fmt.Sprintf("ObsGenMultiPartSignedUrl failed: %v", err))
return
}
log.Info("url=" + url)
}
}
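
With the new PROXY_URL setting, attachment traffic can be routed through this instance instead of handing out pre-signed OBS URLs; the URLs built above resolve to the /obs_proxy_download and /obs_proxy_multipart routes registered in routers/routes/routes.go further down. A minimal sketch of the branching, with placeholder values (not code from this commit):

// Sketch: the download URL GetAttachment redirects to, depending on PROXY_URL.
func obsDownloadURL(proxyURL, uuid, fileName string) string {
	if proxyURL != "" {
		// served by the new GET /obs_proxy_download route, which streams the
		// object through this instance via storage.ObsDownload
		return proxyURL + "/obs_proxy_download?uuid=" + uuid + "&file_name=" + fileName
	}
	// otherwise a time-limited pre-signed OBS URL from storage.ObsGetPreSignedUrl is used, as before
	return ""
}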



+31 -11  routers/repo/cloudbrain.go

@@ -40,6 +40,8 @@ var (
categories *models.Categories
)

var jobNamePattern = regexp.MustCompile(`^[a-z0-9][a-z0-9-_]{1,34}[a-z0-9-]$`)

// MustEnableDataset check if repository enable internal cb
func MustEnableCloudbrain(ctx *context.Context) {
if !ctx.Repo.CanRead(models.UnitTypeCloudBrain) {
@@ -200,6 +202,11 @@ func CloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) {
gpuQueue := setting.JobType
codePath := setting.JobPath + jobName + cloudbrain.CodeMountPath
resourceSpecId := form.ResourceSpecId
if !jobNamePattern.MatchString(jobName) {
ctx.RenderWithErr(ctx.Tr("repo.cloudbrain_jobname_err"), tplModelArtsNew, &form)
return
}

if jobType != string(models.JobTypeBenchmark) && jobType != string(models.JobTypeDebug) && jobType != string(models.JobTypeSnn4imagenet) && jobType != string(models.JobTypeBrainScore) {
log.Error("jobtype error:", jobType, ctx.Data["MsgID"])
@@ -281,17 +288,30 @@ func CloudBrainShow(ctx *context.Context) {
if result != nil {
jobRes, _ := models.ConvertToJobResultPayload(result.Payload)
jobRes.Resource.Memory = strings.ReplaceAll(jobRes.Resource.Memory, "Mi", "MB")
ctx.Data["result"] = jobRes
taskRoles := jobRes.TaskRoles
taskRes, _ := models.ConvertToTaskPod(taskRoles[cloudbrain.SubTaskName].(map[string]interface{}))
ctx.Data["taskRes"] = taskRes
task.Status = taskRes.TaskStatuses[0].State
task.ContainerID = taskRes.TaskStatuses[0].ContainerID
task.ContainerIp = taskRes.TaskStatuses[0].ContainerIP
err = models.UpdateJob(task)
if err != nil {
ctx.Data["error"] = err.Error()
if jobRes.JobStatus.State != string(models.JobFailed) {
taskRes, _ := models.ConvertToTaskPod(taskRoles[cloudbrain.SubTaskName].(map[string]interface{}))
ctx.Data["taskRes"] = taskRes
task.Status = taskRes.TaskStatuses[0].State
task.ContainerID = taskRes.TaskStatuses[0].ContainerID
task.ContainerIp = taskRes.TaskStatuses[0].ContainerIP
err = models.UpdateJob(task)
if err != nil {
ctx.Data["error"] = err.Error()
}
} else {
task.Status = jobRes.JobStatus.State
taskRes := models.TaskPod{TaskStatuses: []models.TaskStatuses{
{
State: jobRes.JobStatus.State,
},
}}
ctx.Data["taskRes"] = taskRes
jobRes.JobStatus.StartTime = time.Unix(int64(task.CreatedUnix), 0).Format("2006-01-02 15:04:05")
jobRes.JobStatus.EndTime = time.Unix(int64(task.UpdatedUnix), 0).Format("2006-01-02 15:04:05")
}

ctx.Data["result"] = jobRes
}

ctx.Data["task"] = task
@@ -351,7 +371,7 @@ func CloudBrainStop(ctx *context.Context) {
return
}

if task.Status == string(models.JobStopped) {
if task.Status == string(models.JobStopped) || task.Status == string(models.JobFailed) {
log.Error("the job(%s) has been stopped", task.JobName, ctx.Data["msgID"])
ctx.ServerError("the job has been stopped", errors.New("the job has been stopped"))
return
@@ -454,7 +474,7 @@ func CloudBrainDel(ctx *context.Context) {
return
}

if task.Status != string(models.JobStopped) {
if task.Status != string(models.JobStopped) && task.Status != string(models.JobFailed){
log.Error("the job(%s) has not been stopped", task.JobName, ctx.Data["msgID"])
ctx.ServerError("the job has not been stopped", errors.New("the job has not been stopped"))
return
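
The new jobNamePattern enforces the rule surfaced in the cloudbrain_jobname_err locale string earlier in this diff and in templates/repo/cloudbrain/new.tmpl below: 3 to 36 characters, lowercase letters, digits, "-" and "_" only, starting with a letter or digit and not ending with "_". Illustrative checks (names are made up):

// Sketch: names matched against jobNamePattern (^[a-z0-9][a-z0-9-_]{1,34}[a-z0-9-]$).
jobNamePattern.MatchString("resnet50-train") // true
jobNamePattern.MatchString("debug_job-01")   // true
jobNamePattern.MatchString("Train01")        // false: uppercase not allowed
jobNamePattern.MatchString("job_")           // false: must not end with "_"
jobNamePattern.MatchString("ab")             // false: shorter than 3 characters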


+5 -2  routers/repo/modelarts.go

@@ -100,7 +100,10 @@ func ModelArtsCreate(ctx *context.Context, form auth.CreateModelArtsForm) {
uuid := form.Attachment
description := form.Description
//repo := ctx.Repo.Repository

if !jobNamePattern.MatchString(jobName) {
ctx.RenderWithErr(ctx.Tr("repo.cloudbrain_jobname_err"), tplModelArtsNew, &form)
return
}
err := modelarts.GenerateTask(ctx, jobName, uuid, description)
if err != nil {
ctx.RenderWithErr(err.Error(), tplModelArtsNew, &form)
@@ -228,7 +231,7 @@ func ModelArtsDel(ctx *context.Context) {
return
}

if task.Status != string(models.JobStopped) {
if task.Status != string(models.ModelArtsCreateFailed) && task.Status != string(models.ModelArtsStartFailed) && task.Status != string(models.ModelArtsStopped){
log.Error("the job(%s) has not been stopped", task.JobName)
ctx.ServerError("the job has not been stopped", errors.New("the job has not been stopped"))
return


+167 -39  routers/repo/repo_statistic.go

@@ -3,6 +3,8 @@ package repo
import (
"time"

"code.gitea.io/gitea/modules/normalization"

"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/repository"
@@ -17,6 +19,8 @@ func RepoStatisticAuto() {

func RepoStatisticDaily(date string) {
log.Info("%s", date)
log.Info("begin Repo Statistic")
t, _ := time.Parse("2006-01-02", date)
if err := models.DeleteRepoStatDaily(date); err != nil {
log.Error("DeleteRepoStatDaily failed: %v", err.Error())
return
@@ -28,13 +32,25 @@ func RepoStatisticDaily(date string) {
return
}

for _, repo := range repos {
var reposRadar = make([]*models.RepoStatistic, 0)

var minRepoRadar models.RepoStatistic
var maxRepoRadar models.RepoStatistic

for i, repo := range repos {
log.Info("start statistic: %s", repo.Name)
var numDevMonths, numWikiViews, numContributor, numKeyContributor, numCommitsGrowth, numCommitLinesGrowth, numContributorsGrowth int64
repoGitStat, err := models.GetRepoKPIStats(repo)
if err != nil {
log.Error("GetRepoKPIStats failed: %s", repo.Name)
log.Error("failed statistic: %s", repo.Name)
continue
} else {
numDevMonths = repoGitStat.DevelopAge
numKeyContributor = repoGitStat.KeyContributors
numWikiViews = repoGitStat.WikiPages
numContributor = repoGitStat.Contributors
numCommitsGrowth = repoGitStat.CommitsAdded
numCommitLinesGrowth = repoGitStat.CommitLinesModified
numContributorsGrowth = repoGitStat.ContributorsAdded
}

var issueFixedRate float32
@@ -42,66 +58,178 @@ func RepoStatisticDaily(date string) {
issueFixedRate = float32(repo.NumClosedIssues) / float32(repo.NumIssues)
}

numVersions, err := models.GetReleaseCountByRepoID(repo.ID, models.FindReleasesOptions{})
var numVersions int64
numVersions, err = models.GetReleaseCountByRepoID(repo.ID, models.FindReleasesOptions{})
if err != nil {
log.Error("GetReleaseCountByRepoID failed: %s", repo.Name)
log.Error("failed statistic: %s", repo.Name)
continue
log.Error("GetReleaseCountByRepoID failed(%s): %v", repo.Name, err)
}

datasetSize, err := getDatasetSize(repo)
var datasetSize int64
datasetSize, err = getDatasetSize(repo)
if err != nil {
log.Error("getDatasetSize failed: %s", repo.Name)
log.Error("failed statistic: %s", repo.Name)
continue
log.Error("getDatasetSize failed(%s): %v", repo.Name, err)
}

numComments, err := models.GetCommentCountByRepoID(repo.ID)
var numComments int64
numComments, err = models.GetCommentCountByRepoID(repo.ID)
if err != nil {
log.Error("GetCommentCountByRepoID failed: %s", repo.Name)
log.Error("failed statistic: %s", repo.Name)
continue
log.Error("GetCommentCountByRepoID failed(%s): %v", repo.Name, err)
}

beginTime, endTime := getStatTime(date)
numVisits, err := repository.AppointProjectView(repo.OwnerName, repo.Name, beginTime, endTime)
var numVisits int
numVisits, err = repository.AppointProjectView(repo.OwnerName, repo.Name, beginTime, endTime)
if err != nil {
log.Error("Get numVisits failed", err)
numVisits = 0
log.Error("AppointProjectView failed(%s): %v", repo.Name, err)
}

repoStat := models.RepoStatistic{
RepoID: repo.ID,
Date: date,
NumWatches: int64(repo.NumWatches),
NumStars: int64(repo.NumStars),
NumDownloads: repo.CloneCnt,
NumComments: numComments,
NumVisits: int64(numVisits),
NumClosedIssues: int64(repo.NumClosedIssues),
NumVersions: numVersions,
NumDevMonths: repoGitStat.DevelopAge,
RepoSize: repo.Size,
DatasetSize: datasetSize,
NumModels: 0,
NumWikiViews: repoGitStat.WikiPages,
NumCommits: repo.NumCommit,
NumIssues: int64(repo.NumIssues),
NumPulls: int64(repo.NumPulls),
IssueFixedRate: issueFixedRate,
NumContributor: repoGitStat.Contributors,
NumKeyContributor: repoGitStat.KeyContributors,
RepoID: repo.ID,
Date: date,
NumWatches: int64(repo.NumWatches),
NumStars: int64(repo.NumStars),
NumDownloads: repo.CloneCnt,
NumComments: numComments,
NumVisits: int64(numVisits),
NumClosedIssues: int64(repo.NumClosedIssues),
NumVersions: numVersions,
NumDevMonths: numDevMonths,
RepoSize: repo.Size,
DatasetSize: datasetSize,
NumModels: 0,
NumWikiViews: numWikiViews,
NumCommits: repo.NumCommit,
NumIssues: int64(repo.NumIssues),
NumPulls: int64(repo.NumPulls),
IssueFixedRate: issueFixedRate,
NumContributor: numContributor,
NumKeyContributor: numKeyContributor,
NumCommitsGrowth: numCommitsGrowth,
NumCommitLinesGrowth: numCommitLinesGrowth,
NumContributorsGrowth: numContributorsGrowth,
}

dayBeforeDate := t.AddDate(0, 0, -1).Format("2006-01-02")
repoStatisticsBefore, err := models.GetRepoStatisticByDate(dayBeforeDate)

if err != nil {
log.Error("get data of day before the date failed ", err)
} else {
if len(repoStatisticsBefore) > 0 {
repoStatisticBefore := repoStatisticsBefore[0]
repoStat.NumWatchesAdded = repoStat.NumWatches - repoStatisticBefore.NumWatches
repoStat.NumStarsAdded = repoStat.NumStars - repoStatisticBefore.NumStars
repoStat.NumForksAdded = repoStat.NumForks - repoStatisticBefore.NumForks
repoStat.NumDownloadsAdded = repoStat.NumDownloads - repoStatisticBefore.NumDownloads
repoStat.NumCommentsAdded = repoStat.NumComments - repoStatisticBefore.NumComments
repoStat.NumClosedIssuesAdded = repoStat.NumClosedIssues - repoStatisticBefore.NumClosedIssues
repoStat.NumCommitsAdded = repoStat.NumCommits - repoStatisticBefore.NumCommits
repoStat.NumIssuesAdded = repoStat.NumIssues - repoStatisticBefore.NumIssues
repoStat.NumPullsAdded = repoStat.NumPulls - repoStatisticBefore.NumPulls
repoStat.NumContributorAdded = repoStat.NumContributor - repoStatisticBefore.NumContributor
}
}
day4MonthsAgo := t.AddDate(0, -4, 0)
repoStatisticFourMonthsAgo, err := models.GetOneRepoStatisticBeforeTime(day4MonthsAgo)
if err != nil {
log.Error("Get data of 4 moth ago failed.", err)
} else {
repoStat.NumCommentsGrowth = repoStat.NumComments - repoStatisticFourMonthsAgo.NumComments
repoStat.NumIssuesGrowth = repoStat.NumIssues - repoStatisticFourMonthsAgo.NumIssues
}

if _, err = models.InsertRepoStat(&repoStat); err != nil {
log.Error("InsertRepoStat failed: %s", repo.Name)
log.Error("InsertRepoStat failed(%s): %v", repo.Name, err)
log.Error("failed statistic: %s", repo.Name)
continue
}

tempRepoStat := models.RepoStatistic{
RepoID: repoStat.RepoID,
Date: repoStat.Date,
Impact: normalization.GetImpactInitValue(repoStat.NumWatches, repoStat.NumStars, repoStat.NumForks, repoStat.NumDownloads, repoStat.NumComments, repoStat.NumVisits),
Completeness: normalization.GetCompleteInitValue(repoStat.NumClosedIssues, repoStat.NumVersions, repoStat.NumDevMonths, repoStat.DatasetSize, repoStat.NumModels, repoStat.NumWikiViews),
Liveness: normalization.GetLivenessInitValue(repoStat.NumCommits, repoStat.NumIssues, repoStat.NumPulls, repoStat.NumVisits),
ProjectHealth: normalization.GetProjectHealthInitValue(repoStat.IssueFixedRate),
TeamHealth: normalization.GetTeamHealthInitValue(repoStat.NumContributor, repoStat.NumKeyContributor, repoStat.NumContributorsGrowth),
Growth: normalization.GetRepoGrowthInitValue(repoStat.NumCommitLinesGrowth, repoStat.NumIssuesGrowth, repoStat.NumCommitsGrowth, repoStat.NumContributorsGrowth, repoStat.NumCommentsGrowth),
}

reposRadar = append(reposRadar, &tempRepoStat)

if i == 0 {
minRepoRadar = tempRepoStat
maxRepoRadar = tempRepoStat
} else {

if tempRepoStat.Impact < minRepoRadar.Impact {
minRepoRadar.Impact = tempRepoStat.Impact
}

if tempRepoStat.Impact > maxRepoRadar.Impact {
maxRepoRadar.Impact = tempRepoStat.Impact
}

if tempRepoStat.Completeness < minRepoRadar.Completeness {
minRepoRadar.Completeness = tempRepoStat.Completeness
}

if tempRepoStat.Completeness > maxRepoRadar.Completeness {
maxRepoRadar.Completeness = tempRepoStat.Completeness
}

if tempRepoStat.Liveness < minRepoRadar.Completeness {
minRepoRadar.Liveness = tempRepoStat.Liveness
}

if tempRepoStat.Liveness > maxRepoRadar.Liveness {
maxRepoRadar.Liveness = tempRepoStat.Liveness
}

if tempRepoStat.ProjectHealth < minRepoRadar.ProjectHealth {
minRepoRadar.ProjectHealth = tempRepoStat.ProjectHealth
}

if tempRepoStat.ProjectHealth > maxRepoRadar.ProjectHealth {
maxRepoRadar.ProjectHealth = tempRepoStat.ProjectHealth
}

if tempRepoStat.TeamHealth < minRepoRadar.TeamHealth {
minRepoRadar.TeamHealth = tempRepoStat.TeamHealth
}

if tempRepoStat.TeamHealth > maxRepoRadar.TeamHealth {
maxRepoRadar.TeamHealth = tempRepoStat.TeamHealth
}

if tempRepoStat.Growth < minRepoRadar.Growth {
minRepoRadar.Growth = tempRepoStat.Growth
}

if tempRepoStat.Growth > maxRepoRadar.Growth {
maxRepoRadar.Growth = tempRepoStat.Growth
}

}

log.Info("finish statistic: %s", repo.Name)
}

//radar map
log.Info("begin statistic radar")
for _, radarInit := range reposRadar {
radarInit.Impact = normalization.Normalization(radarInit.Impact, minRepoRadar.Impact, maxRepoRadar.Impact)
radarInit.Completeness = normalization.Normalization(radarInit.Completeness, minRepoRadar.Completeness, maxRepoRadar.Completeness)
radarInit.Liveness = normalization.Normalization(radarInit.Liveness, minRepoRadar.Liveness, maxRepoRadar.Liveness)
radarInit.ProjectHealth = normalization.Normalization(radarInit.ProjectHealth, minRepoRadar.ProjectHealth, maxRepoRadar.ProjectHealth)
radarInit.TeamHealth = normalization.Normalization(radarInit.TeamHealth, minRepoRadar.TeamHealth, maxRepoRadar.TeamHealth)
radarInit.Growth = normalization.Normalization(radarInit.Growth, minRepoRadar.Growth, maxRepoRadar.Growth)
radarInit.RadarTotal = normalization.GetRadarValue(radarInit.Impact, radarInit.Completeness, radarInit.Liveness, radarInit.ProjectHealth, radarInit.TeamHealth, radarInit.Growth)
models.UpdateRepoStat(radarInit)
}

log.Info("finish statistic: radar")

}

func getDatasetSize(repo *models.Repository) (int64, error) {


+94 -0  routers/repo/repo_summary_statistic.go

@@ -0,0 +1,94 @@
package repo

import (
"time"

"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/log"
)

func SummaryStatistic() {
log.Info("Generate summary statistic begin")
yesterday := time.Now().AddDate(0, 0, -1).Format("2006-01-02")
SummaryStatisticDaily(yesterday)
log.Info("Generate summary statistic end")
}

func SummaryStatisticDaily(date string) {
log.Info("%s", date)
if err := models.DeleteSummaryStatisticDaily(date); err != nil {
log.Error("DeleteRepoStatDaily failed: %v", err.Error())
return
}

//user number
userNumber, err := models.GetUsersCount()
if err != nil {
log.Error("can not get user number", err)
userNumber = 0
}
//organization number
organizationNumber, err := models.GetOrganizationsCount()
if err != nil {
log.Error("can not get orgnazition number", err)
organizationNumber = 0
}
// repository number
repositoryNumer, err := models.GetAllRepositoriesCount()
if err != nil {
log.Error("can not get repository number", err)
repositoryNumer = 0
}
//repository size
repositorySize, err := models.GetAllRepositoriesSize()
if err != nil {
log.Error("can not get repository size", err)
repositorySize = 0
}
// dataset size
allDatasetSize, err := models.GetAllAttachmentSize()
if err != nil {
log.Error("can not get dataset size", err)
allDatasetSize = 0
}
//topic repo number
topics, err := models.GetAllUsedTopics()
if err != nil {
log.Error("can not get topics", err)
}
var topicsCount [11]int
for _, topic := range topics {

index, exists := models.DomainMap[topic.Name]
if exists {
topicsCount[index] = topic.RepoCount
}

}

summaryStat := models.SummaryStatistic{
Date: date,
NumUsers: userNumber,
RepoSize: repositorySize,
DatasetSize: allDatasetSize,
NumOrganizations: organizationNumber,
NumRepos: repositoryNumer,
NumRepoBigModel: topicsCount[0],
NumRepoAI: topicsCount[1],
NumRepoVision: topicsCount[2],
NumRepoNLP: topicsCount[3],
NumRepoML: topicsCount[4],
NumRepoNN: topicsCount[5],
NumRepoAutoDrive: topicsCount[6],
NumRepoRobot: topicsCount[7],
NumRepoLeagueLearn: topicsCount[8],
NumRepoDataMining: topicsCount[9],
NumRepoRISC: topicsCount[10],
}

if _, err = models.InsertSummaryStatistic(&summaryStat); err != nil {
log.Error("Insert summary Stat failed: %v", err.Error())
}

log.Info("finish summary statistic")
}

+9 -5  routers/routes/routes.go

@@ -6,13 +6,15 @@ package routes

import (
"bytes"
"code.gitea.io/gitea/routers/operation"
"encoding/gob"
"net/http"
"path"
"text/template"
"time"

"code.gitea.io/gitea/routers/operation"
"code.gitea.io/gitea/routers/private"

"code.gitea.io/gitea/routers/secure"

"code.gitea.io/gitea/models"
@@ -33,7 +35,6 @@ import (
"code.gitea.io/gitea/routers/dev"
"code.gitea.io/gitea/routers/events"
"code.gitea.io/gitea/routers/org"
"code.gitea.io/gitea/routers/private"
"code.gitea.io/gitea/routers/repo"
"code.gitea.io/gitea/routers/user"
userSetting "code.gitea.io/gitea/routers/user/setting"
@@ -113,14 +114,14 @@ func RouterHandler(level log.Level) func(ctx *macaron.Context) {
}

// SetLogMsgID set msgID in Context
func SetLogMsgID() func(ctx *macaron.Context) {
func SetLogMsgID() macaron.Handler {
return func(ctx *macaron.Context) {
start := time.Now()

uuid := gouuid.NewV4().String()
ctx.Data["MsgID"] = uuid

log.Info("Started %s %s for %s", log.ColoredMethod(ctx.Req.Method), ctx.Req.URL.RequestURI(), ctx.RemoteAddr(), ctx.Data["MsgID"])
log.Info("%s Started %s %s for %s", ctx.Data["SignedUserName"], log.ColoredMethod(ctx.Req.Method), ctx.Req.URL.RequestURI(), ctx.RemoteAddr(), ctx.Data["MsgID"])

rw := ctx.Resp.(macaron.ResponseWriter)
ctx.Next()
@@ -148,7 +149,7 @@ func NewMacaron() *macaron.Macaron {
m.Use(macaron.Logger())
}
}
m.Use(SetLogMsgID())
//m.Use(SetLogMsgID())
// Access Logger is similar to Router Log but more configurable and by default is more like the NCSA Common Log format
if setting.EnableAccessLog {
setupAccessLogger(m)
@@ -256,6 +257,7 @@ func NewMacaron() *macaron.Macaron {
DisableDebug: !setting.EnablePprof,
}))
m.Use(context.Contexter())
m.Use(SetLogMsgID())
// OK we are now set-up enough to allow us to create a nicer recovery than
// the default macaron recovery
m.Use(context.Recovery())
@@ -565,6 +567,8 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Post("/private", repo.UpdatePublicAttachment)
m.Get("/get_chunks", repo.GetSuccessChunks)
m.Get("/new_multipart", repo.NewMultipart)
m.Put("/obs_proxy_multipart", repo.PutOBSProxyUpload)
m.Get("/obs_proxy_download", repo.GetOBSProxyDownload)
m.Get("/get_multipart_url", repo.GetMultipartUploadUrl)
m.Post("/complete_multipart", repo.CompleteMultipart)
m.Post("/update_chunk", repo.UpdateMultipart)


+6 -2  templates/explore/dataset_list.tmpl

@@ -29,8 +29,12 @@
{{.Repo.OwnerName}} / {{.Title}}
</a>
<div class="ui right metas">
<span class="text grey">{{svg "octicon-tasklist" 16}} {{$.i18n.Tr (printf "dataset.task.%s" .Task)}}</span>
<span class="text grey">{{svg "octicon-tag" 16}}{{$.i18n.Tr (printf "dataset.category.%s" .Category)}}</span>
{{if .Task}}
<span class="text grey">{{svg "octicon-tasklist" 16}} {{$.i18n.Tr (printf "dataset.task.%s" .Task)}}</span>
{{end}}
{{if .Category}}
<span class="text grey">{{svg "octicon-tag" 16}}{{$.i18n.Tr (printf "dataset.category.%s" .Category)}}</span>
{{end}}
<span class="text grey">{{svg "octicon-flame" 16}} {{.DownloadTimes}}</span>
</div>
</div>


+2 -2  templates/repo/cloudbrain/index.tmpl

@@ -337,9 +337,9 @@
调试
</a>
<form id="stopForm-{{.JobID}}" action="{{if eq .Status "STOPPED"}}javascript:void(0){{else}}{{$.Link}}/{{.JobID}}/stop{{end}}" method="post" style="margin-left:-1px;">
<form id="stopForm-{{.JobID}}" action="{{if or (eq .Status "STOPPED") (eq .Status "FAILED")}}javascript:void(0){{else}}{{$.Link}}/{{.JobID}}/stop{{end}}" method="post" style="margin-left:-1px;">
{{$.CsrfTokenHtml}}
<a class="ui basic {{if eq .Status "STOPPED"}}disabled {{else}}blue {{end}}button" onclick="document.getElementById('stopForm-{{.JobID}}').submit();">
<a class="ui basic {{if or (eq .Status "STOPPED") (eq .Status "FAILED")}}disabled {{else}}blue {{end}}button" onclick="document.getElementById('stopForm-{{.JobID}}').submit();">
停止
</a>
</form>


+3 -3  templates/repo/cloudbrain/new.tmpl

@@ -120,7 +120,7 @@
<div class="repository new repo ui middle very relaxed page grid">
<div class="column">
{{template "base/alert" .}}
<div class="ui positive message" id="messageInfo">
<div class="ui negative message" id="messageInfo">
<p></p>
</div>
<form id="form_id" class="ui form" action="{{.Link}}" method="post">
@@ -259,11 +259,11 @@
let value_task = $("input[name='job_name']").val()
let value_image = $("input[name='image']").val()
let value_data = $("input[name='attachment']").val()
let re = /^[a-z0-9][a-z0-9-_]{1,35}[^_]$/
let re = /^[a-z0-9][a-z0-9-_]{1,34}[a-z0-9-]$/
let flag = re.test(value_task)
if(!flag){
$('#messageInfo').css('display','block')
let str = '只能以小写字母或数字开头且只包含小写字母、数字、_和-、最长36个字符,不能下划线(-)结尾。'
let str = '只能以小写字母或数字开头且只包含小写字母、数字、_和-,不能以_结尾,最长36个字符。'
$('#messageInfo p').text(str)
return false
}


+1 -1  templates/repo/datasets/dataset_list.tmpl

@@ -17,7 +17,7 @@
<span class="ui basic basic button clipboard" data-clipboard-text="{{.DownloadURL}}" data-tooltip='{{$.i18n.Tr "dataset.copy_url"}}' data-clipboard-action="copy"{{if ne $.Type 0}} style="display:none;"{{end}}>{{svg "octicon-file" 16}}</span>
<span class="ui basic basic button clipboard" data-clipboard-text="{{.FileChunk.Md5}}" data-tooltip='{{$.i18n.Tr "dataset.copy_md5"}}' data-clipboard-action="copy">{{svg "octicon-file-binary" 16}}</span>
</div>
{{if ne .DecompressState 0}}
{{if eq .DecompressState 1}}
<div class="ui left mini icon buttons">
<a class="ui basic blue button" href="datasets/dirs/{{.UUID}}?type={{$.Type}}" data-tooltip='{{$.i18n.Tr "dataset.directory"}}'>{{svg "octicon-file-directory" 16}}</a>
{{if $.IsSigned}}


+1 -1  templates/repo/datasets/label/index.tmpl

@@ -123,7 +123,7 @@
<select name="pre_predict_task" id="dataset_list_auto" onchange="dataset_auto_sele_Change(this)">
{{if .Attachments}}
{{range .Attachments}}
<option value="{{.UUID}}">{{.Name}}</option>
<option value="{{.UUID}}">{{.Name}}</option>
{{end}}
{{end}}
</select>


+1 -1  templates/repo/modelarts/new.tmpl

@@ -100,7 +100,7 @@
<div class="repository new repo ui middle very relaxed page grid">
<div class="column">
{{template "base/alert" .}}
<div class="ui positive message" id="messageInfo">
<div class="ui negative message" id="messageInfo">
<p></p>
</div>
<form class="ui form" id="form_id" action="{{.Link}}" method="post">


+13 -10  web_src/js/components/EditTopics.vue

@@ -11,16 +11,16 @@
<div class="icon-wrapper">
<i style="line-height: 1.5;color: #303643;font-weight: 900;" v-if="showInitTopic[i]" class="el-icon-check" ></i>
</div>
<div class="text">{{arr.topic_name}} </div>
<div class="text">{{arr.topic_name.toLowerCase()}} </div>
</div>
<div v-if="showInputValue" class="addition item-text" @click="postTopic">
点击或回车添加<b class="user-add-label-text">{{input}}</b>标签
点击或回车添加<b class="user-add-label-text">{{input.toLowerCase()}}</b>标签
</div>
<div v-if="showAddTopic" class="item-text" @click="addPostTopic">
<div class="icon-wrapper">
<i style="line-height: 1.5;color: #303643;font-weight: 900;" v-if="showAddFlage" class="el-icon-check" ></i>
</div>
<div class="text">{{input}}</div>
<div class="text">{{input.toLowerCase()}}</div>
</div>

</div>
@@ -134,7 +134,7 @@ export default {
this.showSearchTopic = true
}
else if(this.arrayTopics.indexOf(this.input)>-1){
else if(this.arrayTopics.indexOf(this.input.toLowerCase())>-1){
this.showInputValue = false
this.showSearchTopic = false
@@ -142,7 +142,7 @@ export default {
this.showInitTopic = []
let timestamp=new Date().getTime()
this.params.q = this.input
this.params.q = this.input.toLowerCase()
this.params._ = timestamp
this.$axios.get('/api/v1/topics/search',{
params:this.params
@@ -164,7 +164,7 @@ export default {
let findelement = this.array.some((item)=>{
return item.topic_name===this.input
return item.topic_name===this.input.toLowerCase()
})
this.showInputValue = !findelement
@@ -224,11 +224,11 @@ export default {
return
}else{
let topic = this.input
if(this.arrayTopics.includes(topic)){
if(this.arrayTopics.includes(topic.toLowerCase())){
return
}
else{
this.arrayTopics.push(topic)
this.arrayTopics.push(topic.toLowerCase())
let topics = this.arrayTopics
let strTopics = topics.join(',')
@@ -250,7 +250,10 @@ export default {
addPostTopic(){
if(this.showAddFlage){
this.arrayTopics.pop()
// this.arrayTopics.pop()

let cancleIndex = this.arrayTopics.indexOf(this.input)
this.arrayTopics.splice(cancleIndex,1)
let topics = this.arrayTopics
let strTopics = topics.join(',')
let data = this.qs.stringify({
@@ -268,7 +271,7 @@ export default {
}
else if(!this.showAddFlage){
let topic = this.input
this.arrayTopics.push(topic)
this.arrayTopics.push(topic.toLowerCase())
let topics = this.arrayTopics
let strTopics = topics.join(',')

