
Merge branch 'zouap' of https://git.openi.org.cn/OpenI/aiforge into zouap

pull/1036/head
zhoupzh 3 years ago
commit 9aae0caa61
7 changed files with 381 additions and 118 deletions
  1. models/cloudbrain.go  +46 -0
  2. models/custom_migrations.go  +0 -9
  3. models/models.go  +1 -0
  4. models/user_business_analysis.go  +294 -64
  5. routers/repo/ai_model_manage.go  +16 -10
  6. routers/repo/user_data_analysis.go  +23 -34
  7. routers/routes/routes.go  +1 -1

models/cloudbrain.go  +46 -0

@@ -903,6 +903,52 @@ func Cloudbrains(opts *CloudbrainsOptions) ([]*CloudbrainInfo, int64, error) {
 	return cloudbrains, count, nil
 }
 
+func QueryModelTrainJobVersionList(jobId string) ([]*CloudbrainInfo, int, error) {
+	sess := x.NewSession()
+	defer sess.Close()
+
+	var cond = builder.NewCond()
+
+	cond = cond.And(
+		builder.Eq{"cloudbrain.job_id": jobId},
+	)
+	cond = cond.And(
+		builder.Eq{"cloudbrain.Status": "COMPLETED"},
+	)
+
+	sess.OrderBy("cloudbrain.created_unix DESC")
+	cloudbrains := make([]*CloudbrainInfo, 0)
+	if err := sess.Table(&Cloudbrain{}).Where(cond).
+		Find(&cloudbrains); err != nil {
+		return nil, 0, fmt.Errorf("Find: %v", err)
+	}
+
+	return cloudbrains, int(len(cloudbrains)), nil
+}
+
+func QueryModelTrainJobList(repoId int64) ([]*CloudbrainInfo, int, error) {
+	sess := x.NewSession()
+	defer sess.Close()
+
+	var cond = builder.NewCond()
+
+	cond = cond.And(
+		builder.Eq{"cloudbrain.repo_id": repoId},
+	)
+	cond = cond.And(
+		builder.Eq{"cloudbrain.Status": "COMPLETED"},
+	)
+
+	sess.OrderBy("cloudbrain.created_unix DESC")
+	cloudbrains := make([]*CloudbrainInfo, 0)
+	if err := sess.Distinct("job_id").Table(&Cloudbrain{}).Where(cond).
+		Find(&cloudbrains); err != nil {
+		return nil, 0, fmt.Errorf("Find: %v", err)
+	}
+
+	return cloudbrains, int(len(cloudbrains)), nil
+}
+
 func CloudbrainsVersionList(opts *CloudbrainsOptions) ([]*CloudbrainInfo, int, error) {
 	sess := x.NewSession()
 	defer sess.Close()
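
For reference, a minimal caller sketch combining the two helpers added above (hypothetical, not part of this commit; repoID and jobID are placeholder values):

func listCompletedTrainJobs(repoID int64, jobID string) (int, int, error) {
	// One row per distinct job_id with status COMPLETED, newest first.
	jobs, jobCount, err := QueryModelTrainJobList(repoID)
	if err != nil {
		return 0, 0, err
	}

	// Every COMPLETED record of a single job, newest first.
	versions, versionCount, err := QueryModelTrainJobVersionList(jobID)
	if err != nil {
		return jobCount, 0, err
	}

	_ = jobs
	_ = versions
	return jobCount, versionCount, nil
}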


models/custom_migrations.go  +0 -9

@@ -22,7 +22,6 @@ var customMigrations = []CustomMigration{
 }
 
 var customMigrationsStatic = []CustomMigrationStatic{
-	{"Alter user static table field type ", alterUserStaticTable},
 	{"Delete organization user history data ", deleteNotDisplayUser},
 	{"update issue_fixed_rate to 1 if num_issues is 0 ", updateIssueFixedRate},
 }
@@ -59,14 +58,6 @@ func syncTopicStruct(x *xorm.Engine) error {
 	return err
 }
 
-func alterUserStaticTable(x *xorm.Engine, static *xorm.Engine) error {
-	alterSql := "alter table public.user_business_analysis alter column open_i_index type double precision"
-
-	_, err := static.Exec(alterSql)
-	return err
-
-}
-
 func deleteNotDisplayUser(x *xorm.Engine, static *xorm.Engine) error {
 
 	querySQL := "select id,name from public.user where type=1"


models/models.go  +1 -0

@@ -140,6 +140,7 @@ func init() {
 		new(RepoStatistic),
 		new(SummaryStatistic),
 		new(UserBusinessAnalysis),
+		new(UserBusinessAnalysisAll),
 		new(UserLoginLog),
 	)




models/user_business_analysis.go  +294 -64

@@ -4,14 +4,16 @@ import (
 	"encoding/json"
 	"fmt"
 	"sort"
+	"strconv"
 	"time"
 
 	"code.gitea.io/gitea/modules/log"
 	"code.gitea.io/gitea/modules/timeutil"
 	"xorm.io/builder"
+	"xorm.io/xorm"
 )
 
-type UserBusinessAnalysis struct {
+type UserBusinessAnalysisAll struct {
 	ID int64 `xorm:"pk"`
 
 	CountDate int64 `xorm:"pk"`
@@ -76,6 +78,71 @@ type UserBusinessAnalysis struct {
 	DataDate string `xorm:"NULL"`
 }
 
+type UserBusinessAnalysis struct {
+	ID int64 `xorm:"pk"`
+
+	CountDate int64 `xorm:"pk"`
+
+	//action :ActionMergePullRequest // 11
+	CodeMergeCount int `xorm:"NOT NULL DEFAULT 0"`
+
+	//action :ActionCommitRepo // 5
+	CommitCount int `xorm:"NOT NULL DEFAULT 0"`
+
+	//action :ActionCreateIssue // 6
+	IssueCount int `xorm:"NOT NULL DEFAULT 0"`
+
+	//comment table current date
+	CommentCount int `xorm:"NOT NULL DEFAULT 0"`
+
+	//watch table current date
+	FocusRepoCount int `xorm:"NOT NULL DEFAULT 0"`
+
+	//star table current date
+	StarRepoCount int `xorm:"NOT NULL DEFAULT 0"`
+
+	//follow table
+	WatchedCount int `xorm:"NOT NULL DEFAULT 0"`
+
+	// user table
+	GiteaAgeMonth int `xorm:"NOT NULL DEFAULT 0"`
+
+	//
+	CommitCodeSize int `xorm:"NOT NULL DEFAULT 0"`
+
+	//attachement table
+	CommitDatasetSize int `xorm:"NOT NULL DEFAULT 0"`
+
+	//0
+	CommitModelCount int `xorm:"NOT NULL DEFAULT 0"`
+
+	//issue, issueassignees
+	SolveIssueCount int `xorm:"NOT NULL DEFAULT 0"`
+
+	//baike
+	EncyclopediasCount int `xorm:"NOT NULL DEFAULT 0"`
+
+	//user
+	RegistDate timeutil.TimeStamp `xorm:"NOT NULL"`
+
+	//repo
+	CreateRepoCount int `xorm:"NOT NULL DEFAULT 0"`
+
+	//login count, from elk
+	LoginCount int `xorm:"NOT NULL DEFAULT 0"`
+
+	//openi index
+	OpenIIndex float64 `xorm:"NOT NULL DEFAULT 0"`
+
+	//user
+	Email string `xorm:"NOT NULL"`
+
+	//user
+	Name string `xorm:"NOT NULL"`
+
+	DataDate string `xorm:"NULL"`
+}
+
 type UserBusinessAnalysisQueryOptions struct {
 	ListOptions
 	UserName string
@@ -93,46 +160,12 @@ func (ulist UserBusinessAnalysisList) Less(i, j int) bool {
 	return ulist[i].ID > ulist[j].ID
 }
 
-func QueryUserStaticData(startTime int64, endTime int64) []*UserBusinessAnalysis {
-	log.Info("query startTime =" + fmt.Sprint(startTime) + " endTime=" + fmt.Sprint(endTime))
-	statictisSess := xStatistic.NewSession()
-	defer statictisSess.Close()
-
-	statictisSess.Select("*").Table("user_business_analysis").Where(" count_date>=" + fmt.Sprint(startTime) + " and count_date<=" + fmt.Sprint(endTime)).OrderBy("count_date desc")
-
-	userBusinessAnalysisList := make([]*UserBusinessAnalysis, 0)
-	statictisSess.Find(&userBusinessAnalysisList)
-
-	resultMap := make(map[int64]*UserBusinessAnalysis)
-	log.Info("query result size=" + fmt.Sprint(len(userBusinessAnalysisList)))
-	for _, userRecord := range userBusinessAnalysisList {
-		if _, ok := resultMap[userRecord.ID]; !ok {
-			resultMap[userRecord.ID] = userRecord
-		} else {
-			resultMap[userRecord.ID].CodeMergeCount += userRecord.CodeMergeCount
-			resultMap[userRecord.ID].CommitCount += userRecord.CommitCount
-			resultMap[userRecord.ID].IssueCount += userRecord.IssueCount
-			resultMap[userRecord.ID].CommentCount += userRecord.CommentCount
-			resultMap[userRecord.ID].FocusRepoCount += userRecord.FocusRepoCount
-			resultMap[userRecord.ID].StarRepoCount += userRecord.StarRepoCount
-			resultMap[userRecord.ID].WatchedCount += userRecord.WatchedCount
-			resultMap[userRecord.ID].CommitCodeSize += userRecord.CommitCodeSize
-			resultMap[userRecord.ID].CommitDatasetSize += userRecord.CommitDatasetSize
-			resultMap[userRecord.ID].CommitModelCount += userRecord.CommitModelCount
-			resultMap[userRecord.ID].SolveIssueCount += userRecord.SolveIssueCount
-			resultMap[userRecord.ID].EncyclopediasCount += userRecord.EncyclopediasCount
-			resultMap[userRecord.ID].CreateRepoCount += userRecord.CreateRepoCount
-			resultMap[userRecord.ID].LoginCount += userRecord.LoginCount
-		}
-	}
-
-	userBusinessAnalysisReturnList := UserBusinessAnalysisList{}
-	for _, v := range resultMap {
-		userBusinessAnalysisReturnList = append(userBusinessAnalysisReturnList, v)
-	}
-	sort.Sort(userBusinessAnalysisReturnList)
-	log.Info("return size=" + fmt.Sprint(len(userBusinessAnalysisReturnList)))
-	return userBusinessAnalysisReturnList
-}
+type UserBusinessAnalysisAllList []*UserBusinessAnalysisAll
+
+func (ulist UserBusinessAnalysisAllList) Swap(i, j int) { ulist[i], ulist[j] = ulist[j], ulist[i] }
+func (ulist UserBusinessAnalysisAllList) Len() int { return len(ulist) }
+func (ulist UserBusinessAnalysisAllList) Less(i, j int) bool {
+	return ulist[i].ID > ulist[j].ID
+}
 
 func getLastCountDate() int64 {
@@ -153,6 +186,41 @@ func getLastCountDate() int64 {
 	return pageStartTime.Unix()
 }
 
+func QueryUserStaticDataAll(opts *UserBusinessAnalysisQueryOptions) ([]*UserBusinessAnalysisAll, int64) {
+	log.Info("query startTime =" + fmt.Sprint(opts.StartTime) + " endTime=" + fmt.Sprint(opts.EndTime) + " isAll=" + fmt.Sprint(opts.IsAll))
+
+	statictisSess := xStatistic.NewSession()
+	defer statictisSess.Close()
+
+	allCount, err := statictisSess.Count(new(UserBusinessAnalysisAll))
+	if err != nil {
+		log.Info("query error." + err.Error())
+		return nil, 0
+	}
+	log.Info("query return total:" + fmt.Sprint(allCount))
+	if allCount == 0 {
+		RefreshUserStaticAllTabel()
+	}
+	pageSize := 1000
+	totalPage := int(allCount) / pageSize
+	userBusinessAnalysisReturnList := UserBusinessAnalysisAllList{}
+	for i := 0; i <= int(totalPage); i++ {
+		userBusinessAnalysisAllList := make([]*UserBusinessAnalysisAll, 0)
+		if err := statictisSess.Table("user_business_analysis_all").OrderBy("id desc").Limit(pageSize, i*pageSize).
+			Find(&userBusinessAnalysisAllList); err != nil {
+			return nil, 0
+		}
+		log.Info("query " + fmt.Sprint(i+1) + " result size=" + fmt.Sprint(len(userBusinessAnalysisAllList)))
+		for _, userRecord := range userBusinessAnalysisAllList {
+			userBusinessAnalysisReturnList = append(userBusinessAnalysisReturnList, userRecord)
+		}
+	}
+
+	sort.Sort(userBusinessAnalysisReturnList)
+	log.Info("return size=" + fmt.Sprint(len(userBusinessAnalysisReturnList)))
+	return userBusinessAnalysisReturnList, allCount
+}
+
 func QueryUserStaticDataPage(opts *UserBusinessAnalysisQueryOptions) ([]*UserBusinessAnalysis, int64) {
 
 	log.Info("query startTime =" + fmt.Sprint(opts.StartTime) + " endTime=" + fmt.Sprint(opts.EndTime) + " isAll=" + fmt.Sprint(opts.IsAll))
@@ -219,32 +287,43 @@ func QueryUserStaticDataPage(opts *UserBusinessAnalysisQueryOptions) ([]*UserBus
 			builder.Lte{"count_date": opts.EndTime},
 		)
 	}
-	userBusinessAnalysisList = make([]*UserBusinessAnalysis, 0)
-	if err := statictisSess.Table("user_business_analysis").Where(newAndCond).OrderBy("count_date desc").
-		Find(&userBusinessAnalysisList); err != nil {
-		return nil, 0
-	}
-
-	log.Info("query result size=" + fmt.Sprint(len(userBusinessAnalysisList)))
-	for _, userRecord := range userBusinessAnalysisList {
-		if _, ok := resultMap[userRecord.ID]; !ok {
-			resultMap[userRecord.ID] = userRecord
-		} else {
-			resultMap[userRecord.ID].CodeMergeCount += userRecord.CodeMergeCount
-			resultMap[userRecord.ID].CommitCount += userRecord.CommitCount
-			resultMap[userRecord.ID].IssueCount += userRecord.IssueCount
-			resultMap[userRecord.ID].CommentCount += userRecord.CommentCount
-			resultMap[userRecord.ID].FocusRepoCount += userRecord.FocusRepoCount
-			resultMap[userRecord.ID].StarRepoCount += userRecord.StarRepoCount
-			resultMap[userRecord.ID].WatchedCount += userRecord.WatchedCount
-			resultMap[userRecord.ID].CommitCodeSize += userRecord.CommitCodeSize
-			resultMap[userRecord.ID].CommitDatasetSize += userRecord.CommitDatasetSize
-			resultMap[userRecord.ID].CommitModelCount += userRecord.CommitModelCount
-			resultMap[userRecord.ID].SolveIssueCount += userRecord.SolveIssueCount
-			resultMap[userRecord.ID].EncyclopediasCount += userRecord.EncyclopediasCount
-			resultMap[userRecord.ID].CreateRepoCount += userRecord.CreateRepoCount
-			resultMap[userRecord.ID].LoginCount += userRecord.LoginCount
-		}
-	}
+
+	allCount, err := statictisSess.Where(newAndCond).Count(new(UserBusinessAnalysis))
+	if err != nil {
+		log.Info("query error." + err.Error())
+		return nil, 0
+	}
+
+	pageSize := 1000
+	totalPage := int(allCount) / pageSize
+
+	for i := 0; i <= int(totalPage); i++ {
+		userBusinessAnalysisList = make([]*UserBusinessAnalysis, 0)
+		if err := statictisSess.Table("user_business_analysis").Where(newAndCond).OrderBy("count_date desc").Limit(pageSize, i*pageSize).
+			Find(&userBusinessAnalysisList); err != nil {
+			return nil, 0
+		}
+		log.Info("query result size=" + fmt.Sprint(len(userBusinessAnalysisList)))
+		for _, userRecord := range userBusinessAnalysisList {
+			if _, ok := resultMap[userRecord.ID]; !ok {
+				resultMap[userRecord.ID] = userRecord
+			} else {
+				resultMap[userRecord.ID].CodeMergeCount += userRecord.CodeMergeCount
+				resultMap[userRecord.ID].CommitCount += userRecord.CommitCount
+				resultMap[userRecord.ID].IssueCount += userRecord.IssueCount
+				resultMap[userRecord.ID].CommentCount += userRecord.CommentCount
+				resultMap[userRecord.ID].FocusRepoCount += userRecord.FocusRepoCount
+				resultMap[userRecord.ID].StarRepoCount += userRecord.StarRepoCount
+				resultMap[userRecord.ID].WatchedCount += userRecord.WatchedCount
+				resultMap[userRecord.ID].CommitCodeSize += userRecord.CommitCodeSize
+				resultMap[userRecord.ID].CommitDatasetSize += userRecord.CommitDatasetSize
+				resultMap[userRecord.ID].CommitModelCount += userRecord.CommitModelCount
+				resultMap[userRecord.ID].SolveIssueCount += userRecord.SolveIssueCount
+				resultMap[userRecord.ID].EncyclopediasCount += userRecord.EncyclopediasCount
+				resultMap[userRecord.ID].CreateRepoCount += userRecord.CreateRepoCount
+				resultMap[userRecord.ID].LoginCount += userRecord.LoginCount
+			}
+		}
+	}


@@ -257,6 +336,50 @@ func QueryUserStaticDataPage(opts *UserBusinessAnalysisQueryOptions) ([]*UserBus
 	return userBusinessAnalysisReturnList, count
 }
 
+func RefreshUserStaticAllTabel() {
+
+	statictisSess := xStatistic.NewSession()
+	defer statictisSess.Close()
+	log.Info("delete all data from table: user_business_analysis_all")
+	statictisSess.Exec("delete from user_business_analysis_all")
+
+	currentTimeNow := time.Now()
+	pageStartTime := getLastCountDate()
+	pageEndTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 23, 59, 59, 0, currentTimeNow.Location()).Unix()
+
+	var cond = builder.NewCond()
+	cond = cond.And(
+		builder.Gte{"count_date": pageStartTime},
+	)
+	cond = cond.And(
+		builder.Lte{"count_date": pageEndTime},
+	)
+	userBusinessAnalysisList := make([]*UserBusinessAnalysis, 0)
+	if err := statictisSess.Table("user_business_analysis").Where(cond).OrderBy("id desc").
+		Find(&userBusinessAnalysisList); err != nil {
+		return
+	}
+	log.Info("query all data from table: user_business_analysis,len=" + fmt.Sprint(len(userBusinessAnalysisList)))
+	for _, userRecord := range userBusinessAnalysisList {
+		log.Info("insert to UserBusinessAnalysisAll table,user id=" + fmt.Sprint(userRecord.ID))
+		allData := getAllData(userRecord.ID, statictisSess)
+		allData.ID = userRecord.ID
+		allData.CountDate = 0
+		allData.DataDate = userRecord.DataDate
+		allData.Email = userRecord.Email
+		allData.OpenIIndex = userRecord.OpenIIndex
+		allData.GiteaAgeMonth = userRecord.GiteaAgeMonth
+		allData.Name = userRecord.Name
+		allData.RegistDate = userRecord.RegistDate
+
+		_, err := statictisSess.Insert(&allData)
+		if err != nil {
+			log.Info("insert all data failed." + err.Error())
+		}
+	}
+	log.Info("refresh all data finished.")
+}
+
 func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time, endTime time.Time, isReCount bool) error {
 
 	log.Info("start to count other user info data")
@@ -283,7 +406,7 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time,
 
 	DataDate := startTime.Format("2006-01-02")
 	CodeMergeCountMap := queryPullRequest(start_unix, end_unix)
-	CommitCountMap := queryAction(start_unix, end_unix, 5)
+	CommitCountMap := queryCommitAction(start_unix, end_unix, 5)
 	IssueCountMap := queryAction(start_unix, end_unix, 6)
 
 	CommentCountMap := queryComment(start_unix, end_unix)
@@ -410,10 +533,99 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time,
 			log.Info("insert daterecord failed." + err.Error())
 			return err
 		}
+
+		if isExistUserInAllTable(dateRecord.ID, statictisSess) {
+			updateCurrentData(dateRecord.ID, statictisSess, dateRecord)
+		} else {
+			log.Info("insert to UserBusinessAnalysisAll table,user id=" + fmt.Sprint(dateRecord.ID))
+			allData := getAllData(dateRecord.ID, statictisSess)
+			allData.ID = dateRecord.ID
+			allData.CountDate = 0
+			allData.DataDate = dateRecord.DataDate
+			allData.Email = dateRecord.Email
+			allData.OpenIIndex = dateRecord.OpenIIndex
+			allData.GiteaAgeMonth = dateRecord.GiteaAgeMonth
+			allData.Name = dateRecord.Name
+			allData.RegistDate = dateRecord.RegistDate
+
+			_, err = statictisSess.Insert(&allData)
+			if err != nil {
+				log.Info("insert all data failed." + err.Error())
+				return err
+			}
+		}
+
 	}
 	return nil
 }
 
+func updateCurrentData(userId int64, statictisSess *xorm.Session, currentData UserBusinessAnalysis) {
+
+	_, err := statictisSess.Update("update user_business_analysis_all set code_merge_count+=" + fmt.Sprint(currentData.CodeMergeCount) +
+		",commit_count+=" + fmt.Sprint(currentData.CommitCount) +
+		",issue_count+=" + fmt.Sprint(currentData.IssueCount) +
+		",comment_count+=" + fmt.Sprint(currentData.CommentCount) +
+		",focus_repo_count+=" + fmt.Sprint(currentData.FocusRepoCount) +
+		",star_repo_count+=" + fmt.Sprint(currentData.StarRepoCount) +
+		",watched_count+=" + fmt.Sprint(currentData.WatchedCount) +
+		",commit_code_size+=" + fmt.Sprint(currentData.CommitCodeSize) +
+		",commit_dataset_size+=" + fmt.Sprint(currentData.CommitDatasetSize) +
+		",commit_model_count+=" + fmt.Sprint(currentData.CommitModelCount) +
+		",solve_issue_count+=" + fmt.Sprint(currentData.SolveIssueCount) +
+		",encyclopedias_count+=" + fmt.Sprint(currentData.EncyclopediasCount) +
+		",create_repo_count+=" + fmt.Sprint(currentData.CreateRepoCount) +
+		",login_count+=" + fmt.Sprint(currentData.LoginCount) +
+		" where id=" + fmt.Sprint(userId))
+
+	if err != nil {
+		log.Info("update table failed." + err.Error())
+	}
+
+}
+
+func isExistUserInAllTable(userId int64, statictisSess *xorm.Session) bool {
+
+	allCount, err := statictisSess.Where("id=" + fmt.Sprint(userId)).Count(new(UserBusinessAnalysisAll))
+	if err != nil {
+		return false
+	}
+	return allCount > 0
+}
+
+func getAllData(userId int64, statictisSess *xorm.Session) UserBusinessAnalysisAll {
+	var dateRecord UserBusinessAnalysisAll
+
+	rows, err := statictisSess.Query("select sum(code_merge_count) as code_merge_count,sum(commit_count) as commit_count,sum(issue_count) as issue_count,sum(issue_count) as issue_count,sum(comment_count) as comment_count,sum(focus_repo_count) as focus_repo_count,sum(star_repo_count) as star_repo_count,sum(watched_count) as watched_count,sum(commit_code_size) as commit_code_size,sum(commit_dataset_size) as commit_dataset_size, sum(commit_model_count) as commit_model_count,sum(solve_issue_count) as solve_issue_count,sum(encyclopedias_count) as encyclopedias_count, sum(create_repo_count) as create_repo_count,sum(login_count) as login_count from public.user_business_analysis where id=" + fmt.Sprint(userId) + " group by id")
+	if err == nil {
+		for i, row := range rows {
+			log.Info("query user info, i=" + fmt.Sprint(i) + " code_merge_count=" + string(row["code_merge_count"]))
+			dateRecord.CodeMergeCount = getInt(string(row["code_merge_count"]))
+			dateRecord.CommitCount = getInt(string(row["commit_count"]))
+			dateRecord.IssueCount = getInt(string(row["issue_count"]))
+			dateRecord.CommentCount = getInt(string(row["comment_count"]))
+			dateRecord.FocusRepoCount = getInt(string(row["focus_repo_count"]))
+			dateRecord.StarRepoCount = getInt(string(row["star_repo_count"]))
+			dateRecord.WatchedCount = getInt(string(row["watched_count"]))
+			dateRecord.CommitCodeSize = getInt(string(row["commit_code_size"]))
+			dateRecord.CommitDatasetSize = getInt(string(row["commit_dataset_size"]))
+			dateRecord.CommitModelCount = getInt(string(row["commit_model_count"]))
+			dateRecord.SolveIssueCount = getInt(string(row["solve_issue_count"]))
+			dateRecord.EncyclopediasCount = getInt(string(row["encyclopedias_count"]))
+			dateRecord.CreateRepoCount = getInt(string(row["create_repo_count"]))
+			dateRecord.LoginCount = getInt(string(row["login_count"]))
+		}
+	}
+	return dateRecord
+}
+
+func getInt(str string) int {
+	re, err := strconv.ParseInt(str, 10, 32)
+	if err != nil {
+		return 0
+	}
+	return int(re)
+}
+
 func CounDataByDate(wikiCountMap map[string]int, startTime time.Time, endTime time.Time) {
 	CounDataByDateAndReCount(wikiCountMap, startTime, endTime, false)
 }
@@ -462,6 +674,24 @@ func queryPullRequest(start_unix int64, end_unix int64) map[int64]int {
 	return resultMap
 }
 
+func queryCommitAction(start_unix int64, end_unix int64, actionType int64) map[int64]int {
+	sess := x.NewSession()
+	defer sess.Close()
+	sess.Select("id,user_id,op_type,act_user_id").Table("action").Where("user_id=act_user_id and op_type=" + fmt.Sprint(actionType) + " and created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix))
+	actionList := make([]*Action, 0)
+	sess.Find(&actionList)
+	resultMap := make(map[int64]int)
+	log.Info("query action size=" + fmt.Sprint(len(actionList)))
+	for _, actionRecord := range actionList {
+		if _, ok := resultMap[actionRecord.UserID]; !ok {
+			resultMap[actionRecord.UserID] = 1
+		} else {
+			resultMap[actionRecord.UserID] += 1
+		}
+	}
+	return resultMap
+}
+
 func queryAction(start_unix int64, end_unix int64, actionType int64) map[int64]int {
 	sess := x.NewSession()
 	defer sess.Close()


routers/repo/ai_model_manage.go  +16 -10

@@ -257,20 +257,26 @@ func DownloadMultiModelFile(ctx *context.Context) {
 	}
 }
 
+func QueryTrainJobVersionList(ctx *context.Context) {
+	log.Info("query train job version list. start.")
+	JobID := ctx.Query("JobID")
+
+	VersionListTasks, count, err := models.QueryModelTrainJobVersionList(JobID)
+
+	log.Info("query return count=" + fmt.Sprint(count))
+
+	if err != nil {
+		ctx.ServerError("QueryTrainJobList:", err)
+	} else {
+		ctx.JSON(200, VersionListTasks)
+	}
+}
+
 func QueryTrainJobList(ctx *context.Context) {
 	log.Info("query train job list. start.")
 	repoId := ctx.QueryInt64("repoId")
 
-	VersionListTasks, count, err := models.CloudbrainsVersionList(&models.CloudbrainsOptions{
-		ListOptions: models.ListOptions{
-			Page:     -1,
-			PageSize: -1,
-		},
-		RepoID:  repoId,
-		Type:    -1,
-		JobType: "",
-		JobID:   "",
-	})
+	VersionListTasks, count, err := models.QueryModelTrainJobList(repoId)
 
 	log.Info("query return count=" + fmt.Sprint(count))




routers/repo/user_data_analysis.go  +23 -34

@@ -4,7 +4,6 @@ import (
 	"fmt"
 	"net/http"
 	"net/url"
-	"strings"
 	"time"
 
 	"code.gitea.io/gitea/models"
@@ -16,17 +15,6 @@ import (
 	"github.com/360EntSecGroup-Skylar/excelize/v2"
 )
 
-func QueryUserStaticData(ctx *context.Context) {
-	startDate := ctx.Query("startDate")
-	endDate := ctx.Query("endDate")
-	log.Info("startDate=" + startDate + " endDate=" + endDate)
-	startTime, _ := time.Parse("2006-01-02", startDate)
-	endTime, _ := time.Parse("2006-01-02", endDate)
-	endTime = endTime.AddDate(0, 0, 1)
-	log.Info("startTime=" + fmt.Sprint(startTime.Unix()) + " endDate=" + fmt.Sprint(endTime.Unix()))
-	ctx.JSON(http.StatusOK, models.QueryUserStaticData(startTime.Unix(), endTime.Unix()))
-}
-
 func QueryUserStaticDataPage(ctx *context.Context) {
 	startDate := ctx.Query("startDate")
 	endDate := ctx.Query("endDate")
@@ -50,6 +38,7 @@ func QueryUserStaticDataPage(ctx *context.Context) {
 		endTime = time.Now()
 	} else {
 		startTime, _ = time.ParseInLocation("2006-01-02", startDate, time.Local)
+		startTime = time.Date(startTime.Year(), startTime.Month(), startTime.Day(), 12, 0, 0, 0, startTime.Location())
 		settingStartTime, _ := time.Parse("2006-01-02", setting.RadarMap.RecordBeginTime)
 		if startTime.Unix() < settingStartTime.Unix() {
 			startTime = settingStartTime
@@ -57,6 +46,8 @@ func QueryUserStaticDataPage(ctx *context.Context) {
 		}
 		endTime, _ = time.ParseInLocation("2006-01-02", endDate, time.Local)
 		endTime = endTime.AddDate(0, 0, 1)
+		endTime = time.Date(endTime.Year(), endTime.Month(), endTime.Day(), 23, 59, 59, 0, startTime.Location())
+
 		isAll = false
 		log.Info("startTime=" + fmt.Sprint(startTime.Unix()) + " endDate=" + fmt.Sprint(endTime.Unix()))
 	}
@@ -76,11 +67,10 @@ func QueryUserStaticDataPage(ctx *context.Context) {
 		EndTime:   endTime.Unix(),
 		IsAll:     isAll,
 	}
-	mapInterface := make(map[string]interface{})
-	re, count := models.QueryUserStaticDataPage(pageOpts)
-	mapInterface["data"] = re
-	mapInterface["count"] = count
 
 	if IsReturnFile {
+		re, count := models.QueryUserStaticDataAll(pageOpts)
+		log.Info("return count=" + fmt.Sprint(count))
 		//writer exec file.
 		xlsx := excelize.NewFile()
 		sheetName := ctx.Tr("user.static.sheetname")
@@ -132,29 +122,14 @@ func QueryUserStaticDataPage(ctx *context.Context) {
 			formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05")
 			xlsx.SetCellValue(sheetName, "P"+rows, formatTime[0:len(formatTime)-3])
 
-			formatTime = time.Unix(userRecord.CountDate, 0).Format("2006-01-02 15:04:05")
-			xlsx.SetCellValue(sheetName, "Q"+rows, formatTime[0:len(formatTime)-3])
+			formatTime = userRecord.DataDate
+			xlsx.SetCellValue(sheetName, "Q"+rows, formatTime+" 00:01")
 		}
 
 		//set the default sheet to open
 		xlsx.SetActiveSheet(index)
-		var filename string
-
-		nowTime := time.Now()
-		nowZeroTime := time.Date(nowTime.Year(), nowTime.Month(), nowTime.Day(), 0, 0, 0, 0, nowTime.Location())
-		if endTime.Unix() >= nowZeroTime.Unix() {
-			endDate = nowZeroTime.AddDate(0, 0, -1).Format("2006-01-02")
-		}
-
-		if isAll {
-			filename = sheetName + "_" + ctx.Tr("user.static.all") + ".xlsx"
-		} else {
-			filename = sheetName + "_" + strings.ReplaceAll(startDate, "-", "") + "_" + strings.ReplaceAll(endDate, "-", "") + ".xlsx"
-		}
-
-		if len(userName) > 0 {
-			filename = sheetName + "_" + userName + "_" + strings.ReplaceAll(startDate, "-", "") + "_" + strings.ReplaceAll(endDate, "-", "") + ".xlsx"
-		}
+		filename := sheetName + "_" + ctx.Tr("user.static.all") + ".xlsx"
 
 		ctx.Resp.Header().Set("Content-Disposition", "attachment; filename="+url.QueryEscape(filename))
 		ctx.Resp.Header().Set("Content-Type", "application/octet-stream")
@@ -163,11 +138,21 @@ func QueryUserStaticDataPage(ctx *context.Context) {
 		}
 
 	} else {
+		mapInterface := make(map[string]interface{})
+		re, count := models.QueryUserStaticDataPage(pageOpts)
+		mapInterface["data"] = re
+		mapInterface["count"] = count
 		ctx.JSON(http.StatusOK, mapInterface)
 	}
 }
 
 func TimingCountDataByDateAndReCount(date string, isReCount bool) {
+
+	if date == "refreshAll" {
+		models.RefreshUserStaticAllTabel()
+		return
+	}
+
 	t, _ := time.Parse("2006-01-02", date)
 	startTime := time.Date(t.Year(), t.Month(), t.Day(), 0, 0, 0, 0, t.Location())


@@ -220,6 +205,10 @@ func TimingCountDataByDateAndReCount(date string, isReCount bool) {
 		log.Error("count user info error." + err.Error())
 		mailer.SendWarnNotifyMail(setting.Warn_Notify_Mails, warnEmailMessage)
 	}
+
+	if isReCount {
+		models.RefreshUserStaticAllTabel()
+	}
 }
 
 func TimingCountDataByDate(date string) {
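
For reference, a minimal sketch of the two ways the refresh logic added above can be reached (hypothetical, not part of this commit; the date value is a placeholder):

// refreshExamples shows the two triggers added in this file; both end up in
// models.RefreshUserStaticAllTabel, which rebuilds user_business_analysis_all.
func refreshExamples() {
	// Rebuild the all-time table directly, skipping the per-day counting.
	TimingCountDataByDateAndReCount("refreshAll", false)

	// Count a single (placeholder) day, then rebuild the all-time table afterwards.
	TimingCountDataByDateAndReCount("2022-01-01", true)
}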


routers/routes/routes.go  +1 -1

@@ -792,7 +792,6 @@ func RegisterRoutes(m *macaron.Macaron) {
 	}, reqSignIn, context.RepoAssignment(), context.UnitTypes(), reqRepoAdmin, context.RepoRef())
 
 	m.Post("/:username/:reponame/action/:action", reqSignIn, context.RepoAssignment(), context.UnitTypes(), repo.Action)
-	m.Get("/tool/query_user_static", adminReq, repo.QueryUserStaticData)
 	m.Get("/tool/query_user_static_page", adminReq, repo.QueryUserStaticDataPage)
 	// Grouping for those endpoints not requiring authentication
 	m.Group("/:username/:reponame", func() {
@@ -975,6 +974,7 @@ func RegisterRoutes(m *macaron.Macaron) {
 			m.Put("/modify_model", repo.ModifyModelInfo)
 			m.Get("/show_model", reqRepoCloudBrainReader, repo.ShowModelPageInfo)
 			m.Get("/query_train_job", reqRepoCloudBrainReader, repo.QueryTrainJobList)
+			m.Get("/query_train_job_version", reqRepoCloudBrainReader, repo.QueryTrainJobVersionList)
 			m.Group("/:ID", func() {
 				m.Get("", reqRepoCloudBrainReader, repo.ShowSingleModel)
 				m.Get("/downloadsingle", reqRepoCloudBrainReader, repo.DownloadSingleModelFile)

