diff --git a/custom/public/rotation3D/rotation3D.js b/custom/public/rotation3D/rotation3D.js
index da37ee43b..8ec5b6938 100644
--- a/custom/public/rotation3D/rotation3D.js
+++ b/custom/public/rotation3D/rotation3D.js
@@ -154,7 +154,7 @@ var Rotation3D = window.Rotation3D = function (_opts) {
* y y起点 + (尺寸 * 正弦 * y压缩) - 元素宽度一半
*/
Rotation3D.prototype.itemStyle = function($item, index, rotation) {
- // console.log(rotation)
+ console.log("itemStyle=" + rotation + " index=" + index);
var parseSXY = circleMath.parseSXY(rotation, this);
var scale = parseSXY.scale;
var x = parseSXY.x;
@@ -219,7 +219,7 @@ Rotation3D.prototype.itemStyle = function($item, index, rotation) {
*/
Rotation3D.prototype.lineStyle = function($line, index, rotation) {
var rotate = circleMath.parseRotate(rotation, this)
- // console.log(rotate)
+ console.log("lineStyle=" + rotation + " index=" + index);
$line.css({
transform: 'rotate(' + rotate + 'deg)',
@@ -235,7 +235,7 @@ Rotation3D.prototype.lineStyle = function($line, index, rotation) {
Rotation3D.prototype.goTo = function (index) {
var self = this;
this.currenIndex = index;
- console.log('currenIndex', index);
+ console.log('goTo currenIndex', index);
/**
* 1.计算floatIndex,用于控死amdiff
*/
@@ -293,7 +293,7 @@ Rotation3D.prototype.scheduleNextFrame = function () {
*/
Rotation3D.prototype.render = function () {
var self=this;
-
+
// 图形间隔:弧度
var spacing = 2 * Math.PI / this.$item.length;
var itemRotation = this.rotation;
@@ -319,6 +319,7 @@ Rotation3D.prototype.onAutoPlay = function () {
if (self.currenIndex < 0) {
self.currenIndex = self.length - 1
}
+ console.log("autoPlayTimer....");
self.goTo(self.currenIndex);
self.currenIndex--; //倒叙
}, this.autoPlayDelay)
@@ -330,12 +331,12 @@ Rotation3D.prototype.onAutoPlay = function () {
Rotation3D.prototype.onDrag = function () {
var self = this;
var startX, startY, moveX, moveY, endX, endY;
-
+ console.log("onDrag....");
// 拖拽:三个事件-按下 移动 抬起
//按下
this.$rotation.mousedown(function (e) {
startX = e.pageX; startY = e.pageY;
-
+ console.log("mousedown....");
// 移动
$(document).mousemove(function (e) {
// console.log('移动');
@@ -347,7 +348,7 @@ Rotation3D.prototype.onDrag = function () {
$(document).mouseup(function (e) {
endX = e.pageX; endY = e.pageY;
moveX = endX - startX; moveY = endY - startY;
-
+ console.log("mouseup....");
// 每40旋转一步
var moveIndex = parseInt(Math.abs(moveX) / 50)
console.log('moveIndex',moveIndex)
diff --git a/models/cloudbrain_static.go b/models/cloudbrain_static.go
new file mode 100644
index 000000000..31f66d4fc
--- /dev/null
+++ b/models/cloudbrain_static.go
@@ -0,0 +1,22 @@
+package models
+
+import "code.gitea.io/gitea/modules/log"
+
+func GetAllStatusCloudBrain() map[string]int {
+ sess := x.NewSession()
+ defer sess.Close()
+ cloudbrains := make([]*CloudbrainInfo, 0)
+ if err := sess.Table(&Cloudbrain{}).Unscoped().
+ Find(&cloudbrains); err != nil {
+		log.Info("find cloudbrain records error: " + err.Error())
+ }
+ cloudBrainStatusResult := make(map[string]int)
+ for _, cloudbrain := range cloudbrains {
+ if _, ok := cloudBrainStatusResult[cloudbrain.Status]; !ok {
+ cloudBrainStatusResult[cloudbrain.Status] = 1
+ } else {
+ cloudBrainStatusResult[cloudbrain.Status] += 1
+ }
+ }
+ return cloudBrainStatusResult
+}
diff --git a/models/custom_migrations.go b/models/custom_migrations.go
index d0158530b..412bedce1 100644
--- a/models/custom_migrations.go
+++ b/models/custom_migrations.go
@@ -1,8 +1,6 @@
package models
import (
- "fmt"
-
"code.gitea.io/gitea/modules/log"
"xorm.io/xorm"
)
@@ -22,7 +20,6 @@ var customMigrations = []CustomMigration{
}
var customMigrationsStatic = []CustomMigrationStatic{
- {"Delete organization user history data ", deleteNotDisplayUser},
{"update issue_fixed_rate to 1 if num_issues is 0 ", updateIssueFixedRate},
}
@@ -36,7 +33,6 @@ func MigrateCustom(x *xorm.Engine) {
}
}
-
}
func MigrateCustomStatic(x *xorm.Engine, static *xorm.Engine) {
@@ -58,24 +54,6 @@ func syncTopicStruct(x *xorm.Engine) error {
return err
}
-func deleteNotDisplayUser(x *xorm.Engine, static *xorm.Engine) error {
-
- querySQL := "select id,name from public.user where type=1"
- rows, err := x.Query(querySQL)
- if err != nil {
- log.Info("select db failed,err:", err)
- return err
- }
-
- for i, userRow := range rows {
- log.Info("delete zuzi user, i=" + fmt.Sprint(i) + " userName=" + string(userRow["name"]))
- deleteSql := "delete from user_business_analysis where id=" + string(userRow["id"]) + " and name='" + string(userRow["name"]) + "'"
- static.Exec(deleteSql)
- }
-
- return nil
-}
-
func updateIssueFixedRate(x *xorm.Engine, static *xorm.Engine) error {
updateSQL := "update repo_statistic set issue_fixed_rate=1.0 where num_issues=0"
_, err := static.Exec(updateSQL)
diff --git a/models/dataset.go b/models/dataset.go
index d3a142742..d4a7748d3 100755
--- a/models/dataset.go
+++ b/models/dataset.go
@@ -1,10 +1,12 @@
package models
import (
- "code.gitea.io/gitea/modules/log"
"errors"
"fmt"
"sort"
+ "strings"
+
+ "code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/timeutil"
"xorm.io/builder"
@@ -179,7 +181,7 @@ func SearchDatasetCondition(opts *SearchDatasetOptions) builder.Cond {
func generateFilterCond(opts *SearchDatasetOptions, cond builder.Cond) builder.Cond {
if len(opts.Keyword) > 0 {
- cond = cond.And(builder.Or(builder.Like{"dataset.title", opts.Keyword}, builder.Like{"dataset.description", opts.Keyword}))
+ cond = cond.And(builder.Or(builder.Like{"LOWER(dataset.title)", strings.ToLower(opts.Keyword)}, builder.Like{"LOWER(dataset.description)", strings.ToLower(opts.Keyword)}))
}
if len(opts.Category) > 0 {
diff --git a/models/models.go b/models/models.go
index 2a2e119fb..9d255c5e6 100755
--- a/models/models.go
+++ b/models/models.go
@@ -157,6 +157,7 @@ func init() {
new(UserBusinessAnalysisCurrentMonth),
new(UserBusinessAnalysisCurrentWeek),
new(UserBusinessAnalysisYesterday),
+ new(UserBusinessAnalysisLastWeek),
new(UserLoginLog),
new(UserMetrics),
new(UserAnalysisPara),
diff --git a/models/org.go b/models/org.go
index 2a6528023..c956f1f89 100755
--- a/models/org.go
+++ b/models/org.go
@@ -160,7 +160,11 @@ func UpdateOrgStatistics() {
has, _ := x.Get(orgStat)
orgStat.NumScore = numScore
- if has {
+
+ count, err := GetPublicRepositoryCount(&org)
+ if err != nil || count == 0 {
+ x.ID(orgStat.ID).Delete(new(OrgStatistic))
+ } else if has {
x.ID(orgStat.ID).Cols("num_score").Update(&orgStat)
} else {
x.Insert(orgStat)
diff --git a/models/repo_activity_custom.go b/models/repo_activity_custom.go
index ac39a8de7..cbe00b9d9 100644
--- a/models/repo_activity_custom.go
+++ b/models/repo_activity_custom.go
@@ -211,7 +211,7 @@ func setKeyContributerDict(contributorDistinctDict map[string]int, email string,
}
}
-func GetAllUserKPIStats() (map[string]*git.UserKPIStats, error) {
+func GetAllUserKPIStats(startTime time.Time, endTime time.Time) (map[string]*git.UserKPIStats, error) {
authors := make(map[string]*git.UserKPIStats)
repositorys, err := GetAllRepositoriesByFilterCols("owner_name", "name")
if err != nil {
@@ -219,7 +219,7 @@ func GetAllUserKPIStats() (map[string]*git.UserKPIStats, error) {
}
for _, repository := range repositorys {
- authorsOneRepo, err1 := git.GetUserKPIStats(repository.RepoPath())
+ authorsOneRepo, err1 := git.GetUserKPIStats(repository.RepoPath(), startTime, endTime)
if err1 != nil {
log.Warn("get user kpi status err:"+repository.RepoPath(), err1.Error())
continue
diff --git a/models/user_business_analysis.go b/models/user_business_analysis.go
index 56e591dea..ec9cf25fe 100644
--- a/models/user_business_analysis.go
+++ b/models/user_business_analysis.go
@@ -407,15 +407,147 @@ func QueryUserStaticDataAll(opts *UserBusinessAnalysisQueryOptions) ([]*UserBusi
return userBusinessAnalysisReturnList, allCount
}
+func QueryUserStaticDataForUserDefine(opts *UserBusinessAnalysisQueryOptions, wikiCountMap map[string]int) ([]*UserBusinessAnalysis, int64) {
+ log.Info("start to count other user info data")
+ sess := x.NewSession()
+ defer sess.Close()
+
+ currentTimeNow := time.Now()
+ log.Info("current time:" + currentTimeNow.Format("2006-01-02 15:04:05"))
+
+ start_unix := opts.StartTime
+
+ end_unix := opts.EndTime
+ CountDate := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 1, 0, 0, currentTimeNow.Location())
+ DataDate := currentTimeNow.Format("2006-01-02 15:04")
+
+ CodeMergeCountMap := queryPullRequest(start_unix, end_unix)
+ CommitCountMap := queryCommitAction(start_unix, end_unix, 5)
+ IssueCountMap := queryCreateIssue(start_unix, end_unix)
+
+ CommentCountMap := queryComment(start_unix, end_unix)
+ FocusRepoCountMap := queryWatch(start_unix, end_unix)
+ StarRepoCountMap := queryStar(start_unix, end_unix)
+ WatchedCountMap, WatchOtherMap := queryFollow(start_unix, end_unix)
+
+ StartTime := time.Unix(start_unix, 0)
+ EndTime := time.Unix(end_unix, 0)
+ CommitCodeSizeMap, err := GetAllUserKPIStats(StartTime, EndTime)
+ if err != nil {
+		log.Info("query commit code error: " + err.Error())
+ } else {
+ log.Info("query commit code size, len=" + fmt.Sprint(len(CommitCodeSizeMap)))
+ CommitCodeSizeMapJson, _ := json.Marshal(CommitCodeSizeMap)
+ log.Info("CommitCodeSizeMapJson=" + string(CommitCodeSizeMapJson))
+ }
+ CommitDatasetSizeMap, CommitDatasetNumMap := queryDatasetSize(start_unix, end_unix)
+ SolveIssueCountMap := querySolveIssue(start_unix, end_unix)
+ CreateRepoCountMap := queryUserCreateRepo(start_unix, end_unix)
+ LoginCountMap := queryLoginCount(start_unix, end_unix)
+ OpenIIndexMap := queryUserRepoOpenIIndex(start_unix, end_unix)
+ CloudBrainTaskMap, CloudBrainTaskItemMap := queryCloudBrainTask(start_unix, end_unix)
+ AiModelManageMap := queryUserModel(start_unix, end_unix)
+
+ CollectDataset, CollectedDataset := queryDatasetStars(start_unix, end_unix)
+ RecommendDataset := queryRecommedDataSet(start_unix, end_unix)
+ CollectImage, CollectedImage := queryImageStars(start_unix, end_unix)
+ RecommendImage := queryRecommedImage(start_unix, end_unix)
+
+ statictisSess := xStatistic.NewSession()
+ defer statictisSess.Close()
+
+ cond := "type != 1 and is_active=true"
+ count, err := sess.Where(cond).Count(new(User))
+
+ ParaWeight := getParaWeight()
+ ResultList := make([]*UserBusinessAnalysis, 0)
+ var indexTotal int64
+ indexTotal = 0
+ for {
+ sess.Select("`user`.*").Table("user").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
+ userList := make([]*User, 0)
+ sess.Find(&userList)
+
+ for i, userRecord := range userList {
+ var dateRecord UserBusinessAnalysis
+ dateRecord.ID = userRecord.ID
+ log.Info("i=" + fmt.Sprint(i) + " userName=" + userRecord.Name)
+ dateRecord.CountDate = CountDate.Unix()
+ dateRecord.DataDate = DataDate
+ dateRecord.Email = userRecord.Email
+ dateRecord.RegistDate = userRecord.CreatedUnix
+ dateRecord.Name = userRecord.Name
+ dateRecord.UserLocation = userRecord.Location
+ dateRecord.GiteaAgeMonth = subMonth(currentTimeNow, userRecord.CreatedUnix.AsTime())
+
+ dateRecord.CodeMergeCount = getMapValue(dateRecord.ID, CodeMergeCountMap)
+ dateRecord.CommitCount = getMapValue(dateRecord.ID, CommitCountMap)
+ dateRecord.IssueCount = getMapValue(dateRecord.ID, IssueCountMap)
+ dateRecord.CommentCount = getMapValue(dateRecord.ID, CommentCountMap)
+ dateRecord.FocusRepoCount = getMapValue(dateRecord.ID, FocusRepoCountMap)
+ dateRecord.StarRepoCount = getMapValue(dateRecord.ID, StarRepoCountMap)
+ dateRecord.WatchedCount = getMapValue(dateRecord.ID, WatchedCountMap)
+ dateRecord.FocusOtherUser = getMapValue(dateRecord.ID, WatchOtherMap)
+ if _, ok := CommitCodeSizeMap[dateRecord.Email]; !ok {
+ dateRecord.CommitCodeSize = 0
+ } else {
+ dateRecord.CommitCodeSize = int(CommitCodeSizeMap[dateRecord.Email].CommitLines)
+ }
+ dateRecord.CommitDatasetSize = getMapValue(dateRecord.ID, CommitDatasetSizeMap)
+ dateRecord.CommitDatasetNum = getMapValue(dateRecord.ID, CommitDatasetNumMap)
+ dateRecord.SolveIssueCount = getMapValue(dateRecord.ID, SolveIssueCountMap)
+
+ dateRecord.EncyclopediasCount = getMapKeyStringValue(dateRecord.Name, wikiCountMap)
+
+ dateRecord.CreateRepoCount = getMapValue(dateRecord.ID, CreateRepoCountMap)
+
+ dateRecord.LoginCount = getMapValue(dateRecord.ID, LoginCountMap)
+
+ if _, ok := OpenIIndexMap[dateRecord.ID]; !ok {
+ dateRecord.OpenIIndex = 0
+ } else {
+ dateRecord.OpenIIndex = OpenIIndexMap[dateRecord.ID]
+ }
+
+ dateRecord.CloudBrainTaskNum = getMapValue(dateRecord.ID, CloudBrainTaskMap)
+ dateRecord.GpuDebugJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_GpuDebugJob", CloudBrainTaskItemMap)
+ dateRecord.NpuDebugJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_NpuDebugJob", CloudBrainTaskItemMap)
+ dateRecord.GpuTrainJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_GpuTrainJob", CloudBrainTaskItemMap)
+ dateRecord.NpuTrainJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_NpuTrainJob", CloudBrainTaskItemMap)
+ dateRecord.NpuInferenceJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_NpuInferenceJob", CloudBrainTaskItemMap)
+ dateRecord.GpuBenchMarkJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_GpuBenchMarkJob", CloudBrainTaskItemMap)
+ dateRecord.CloudBrainRunTime = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_CloudBrainRunTime", CloudBrainTaskItemMap)
+ dateRecord.CommitModelCount = getMapValue(dateRecord.ID, AiModelManageMap)
+
+ dateRecord.CollectDataset = getMapValue(dateRecord.ID, CollectDataset)
+ dateRecord.CollectedDataset = getMapValue(dateRecord.ID, CollectedDataset)
+ dateRecord.RecommendDataset = getMapValue(dateRecord.ID, RecommendDataset)
+ dateRecord.CollectImage = getMapValue(dateRecord.ID, CollectImage)
+ dateRecord.CollectedImage = getMapValue(dateRecord.ID, CollectedImage)
+ dateRecord.RecommendImage = getMapValue(dateRecord.ID, RecommendImage)
+
+ dateRecord.UserIndexPrimitive = getUserIndex(dateRecord, ParaWeight)
+ ResultList = append(ResultList, &dateRecord)
+ }
+
+ indexTotal += PAGE_SIZE
+ if indexTotal >= count {
+ break
+ }
+ }
+ log.Info("query user define,count=" + fmt.Sprint(len(ResultList)))
+ return ResultList, int64(len(ResultList))
+}
+
func QueryUserStaticDataPage(opts *UserBusinessAnalysisQueryOptions) ([]*UserBusinessAnalysis, int64) {
log.Info("query startTime =" + fmt.Sprint(opts.StartTime) + " endTime=" + fmt.Sprint(opts.EndTime) + " isAll=" + fmt.Sprint(opts.IsAll))
statictisSess := xStatistic.NewSession()
defer statictisSess.Close()
- currentTimeNow := time.Now()
- pageStartTime := getLastCountDate()
- pageEndTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 23, 59, 59, 0, currentTimeNow.Location()).Unix()
+ //currentTimeNow := time.Now()
+ //pageStartTime := getLastCountDate()
+ //pageEndTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 23, 59, 59, 0, currentTimeNow.Location()).Unix()
var cond = builder.NewCond()
if len(opts.UserName) > 0 {
@@ -424,10 +556,10 @@ func QueryUserStaticDataPage(opts *UserBusinessAnalysisQueryOptions) ([]*UserBus
)
}
cond = cond.And(
- builder.Gte{"count_date": pageStartTime},
+ builder.Gte{"count_date": opts.StartTime},
)
cond = cond.And(
- builder.Lte{"count_date": pageEndTime},
+ builder.Lte{"count_date": opts.EndTime},
)
count, err := statictisSess.Where(cond).Count(new(UserBusinessAnalysis))
@@ -447,7 +579,7 @@ func QueryUserStaticDataPage(opts *UserBusinessAnalysisQueryOptions) ([]*UserBus
}
userBusinessAnalysisList := make([]*UserBusinessAnalysis, 0)
- if err := statictisSess.Table("user_business_analysis").Where(cond).OrderBy("id desc").
+ if err := statictisSess.Table("user_business_analysis").Where(cond).OrderBy("count_date,id desc").
Find(&userBusinessAnalysisList); err != nil {
return nil, 0
}
@@ -532,10 +664,8 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
log.Info("truncate all data from table: " + tableName)
statictisSess.Exec("TRUNCATE TABLE " + tableName)
- StartTimeNextDay := pageStartTime.AddDate(0, 0, 1)
- EndTimeNextDay := pageEndTime.AddDate(0, 0, 1)
- log.Info("pageStartTime:" + pageStartTime.Format("2006-01-02 15:04:05") + " nextDay:" + StartTimeNextDay.Format("2006-01-02 15:04:05"))
- log.Info("pageEndTime time:" + pageEndTime.Format("2006-01-02 15:04:05") + " nextDay:" + EndTimeNextDay.Format("2006-01-02 15:04:05"))
+ log.Info("pageStartTime:" + pageStartTime.Format("2006-01-02 15:04:05"))
+ log.Info("pageEndTime time:" + pageEndTime.Format("2006-01-02 15:04:05"))
start_unix := pageStartTime.Unix()
end_unix := pageEndTime.Unix()
@@ -551,7 +681,15 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
FocusRepoCountMap := queryWatch(start_unix, end_unix)
StarRepoCountMap := queryStar(start_unix, end_unix)
WatchedCountMap, WatchOtherMap := queryFollow(start_unix, end_unix)
- CommitCodeSizeMap := queryCommitCodeSize(StartTimeNextDay.Unix(), EndTimeNextDay.Unix())
+ CommitCodeSizeMap, err := GetAllUserKPIStats(pageStartTime, pageEndTime)
+ if err != nil {
+		log.Info("query commit code error: " + err.Error())
+ } else {
+ log.Info("query commit code size, len=" + fmt.Sprint(len(CommitCodeSizeMap)))
+ CommitCodeSizeMapJson, _ := json.Marshal(CommitCodeSizeMap)
+ log.Info("CommitCodeSizeMapJson=" + string(CommitCodeSizeMapJson))
+ }
+ //CommitCodeSizeMap := queryCommitCodeSize(StartTimeNextDay.Unix(), EndTimeNextDay.Unix())
CommitDatasetSizeMap, CommitDatasetNumMap := queryDatasetSize(start_unix, end_unix)
SolveIssueCountMap := querySolveIssue(start_unix, end_unix)
CreateRepoCountMap := queryUserCreateRepo(start_unix, end_unix)
@@ -605,7 +743,12 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
dateRecordAll.FocusOtherUser = getMapValue(dateRecordAll.ID, WatchOtherMap)
dateRecordAll.StarRepoCount = getMapValue(dateRecordAll.ID, StarRepoCountMap)
dateRecordAll.WatchedCount = getMapValue(dateRecordAll.ID, WatchedCountMap)
- dateRecordAll.CommitCodeSize = getMapValue(dateRecordAll.ID, CommitCodeSizeMap)
+ if _, ok := CommitCodeSizeMap[dateRecordAll.Email]; !ok {
+ dateRecordAll.CommitCodeSize = 0
+ } else {
+ dateRecordAll.CommitCodeSize = int(CommitCodeSizeMap[dateRecordAll.Email].CommitLines)
+ }
+ //dateRecordAll.CommitCodeSize = getMapValue(dateRecordAll.ID, CommitCodeSizeMap)
dateRecordAll.CommitDatasetSize = getMapValue(dateRecordAll.ID, CommitDatasetSizeMap)
dateRecordAll.CommitDatasetNum = getMapValue(dateRecordAll.ID, CommitDatasetNumMap)
dateRecordAll.SolveIssueCount = getMapValue(dateRecordAll.ID, SolveIssueCountMap)
@@ -626,6 +769,7 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
dateRecordAll.NpuTrainJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_NpuTrainJob", CloudBrainTaskItemMap)
dateRecordAll.NpuInferenceJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_NpuInferenceJob", CloudBrainTaskItemMap)
dateRecordAll.GpuBenchMarkJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_GpuBenchMarkJob", CloudBrainTaskItemMap)
+ dateRecordAll.CloudBrainRunTime = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_CloudBrainRunTime", CloudBrainTaskItemMap)
dateRecordAll.CommitModelCount = getMapValue(dateRecordAll.ID, AiModelManageMap)
dateRecordAll.CollectDataset = getMapValue(dateRecordAll.ID, CollectDataset)
dateRecordAll.CollectedDataset = getMapValue(dateRecordAll.ID, CollectedDataset)
@@ -733,7 +877,12 @@ func RefreshUserStaticAllTabel(wikiCountMap map[string]int, userMetrics map[stri
pageStartTime = time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 0, 0, 0, time.Local).AddDate(0, 0, offset)
refreshUserStaticTable(wikiCountMap, "user_business_analysis_current_week", pageStartTime, pageEndTime, userMetrics)
+ pageEndTime = pageStartTime
+ pageStartTime = pageStartTime.AddDate(0, 0, -7)
+ refreshUserStaticTable(wikiCountMap, "user_business_analysis_last_week", pageStartTime, pageEndTime, userMetrics)
+
pageStartTime = time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 0, 0, 0, time.Local).AddDate(0, 0, -30)
+ pageEndTime = time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 23, 59, 59, 0, currentTimeNow.Location())
refreshUserStaticTable(wikiCountMap, "user_business_analysis_last30_day", pageStartTime, pageEndTime, userMetrics)
pageStartTime = time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 0, 0, 0, time.Local).AddDate(0, 0, -1)
@@ -774,7 +923,7 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time,
StarRepoCountMap := queryStar(start_unix, end_unix)
WatchedCountMap, WatchOtherMap := queryFollow(start_unix, end_unix)
- CommitCodeSizeMap, err := GetAllUserKPIStats()
+ CommitCodeSizeMap, err := GetAllUserKPIStats(startTime, endTime)
if err != nil {
log.Info("query commit code errr.")
} else {
@@ -825,7 +974,7 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time,
dateRecord.Name = userRecord.Name
dateRecord.GiteaAgeMonth = subMonth(currentTimeNow, userRecord.CreatedUnix.AsTime())
dateRecord.DataDate = DataDate
-
+ dateRecord.UserLocation = userRecord.Location
dateRecord.CodeMergeCount = getMapValue(dateRecord.ID, CodeMergeCountMap)
dateRecord.CommitCount = getMapValue(dateRecord.ID, CommitCountMap)
dateRecord.IssueCount = getMapValue(dateRecord.ID, IssueCountMap)
@@ -878,15 +1027,7 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time,
log.Info("has activity." + userRecord.Name)
addUserToMap(userNewAddActivity, userRecord.CreatedUnix, dateRecord.ID)
}
- if userRecord.IsActive {
- continue
- }
- statictisSess.Delete(&dateRecord)
- _, err = statictisSess.Insert(&dateRecord)
- if err != nil {
- log.Info("insert daterecord failed." + err.Error())
- return err
- }
+
}
indexTotal += PAGE_SIZE
@@ -971,7 +1112,7 @@ func setUserMetrics(userMetrics map[string]int, user *User, start_time int64, en
//HasActivityUser int `xorm:"NOT NULL DEFAULT 0"`
//TotalActivateRegistUser int `xorm:"NOT NULL DEFAULT 0"`
//TotalHasActivityUser
- regist_time := user.CreatedUnix.AsTime().Unix()
+ regist_time := int64(user.CreatedUnix)
if regist_time >= start_time && regist_time <= end_time {
if user.IsActive {
userMetrics["ActivateRegistUser"] = getMapKeyStringValue("ActivateRegistUser", userMetrics) + 1
@@ -1013,7 +1154,7 @@ func getUserIndexFromAnalysisAll(dateRecord UserBusinessAnalysisAll, ParaWeight
// 登录次数 0.10
result = float64(dateRecord.CodeMergeCount) * getParaWeightValue("CodeMergeCount", ParaWeight, 0.2)
result += float64(dateRecord.CommitCount) * getParaWeightValue("CommitCount", ParaWeight, 0.2)
- log.Info("1 result=" + fmt.Sprint(result))
+ //log.Info("1 result=" + fmt.Sprint(result))
result += float64(dateRecord.IssueCount) * getParaWeightValue("IssueCount", ParaWeight, 0.2)
result += float64(dateRecord.CommentCount) * getParaWeightValue("CommentCount", ParaWeight, 0.2)
result += float64(dateRecord.FocusRepoCount) * getParaWeightValue("FocusRepoCount", ParaWeight, 0.1)
@@ -1096,7 +1237,7 @@ func getUserIndex(dateRecord UserBusinessAnalysis, ParaWeight map[string]float64
// 登录次数 0.10
result = float64(dateRecord.CodeMergeCount) * getParaWeightValue("CodeMergeCount", ParaWeight, 0.2)
result += float64(dateRecord.CommitCount) * getParaWeightValue("CommitCount", ParaWeight, 0.2)
- log.Info("2 result=" + fmt.Sprint(result))
+ //log.Info("2 result=" + fmt.Sprint(result))
result += float64(dateRecord.IssueCount) * getParaWeightValue("IssueCount", ParaWeight, 0.2)
result += float64(dateRecord.CommentCount) * getParaWeightValue("CommentCount", ParaWeight, 0.2)
result += float64(dateRecord.FocusRepoCount) * getParaWeightValue("FocusRepoCount", ParaWeight, 0.1)
@@ -1640,7 +1781,7 @@ func queryImageStars(start_unix int64, end_unix int64) (map[int64]int, map[int64
var indexTotal int64
indexTotal = 0
for {
- sess.Select("id,uid,dataset_id").Table(new(ImageStar)).Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
+ sess.Select("id,uid,image_id").Table(new(ImageStar)).Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
imageStarList := make([]*ImageStar, 0)
sess.Find(&imageStarList)
log.Info("query imageStarList size=" + fmt.Sprint(len(imageStarList)))
@@ -1908,7 +2049,7 @@ func queryCloudBrainTask(start_unix int64, end_unix int64) (map[int64]int, map[s
var indexTotal int64
indexTotal = 0
for {
- sess.Select("id,job_type,user_id,duration,train_job_duration,type").Table("cloudbrain").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
+ sess.Select("id,job_type,user_id,duration,train_job_duration,type").Table("cloudbrain").Unscoped().Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
cloudTaskList := make([]*Cloudbrain, 0)
sess.Find(&cloudTaskList)
log.Info("query cloudbrain size=" + fmt.Sprint(len(cloudTaskList)))
diff --git a/models/user_business_struct.go b/models/user_business_struct.go
index 70f806c78..fec361bca 100644
--- a/models/user_business_struct.go
+++ b/models/user_business_struct.go
@@ -394,6 +394,72 @@ type UserBusinessAnalysisYesterday struct {
RecommendImage int `xorm:"NOT NULL DEFAULT 0"`
}
+type UserBusinessAnalysisLastWeek struct {
+ ID int64 `xorm:"pk"`
+ CountDate int64 `xorm:"pk"`
+ //action :ActionMergePullRequest // 11
+ CodeMergeCount int `xorm:"NOT NULL DEFAULT 0"`
+ //action :ActionCommitRepo
+ CommitCount int `xorm:"NOT NULL DEFAULT 0"`
+ //issue // 10
+ IssueCount int `xorm:"NOT NULL DEFAULT 0"`
+ //comment table current date
+ CommentCount int `xorm:"NOT NULL DEFAULT 0"`
+ //watch table current date
+ FocusRepoCount int `xorm:"NOT NULL DEFAULT 0"`
+ //star table current date
+ StarRepoCount int `xorm:"NOT NULL DEFAULT 0"`
+ //follow table
+ WatchedCount int `xorm:"NOT NULL DEFAULT 0"`
+ // user table
+ GiteaAgeMonth int `xorm:"NOT NULL DEFAULT 0"`
+ //
+ CommitCodeSize int `xorm:"NOT NULL DEFAULT 0"`
+ //attachement table
+ CommitDatasetSize int `xorm:"NOT NULL DEFAULT 0"`
+ //0
+ CommitModelCount int `xorm:"NOT NULL DEFAULT 0"`
+ //issue, issueassignees
+ SolveIssueCount int `xorm:"NOT NULL DEFAULT 0"`
+ //baike
+ EncyclopediasCount int `xorm:"NOT NULL DEFAULT 0"`
+ //user
+ RegistDate timeutil.TimeStamp `xorm:"NOT NULL"`
+ //repo
+ CreateRepoCount int `xorm:"NOT NULL DEFAULT 0"`
+ //login count, from elk
+ LoginCount int `xorm:"NOT NULL DEFAULT 0"`
+ //openi index
+ OpenIIndex float64 `xorm:"NOT NULL DEFAULT 0"`
+ //user
+ Email string `xorm:"NOT NULL"`
+ //user
+ Name string `xorm:"NOT NULL"`
+ DataDate string `xorm:"NULL"`
+
+ CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
+ GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
+ NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
+ GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
+ NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
+ NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
+ GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
+ CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
+ CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
+ UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
+ UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"`
+
+ UserLocation string `xorm:"NULL"`
+
+ FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"`
+ CollectDataset int `xorm:"NOT NULL DEFAULT 0"`
+ CollectedDataset int `xorm:"NOT NULL DEFAULT 0"`
+ RecommendDataset int `xorm:"NOT NULL DEFAULT 0"`
+ CollectImage int `xorm:"NOT NULL DEFAULT 0"`
+ CollectedImage int `xorm:"NOT NULL DEFAULT 0"`
+ RecommendImage int `xorm:"NOT NULL DEFAULT 0"`
+}
+
type UserAnalysisPara struct {
Key string `xorm:"NOT NULL"`
Value float64 `xorm:"NOT NULL DEFAULT 0"`
diff --git a/modules/git/repo_stats_custom.go b/modules/git/repo_stats_custom.go
index d70a17052..1a7b657d5 100644
--- a/modules/git/repo_stats_custom.go
+++ b/modules/git/repo_stats_custom.go
@@ -58,12 +58,11 @@ func SetDevelopAge(repoPath string, stats *RepoKPIStats, fromTime time.Time) err
return nil
}
-//获取一天内的用户贡献指标
-func GetUserKPIStats(repoPath string) (map[string]*UserKPIStats, error) {
- timeUntil := time.Now()
- oneDayAgo := timeUntil.AddDate(0, 0, -1)
- since := oneDayAgo.Format(time.RFC3339)
- args := []string{"log", "--numstat", "--no-merges", "--branches=*", "--pretty=format:---%n%h%n%an%n%ae%n", "--date=iso", fmt.Sprintf("--since='%s'", since)}
+func GetUserKPIStats(repoPath string, startTime time.Time, endTime time.Time) (map[string]*UserKPIStats, error) {
+
+ after := startTime.Format(time.RFC3339)
+ until := endTime.Format(time.RFC3339)
+	args := []string{"log", "--numstat", "--no-merges", "--branches=*", "--pretty=format:---%n%h%n%an%n%ae%n", "--date=iso", fmt.Sprintf("--after='%s'", after), fmt.Sprintf("--until='%s'", until)}
stdout, err := NewCommand(args...).RunInDirBytes(repoPath)
if err != nil {
return nil, err
@@ -124,6 +123,14 @@ func GetUserKPIStats(repoPath string) (map[string]*UserKPIStats, error) {
}
+//获取一天内的用户贡献指标
+func getUserKPIStats(repoPath string) (map[string]*UserKPIStats, error) {
+ timeUntil := time.Now()
+ oneDayAgo := timeUntil.AddDate(0, 0, -1)
+
+	return GetUserKPIStats(repoPath, oneDayAgo, timeUntil)
+}
+
func SetRepoKPIStats(repoPath string, fromTime time.Time, stats *RepoKPIStats, newContributers map[string]struct{}) error {
since := fromTime.Format(time.RFC3339)
args := []string{"log", "--numstat", "--no-merges", "HEAD", "--pretty=format:---%n%h%n%an%n%ae%n", "--date=iso", fmt.Sprintf("--since='%s'", since)}
diff --git a/modules/repofiles/update.go b/modules/repofiles/update.go
index d7751d50e..07440301a 100644
--- a/modules/repofiles/update.go
+++ b/modules/repofiles/update.go
@@ -783,6 +783,7 @@ func RenameRepoFile(repo *models.Repository, doer *models.User, opts *RenameRepo
// Check that the path given in opts.treePath is valid (not a git path)
treePath := CleanUploadFileName(opts.TreePath)
+ treePath = strings.ReplaceAll(treePath, " ", "")
if treePath == "" {
return models.ErrFilenameInvalid{
Path: opts.TreePath,
@@ -942,16 +943,16 @@ func moveAndAddFiles(oldTreePath, newTreePath string, t *TemporaryUploadReposito
}
//example for v(mode SHA-1 stage file)
//100755 d294c88235ac05d3dece028d8a65590f28ec46ac 0 custom/conf/app.ini
- v = strings.ReplaceAll(v, "0\t", "")
- tmpArray := strings.Split(v, " ")
- oldPath := tmpArray[2]
+ tempArray := strings.Split(v, "0\t")
+ leftArray := strings.Split(tempArray[0], " ")
+ oldPath := tempArray[1]
newPath := newTreePath + strings.TrimPrefix(oldPath, oldTreePath)
// mode 0 means remove file
stdIn.WriteString("0 0000000000000000000000000000000000000000\t")
stdIn.WriteString(oldPath)
stdIn.WriteByte('\000')
- stdIn.WriteString(tmpArray[0] + " ")
- stdIn.WriteString(tmpArray[1] + "\t")
+ stdIn.WriteString(leftArray[0] + " ")
+ stdIn.WriteString(leftArray[1] + "\t")
stdIn.WriteString(newPath)
stdIn.WriteByte('\000')
}
diff --git a/options/locale/locale_en-US.ini b/options/locale/locale_en-US.ini
index ef3ad7705..15afb37ca 100755
--- a/options/locale/locale_en-US.ini
+++ b/options/locale/locale_en-US.ini
@@ -519,15 +519,18 @@ static.RecommendDataset=Recommended Dataset Count
static.CollectImage=Collect Image Count
static.CollectedImage=Collected Image Count
static.RecommendImage=Recommended Image Count
+static.email=Email
+static.location=Location
static.all=All
static.public.user_business_analysis_current_month=Current_Month
static.public.user_business_analysis_current_week=Current_Week
+static.public.user_business_analysis_last_week=Last_Week
static.public.user_business_analysis_current_year=Current_Year
static.public.user_business_analysis_last30_day=Last_30_day
static.public.user_business_analysis_last_month=Last_Month
static.public.user_business_analysis_yesterday=Yesterday
static.public.user_business_analysis_all=All
-
+static.downloadinfo=Because a custom time period generates a large amount of data and takes a long time to calculate, please download the exported data from the following address:
metrics.sheetname=User Trend Analysis
metrics.date=Count Date
metrics.newregistuser=New registered user
@@ -536,6 +539,7 @@ metrics.hasactivateuser=New contributing activities
metrics.newregistnotactiveuser=New inactive
metrics.averageuser=Average new users
metrics.newuseractiveindex=Activation rate of new users
+metrics.currentdayactivity=Current day contributing activities
metrics.totalregistuser=Cumulative registered users
metrics.totalactiveduser=Cumulative activated users
metrics.totalhasactivityuser=Cumulative active users
@@ -2945,6 +2949,7 @@ raw_minutes = minutes
[dropzone]
default_message = Drop files or click here to upload.
+default_dataset_message = Click to add files or directly drag and drop files here.
invalid_input_type = You can not upload files of this type.
file_too_big = File size ({{filesize}} MB) exceeds the maximum size of ({{maxFilesize}} MB).
remove_file = Remove file
diff --git a/options/locale/locale_zh-CN.ini b/options/locale/locale_zh-CN.ini
index 2f0bbe91d..fc5f89ccb 100755
--- a/options/locale/locale_zh-CN.ini
+++ b/options/locale/locale_zh-CN.ini
@@ -524,15 +524,18 @@ static.RecommendDataset=被推荐数据集数
static.CollectImage=收藏镜像数
static.CollectedImage=被收藏镜像数
static.RecommendImage=被推荐镜像数
+static.email=Email
+static.location=所在地区
static.all=所有
static.public.user_business_analysis_current_month=本月
static.public.user_business_analysis_current_week=本周
+static.public.user_business_analysis_last_week=上周
static.public.user_business_analysis_current_year=今年
static.public.user_business_analysis_last30_day=近30天
static.public.user_business_analysis_last_month=上月
static.public.user_business_analysis_yesterday=昨天
static.public.user_business_analysis_all=所有
-
+static.downloadinfo=因自定义时间段产生的数据量比较大,计算时间比较长,请您从如下地址下载导出数据,导出数据下载地址:
metrics.sheetname=用户趋势分析
metrics.date=日期
metrics.newregistuser=新增注册用户
@@ -541,6 +544,7 @@ metrics.hasactivateuser=新增有贡献活动
metrics.newregistnotactiveuser=新增未激活
metrics.averageuser=平均新增用户
metrics.newuseractiveindex=新增用户激活率
+metrics.currentdayactivity=当日有贡献活动
metrics.totalregistuser=累计注册用户
metrics.totalactiveduser=累计已激活
metrics.totalhasactivityuser=累计有贡献活动
@@ -2955,6 +2959,7 @@ raw_minutes=分钟
[dropzone]
default_message=拖动文件或者点击此处上传。
+default_dataset_message=点击添加文件或直接拖拽文件到此处。
invalid_input_type=您不能上传该类型的文件
file_too_big=文件体积({{filesize}} MB)超过了最大允许体积({{maxFilesize}} MB)
remove_file=移除文件
diff --git a/package-lock.json b/package-lock.json
index 9233b813b..d65d6b49a 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -5439,7 +5439,7 @@
"gulp-tap": "^1.0.1",
"gulp-uglify": "^3.0.1",
"inquirer": "^6.2.1",
- "jquery": "^3.4.0",
+ "jquery": "^3.5.1",
"less": "^3.7.0",
"map-stream": "^0.1.0",
"merge-stream": "^2.0.0",
diff --git a/public/home/home.js b/public/home/home.js
index 1f11b9a4f..33c7edc47 100755
--- a/public/home/home.js
+++ b/public/home/home.js
@@ -117,6 +117,7 @@ socket.onmessage = function (e) {
continue;
}
}
+ refresh3DInfo(record);
var recordPrefix = getMsg(record);
if(record.OpType == "6" || record.OpType == "10" || record.OpType == "12" || record.OpType == "13"){
html += recordPrefix + actionName;
@@ -200,6 +201,29 @@ function getTaskLink(record){
return re;
}
+function refresh3DInfo(record){
+ if(record.OpType == "25" || record.OpType == "29" || record.OpType == "31"){
+ //cloudbrain one
+ var lines = $('.rotation3D__line');
+ var span = $('.rotation3D__line').find("span")[0];
+ console.log(span);
+ span.innerText =record.RefName;
+ //$('.rotation3D__line').find("span").eq(0).text(record.RefName)
+ console.log("cloudbrain one line length=" + lines.length);
+ //lines[0].find("span").text(record.RefName);
+ }else if(record.OpType == "26" || record.OpType == "27" || record.OpType == "28"){
+ //cloudbrain two
+ var lines = $('.rotation3D__line');
+ console.log("cloudbrain two line length=" + lines.length);
+ var span = $('.rotation3D__line').find("span")[1];
+ console.log(span);
+ span.innerText =record.RefName;
+ //$('.rotation3D__line').find("span").eq(1).text(record.RefName)
+ //lines[1].find("span").text(record.RefName);
+ }
+
+}
+
function getMsg(record){
var html ="";
html += "
";
@@ -418,48 +442,57 @@ queryRecommendData();
function queryRecommendData(){
$.ajax({
type:"GET",
- url:"/recommend/org",
+ url:"/recommend/home",
headers: {
authorization:token,
},
dataType:"json",
async:false,
success:function(json){
- displayOrg(json);
+ displayOrg(json.org);
+ displayRepo(json.repo);
+ displayActivity(json.image);
+ displayCloudBrain(json.cloudbrain)
},
error:function(response) {
}
});
- $.ajax({
- type:"GET",
- url:"/recommend/repo",
- headers: {
- authorization:token,
- },
- dataType:"json",
- async:false,
- success:function(json){
- displayRepo(json);
- },
- error:function(response) {
- }
- });
+ // $.ajax({
+ // type:"GET",
+ // url:"/recommend/repo",
+ // headers: {
+ // authorization:token,
+ // },
+ // dataType:"json",
+ // async:false,
+ // success:function(json){
+ // displayRepo(json);
+ // },
+ // error:function(response) {
+ // }
+ // });
+
+ // $.ajax({
+ // type:"GET",
+ // url:"/recommend/imageinfo",
+ // headers: {
+ // authorization:token,
+ // },
+ // dataType:"json",
+ // async:false,
+ // success:function(json){
+ // displayActivity(json);
+ // },
+ // error:function(response) {
+ // }
+ // });
+}
- $.ajax({
- type:"GET",
- url:"/recommend/imageinfo",
- headers: {
- authorization:token,
- },
- dataType:"json",
- async:false,
- success:function(json){
- displayActivity(json);
- },
- error:function(response) {
- }
- });
+function displayCloudBrain(json){
+ $('#completed_task').text(json.completed_task);
+ $('#running_task').text(json.running_task);
+ $('#wait_task').text(json.wait_task);
}
function displayActivity(json){
diff --git a/routers/api/v1/api.go b/routers/api/v1/api.go
index d2c6e3633..17122b09d 100755
--- a/routers/api/v1/api.go
+++ b/routers/api/v1/api.go
@@ -559,10 +559,12 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Get("/query_metrics_all", operationReq, repo_ext.QueryUserMetricsAll)
m.Get("/query_user_metrics_page", operationReq, repo_ext.QueryUserMetricDataPage)
+ m.Get("/download_user_define_file", operationReq, repo_ext.DownloadUserDefineFile)
m.Get("/query_user_rank_list", operationReq, repo_ext.QueryRankingList)
m.Get("/query_user_static_page", operationReq, repo_ext.QueryUserStaticDataPage)
m.Get("/query_user_current_month", operationReq, repo_ext.QueryUserStaticCurrentMonth)
m.Get("/query_user_current_week", operationReq, repo_ext.QueryUserStaticCurrentWeek)
+ m.Get("/query_user_last_week", operationReq, repo_ext.QueryUserStaticLastWeek)
m.Get("/query_user_current_year", operationReq, repo_ext.QueryUserStaticCurrentYear)
m.Get("/query_user_last30_day", operationReq, repo_ext.QueryUserStaticLast30Day)
m.Get("/query_user_last_month", operationReq, repo_ext.QueryUserStaticLastMonth)
@@ -571,6 +573,9 @@ func RegisterRoutes(m *macaron.Macaron) {
//cloudbrain board
m.Group("/cloudbrainboard", func() {
m.Get("/downloadAll", repo.DownloadCloudBrainBoard)
+ m.Group("/cloudbrain", func() {
+ m.Get("/status_analysis", repo.GetCloudbrainsStatusAnalysis)
+ })
}, operationReq)
// Users
m.Group("/users", func() {
diff --git a/routers/api/v1/repo/cloudbrain_dashboard.go b/routers/api/v1/repo/cloudbrain_dashboard.go
index 2090a2cf2..0710a21e4 100644
--- a/routers/api/v1/repo/cloudbrain_dashboard.go
+++ b/routers/api/v1/repo/cloudbrain_dashboard.go
@@ -11,6 +11,16 @@ import (
"github.com/360EntSecGroup-Skylar/excelize/v2"
)
+type CloudbrainsStatusAnalysis struct {
+ JobWaitingCount int64 `json:"jobWaitingCount"`
+ JobRunningCount int64 `json:"jobRunningCount"`
+ JobStoppedCount int64 `json:"jobStoppedCount"`
+ JobCompletedCount int64 `json:"jobCompletedCount"`
+ JobFailedCount int64 `json:"jobFailedCount"`
+ JobKilledCount int64 `json:"jobKilledCount"`
+ JobInitCount int64 `json:"jobInitCount"`
+}
+
func DownloadCloudBrainBoard(ctx *context.Context) {
page := 1
@@ -133,3 +143,9 @@ func getBrainWaitTime(rs *models.CloudbrainInfo) string {
return models.ConvertDurationToStr(int64(waitTime))
}
}
+func GetCloudbrainsStatusAnalysis(ctx *context.Context) {
+ cloudBrainStatusResult := models.GetAllStatusCloudBrain()
+ ctx.JSON(http.StatusOK, map[string]interface{}{
+ "cloudBrainStatusResult": cloudBrainStatusResult,
+ })
+}
diff --git a/routers/home.go b/routers/home.go
index e37cacb01..1b02c43ad 100755
--- a/routers/home.go
+++ b/routers/home.go
@@ -7,6 +7,7 @@ package routers
import (
"bytes"
+ "fmt"
"net/http"
"strconv"
"strings"
@@ -99,6 +100,12 @@ func setRecommendURL(ctx *context.Context) {
func Dashboard(ctx *context.Context) {
if ctx.IsSigned {
+ pictureInfo, err := getImageInfo("dashboard-picture")
+ if err == nil && len(pictureInfo) > 0 {
+ log.Info("set image info=" + pictureInfo[0]["url"])
+ ctx.Data["image_url"] = pictureInfo[0]["url"]
+ ctx.Data["image_link"] = pictureInfo[0]["image_link"]
+ }
if !ctx.User.IsActive && setting.Service.RegisterEmailConfirm {
ctx.Data["Title"] = ctx.Tr("auth.active_your_account")
ctx.HTML(200, user.TplActivate)
@@ -259,7 +266,11 @@ func ExploreRepos(ctx *context.Context) {
ctx.Data["PageIsExplore"] = true
ctx.Data["PageIsExploreRepositories"] = true
ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled
-
+ pictureInfo, err := getImageInfo("explore-user-picture")
+ if err == nil && len(pictureInfo) > 0 {
+ ctx.Data["image_url"] = pictureInfo[0]["url"]
+ ctx.Data["image_link"] = pictureInfo[0]["image_link"]
+ }
var ownerID int64
if ctx.User != nil && !ctx.User.IsAdmin {
ownerID = ctx.User.ID
@@ -434,7 +445,11 @@ func ExploreUsers(ctx *context.Context) {
ctx.Data["PageIsExplore"] = true
ctx.Data["PageIsExploreUsers"] = true
ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled
-
+ pictureInfo, err := getImageInfo("explore-user-picture")
+ if err == nil && len(pictureInfo) > 0 {
+ ctx.Data["image_url"] = pictureInfo[0]["url"]
+ ctx.Data["image_link"] = pictureInfo[0]["image_link"]
+ }
RenderUserSearch(ctx, &models.SearchUserOptions{
Actor: ctx.User,
Type: models.UserTypeIndividual,
@@ -471,7 +486,7 @@ func ExploreOrganizations(ctx *context.Context) {
return
}
- recommendOrgs, err := GetRecommendOrg()
+ recommendOrgs, err := getRecommendOrg()
if err != nil {
log.Error("GetRecommendOrgInfos failed:%v", err.Error(), ctx.Data["MsgID"])
ctx.ServerError("GetRecommendOrgInfos", err)
@@ -606,31 +621,31 @@ func ExploreImages(ctx *context.Context) {
}
func ExploreDataAnalysisUserTrend(ctx *context.Context) {
- ctx.Data["url_params"]="UserTrend"
+ ctx.Data["url_params"] = "UserTrend"
ctx.HTML(200, tplExploreExploreDataAnalysis)
}
func ExploreDataAnalysisUserAnalysis(ctx *context.Context) {
- ctx.Data["url_params"]="UserAnalysis"
+ ctx.Data["url_params"] = "UserAnalysis"
ctx.HTML(200, tplExploreExploreDataAnalysis)
}
func ExploreDataAnalysisProTrend(ctx *context.Context) {
- ctx.Data["url_params"]="ProTrend"
+ ctx.Data["url_params"] = "ProTrend"
ctx.HTML(200, tplExploreExploreDataAnalysis)
}
func ExploreDataAnalysisProAnalysis(ctx *context.Context) {
- ctx.Data["url_params"]="ProAnalysis"
+ ctx.Data["url_params"] = "ProAnalysis"
ctx.HTML(200, tplExploreExploreDataAnalysis)
}
func ExploreDataAnalysisOverview(ctx *context.Context) {
- ctx.Data["url_params"]="Overview"
+ ctx.Data["url_params"] = "Overview"
ctx.HTML(200, tplExploreExploreDataAnalysis)
}
func ExploreDataAnalysisBrainAnalysis(ctx *context.Context) {
- ctx.Data["url_params"]="BrainAnalysis"
+ ctx.Data["url_params"] = "BrainAnalysis"
ctx.HTML(200, tplExploreExploreDataAnalysis)
}
func ExploreDataAnalysis(ctx *context.Context) {
- ctx.Data["url_params"]=""
+ ctx.Data["url_params"] = ""
ctx.HTML(200, tplExploreExploreDataAnalysis)
}
@@ -640,7 +655,7 @@ func NotFound(ctx *context.Context) {
ctx.NotFound("home.NotFound", nil)
}
-func GetRecommendOrg() ([]map[string]interface{}, error) {
+func getRecommendOrg() ([]map[string]interface{}, error) {
url := setting.RecommentRepoAddr + "organizations"
result, err := repository.RecommendFromPromote(url)
@@ -668,17 +683,18 @@ func GetRecommendOrg() ([]map[string]interface{}, error) {
}
return resultOrg, nil
}
-func GetImageInfo() ([]map[string]interface{}, error) {
- url := setting.RecommentRepoAddr + "picture_info"
+
+func getImageInfo(filename string) ([]map[string]string, error) {
+ url := setting.RecommentRepoAddr + filename
result, err := repository.RecommendFromPromote(url)
if err != nil {
return nil, err
}
- imageInfo := make([]map[string]interface{}, 0)
+ imageInfo := make([]map[string]string, 0)
for i := 0; i < (len(result) - 1); i++ {
line := result[i]
- imageMap := make(map[string]interface{})
+ imageMap := make(map[string]string)
if line[0:4] == "url=" {
url := line[4:]
imageMap["url"] = url
@@ -731,14 +747,14 @@ func GetRankUser(index string) ([]map[string]interface{}, error) {
return resultOrg, nil
}
-func GetImageInfoFromPromote(ctx *context.Context) {
- imageInfo, err := GetImageInfo()
- if err != nil {
- ctx.ServerError("500", err)
- return
- }
- ctx.JSON(200, imageInfo)
-}
+// func GetImageInfoFromPromote(ctx *context.Context) {
+// imageInfo, err := GetImageInfo()
+// if err != nil {
+// ctx.ServerError("500", err)
+// return
+// }
+// ctx.JSON(200, imageInfo)
+// }
func GetUserRankFromPromote(ctx *context.Context) {
index := ctx.Params("index")
@@ -750,15 +766,49 @@ func GetUserRankFromPromote(ctx *context.Context) {
ctx.JSON(200, resultUserRank)
}
-func RecommendOrgFromPromote(ctx *context.Context) {
- resultOrg, err := GetRecommendOrg()
+func RecommendHomeInfo(ctx *context.Context) {
+ resultOrg, err := getRecommendOrg()
if err != nil {
- ctx.ServerError("500", err)
- return
+ log.Info("error." + err.Error())
+ }
+ resultRepo, err := repository.GetRecommendRepoFromPromote("projects")
+ if err != nil {
+ log.Info("error." + err.Error())
+ }
+ resultImage, err := getImageInfo("picture_info")
+ if err != nil {
+ log.Info("error." + err.Error())
}
- ctx.JSON(200, resultOrg)
+ resultCloudBrain, err := getCloudbrainNums()
+ if err != nil {
+ log.Info("error." + err.Error())
+ }
+ mapInterface := make(map[string]interface{})
+ mapInterface["org"] = resultOrg
+ mapInterface["repo"] = resultRepo
+ mapInterface["image"] = resultImage
+ mapInterface["cloudbrain"] = resultCloudBrain
+ ctx.JSON(http.StatusOK, mapInterface)
}
+func getCloudbrainNums() (map[string]string, error) {
+ result := make(map[string]string)
+ cloudStatusMap := models.GetAllStatusCloudBrain()
+ result["completed_task"] = fmt.Sprint(cloudStatusMap["COMPLETED"])
+ result["running_task"] = fmt.Sprint(cloudStatusMap["RUNNING"])
+ result["wait_task"] = fmt.Sprint(cloudStatusMap["WAITING"])
+ return result, nil
+}
+
+// func RecommendOrgFromPromote(ctx *context.Context) {
+// resultOrg, err := GetRecommendOrg()
+// if err != nil {
+// ctx.ServerError("500", err)
+// return
+// }
+// ctx.JSON(200, resultOrg)
+// }
+
func RecommendRepoFromPromote(ctx *context.Context) {
result, err := repository.GetRecommendRepoFromPromote("projects")
if err != nil {
diff --git a/routers/repo/attachment.go b/routers/repo/attachment.go
index aa52a1400..dc2c417e4 100755
--- a/routers/repo/attachment.go
+++ b/routers/repo/attachment.go
@@ -902,16 +902,17 @@ func CompleteMultipart(ctx *context.Context) {
if err != nil {
log.Error("SendDecompressTask(%s) failed:%s", uuid, err.Error())
} else {
- attachment.DecompressState = models.DecompressStateIng
- err = models.UpdateAttachment(attachment)
- if err != nil {
- log.Error("UpdateAttachment state(%s) failed:%s", uuid, err.Error())
- }
+ updateAttachmentDecompressStateIng(attachment)
}
}
if typeCloudBrain == models.TypeCloudBrainTwo {
attachjson, _ := json.Marshal(attachment)
- labelmsg.SendDecompressAttachToLabelOBS(string(attachjson))
+ err = labelmsg.SendDecompressAttachToLabelOBS(string(attachjson))
+ if err != nil {
+ log.Error("SendDecompressTask to labelsystem (%s) failed:%s", attachment.UUID, err.Error())
+ } else {
+ updateAttachmentDecompressStateIng(attachment)
+ }
}
} else {
var labelMap map[string]string
diff --git a/routers/repo/cloudbrain.go b/routers/repo/cloudbrain.go
index 12d254812..a5dd52956 100755
--- a/routers/repo/cloudbrain.go
+++ b/routers/repo/cloudbrain.go
@@ -206,7 +206,7 @@ func CloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) {
ctx.Data["PageIsCloudBrain"] = true
displayJobName := form.DisplayJobName
jobName := util.ConvertDisplayJobNameToJobName(displayJobName)
- image := form.Image
+ image := strings.TrimSpace(form.Image)
uuid := form.Attachment
jobType := form.JobType
gpuQueue := form.GpuType
@@ -283,30 +283,6 @@ func CloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) {
mkModelPath(modelPath)
uploadCodeToMinio(modelPath, jobName, cloudbrain.ModelMountPath+"/")
- benchmarkPath := setting.JobPath + jobName + cloudbrain.BenchMarkMountPath
- if setting.IsBenchmarkEnabled && jobType == string(models.JobTypeBenchmark) {
- var gpuType string
- for _, gpuInfo := range gpuInfos.GpuInfo {
- if gpuInfo.Queue == gpuQueue {
- gpuType = gpuInfo.Value
- }
- }
- downloadRateCode(repo, jobName, setting.BenchmarkOwner, setting.BenchmarkName, benchmarkPath, form.BenchmarkCategory, gpuType)
- uploadCodeToMinio(benchmarkPath+"/", jobName, cloudbrain.BenchMarkMountPath+"/")
- }
-
- snn4imagenetPath := setting.JobPath + jobName + cloudbrain.Snn4imagenetMountPath
- if setting.IsSnn4imagenetEnabled && jobType == string(models.JobTypeSnn4imagenet) {
- downloadRateCode(repo, jobName, setting.Snn4imagenetOwner, setting.Snn4imagenetName, snn4imagenetPath, "", "")
- uploadCodeToMinio(snn4imagenetPath+"/", jobName, cloudbrain.Snn4imagenetMountPath+"/")
- }
-
- brainScorePath := setting.JobPath + jobName + cloudbrain.BrainScoreMountPath
- if setting.IsBrainScoreEnabled && jobType == string(models.JobTypeBrainScore) {
- downloadRateCode(repo, jobName, setting.BrainScoreOwner, setting.BrainScoreName, brainScorePath, "", "")
- uploadCodeToMinio(brainScorePath+"/", jobName, cloudbrain.BrainScoreMountPath+"/")
- }
-
err = cloudbrain.GenerateTask(ctx, displayJobName, jobName, image, command, uuid, storage.GetMinioPath(jobName, cloudbrain.CodeMountPath+"/"),
storage.GetMinioPath(jobName, cloudbrain.ModelMountPath+"/"),
storage.GetMinioPath(jobName, cloudbrain.BenchMarkMountPath+"/"), storage.GetMinioPath(jobName, cloudbrain.Snn4imagenetMountPath+"/"),
@@ -482,6 +458,17 @@ func cloudBrainShow(ctx *context.Context, tpName base.TplName, jobType models.Jo
ctx.Data["resource_type"] = resourceType.Value
}
}
+ } else if cloudbrain.IsBenchmarkJob(task.JobType) {
+ if benchmarkGpuInfos == nil {
+ json.Unmarshal([]byte(setting.BenchmarkGpuTypes), &benchmarkGpuInfos)
+ }
+
+ for _, resourceType := range benchmarkGpuInfos.GpuInfo {
+ if resourceType.Queue == jobRes.Config.GpuType {
+ ctx.Data["resource_type"] = resourceType.Value
+ }
+ }
+
} else {
if gpuInfos == nil {
json.Unmarshal([]byte(setting.GpuTypes), &gpuInfos)
@@ -1241,7 +1228,7 @@ func downloadCode(repo *models.Repository, codePath, branchName string) error {
return nil
}
-func downloadRateCode(repo *models.Repository, taskName, rateOwnerName, rateRepoName, codePath, benchmarkCategory, gpuType string) error {
+func downloadRateCode(repo *models.Repository, taskName, rateOwnerName, rateRepoName, codePath, benchmarkCategory, gpuType, userName string) error {
err := os.MkdirAll(codePath, os.ModePerm)
if err != nil {
log.Error("mkdir codePath failed", err.Error())
@@ -1269,7 +1256,7 @@ func downloadRateCode(repo *models.Repository, taskName, rateOwnerName, rateRepo
defer f.Close()
data, err := json.Marshal(models.TaskInfo{
- Username: repo.Owner.Name,
+ Username: userName,
TaskName: taskName,
CodeName: repo.Name,
BenchmarkCategory: strings.Split(benchmarkCategory, ","),
@@ -1845,7 +1832,7 @@ func BenchMarkAlgorithmCreate(ctx *context.Context, form auth.CreateCloudBrainFo
ctx.Data["PageIsCloudBrain"] = true
displayJobName := form.DisplayJobName
jobName := util.ConvertDisplayJobNameToJobName(displayJobName)
- image := form.Image
+ image := strings.TrimSpace(form.Image)
gpuQueue := form.GpuType
command := cloudbrain.CommandBenchmark
codePath := setting.JobPath + jobName + cloudbrain.CodeMountPath
@@ -1970,7 +1957,7 @@ func BenchMarkAlgorithmCreate(ctx *context.Context, form auth.CreateCloudBrainFo
}
}
- if err := downloadRateCode(repo, jobName, childInfo.Owner, childInfo.RepoName, benchmarkPath, form.BenchmarkCategory, gpuType); err != nil {
+ if err := downloadRateCode(repo, jobName, childInfo.Owner, childInfo.RepoName, benchmarkPath, form.BenchmarkCategory, gpuType, ctx.User.Name); err != nil {
log.Error("downloadRateCode failed, %v", err, ctx.Data["MsgID"])
//cloudBrainNewDataPrepare(ctx)
//ctx.RenderWithErr("system error", tplCloudBrainBenchmarkNew, &form)
@@ -2068,7 +2055,7 @@ func ModelBenchmarkCreate(ctx *context.Context, form auth.CreateCloudBrainForm)
snn4imagenetPath := setting.JobPath + jobName + cloudbrain.Snn4imagenetMountPath
if setting.IsSnn4imagenetEnabled && jobType == string(models.JobTypeSnn4imagenet) {
- downloadRateCode(repo, jobName, setting.Snn4imagenetOwner, setting.Snn4imagenetName, snn4imagenetPath, "", "")
+ downloadRateCode(repo, jobName, setting.Snn4imagenetOwner, setting.Snn4imagenetName, snn4imagenetPath, "", "", ctx.User.Name)
uploadCodeToMinio(snn4imagenetPath+"/", jobName, cloudbrain.Snn4imagenetMountPath+"/")
command = fmt.Sprintf(cloudbrain.Snn4imagenetCommand, displayJobName, trimSpaceNewlineInString(form.Description))
@@ -2076,7 +2063,7 @@ func ModelBenchmarkCreate(ctx *context.Context, form auth.CreateCloudBrainForm)
benchmarkChildTypeID := 0
brainScorePath := setting.JobPath + jobName + cloudbrain.BrainScoreMountPath
if setting.IsBrainScoreEnabled && jobType == string(models.JobTypeBrainScore) {
- downloadRateCode(repo, jobName, setting.BrainScoreOwner, setting.BrainScoreName, brainScorePath, "", "")
+ downloadRateCode(repo, jobName, setting.BrainScoreOwner, setting.BrainScoreName, brainScorePath, "", "", ctx.User.Name)
uploadCodeToMinio(brainScorePath+"/", jobName, cloudbrain.BrainScoreMountPath+"/")
benchmarkChildTypeID = form.BenchmarkChildTypeID
command = fmt.Sprintf(cloudbrain.BrainScoreCommand, getBrainRegion(benchmarkChildTypeID), displayJobName, trimSpaceNewlineInString(form.Description))
@@ -2136,7 +2123,7 @@ func CloudBrainTrainJobNew(ctx *context.Context) {
func getTrainJobCommand(form auth.CreateCloudBrainForm) (string, error) {
var command string
- bootFile := form.BootFile
+ bootFile := strings.TrimSpace(form.BootFile)
params := form.Params
if !strings.HasSuffix(bootFile, ".py") {
diff --git a/routers/repo/dataset.go b/routers/repo/dataset.go
index 73036a2cc..0e57fe1a0 100755
--- a/routers/repo/dataset.go
+++ b/routers/repo/dataset.go
@@ -106,6 +106,8 @@ func DatasetIndex(ctx *context.Context) {
MustEnableDataset(ctx)
ctx.Data["PageIsDataset"] = true
+ ctx.Data["SortType"] = ctx.Query("sort")
+
repo := ctx.Repo.Repository
dataset, err := models.GetDatasetByRepo(repo)
@@ -128,9 +130,31 @@ func DatasetIndex(ctx *context.Context) {
attachments := newFilterPrivateAttachments(ctx, dataset.Attachments, repo)
- sort.Slice(attachments, func(i, j int) bool {
- return attachments[i].CreatedUnix > attachments[j].CreatedUnix
- })
+ if ctx.Data["SortType"] == "nameAsc" {
+ sort.Slice(attachments, func(i, j int) bool {
+ return strings.ToLower(attachments[i].Name) < strings.ToLower(attachments[j].Name)
+ })
+ } else if ctx.Data["SortType"] == "nameDesc" {
+ sort.Slice(attachments, func(i, j int) bool {
+ return strings.ToLower(attachments[i].Name) > strings.ToLower(attachments[j].Name)
+ })
+ } else if ctx.Data["SortType"] == "sizeAsc" {
+ sort.Slice(attachments, func(i, j int) bool {
+ return attachments[i].Size < attachments[j].Size
+ })
+ } else if ctx.Data["SortType"] == "sizeDesc" {
+ sort.Slice(attachments, func(i, j int) bool {
+ return attachments[i].Size > attachments[j].Size
+ })
+ } else if ctx.Data["SortType"] == "timeAsc" {
+ sort.Slice(attachments, func(i, j int) bool {
+ return attachments[i].CreatedUnix < attachments[j].CreatedUnix
+ })
+ } else {
+ sort.Slice(attachments, func(i, j int) bool {
+ return attachments[i].CreatedUnix > attachments[j].CreatedUnix
+ })
+ }
page := ctx.QueryInt("page")
if page <= 0 {
diff --git a/routers/repo/modelarts.go b/routers/repo/modelarts.go
index bcc6f6156..e099a19ff 100755
--- a/routers/repo/modelarts.go
+++ b/routers/repo/modelarts.go
@@ -967,7 +967,7 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm)
description := form.Description
workServerNumber := form.WorkServerNumber
engineID := form.EngineID
- bootFile := form.BootFile
+ bootFile := strings.TrimSpace(form.BootFile)
flavorCode := form.Flavor
params := form.Params
poolID := form.PoolID
@@ -1210,7 +1210,7 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ
description := form.Description
workServerNumber := form.WorkServerNumber
engineID := form.EngineID
- bootFile := form.BootFile
+ bootFile := strings.TrimSpace(form.BootFile)
flavorCode := form.Flavor
params := form.Params
poolID := form.PoolID
@@ -1284,7 +1284,7 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ
var parameters models.Parameters
param := make([]models.Parameter, 0)
- existDeviceTarget := true
+ existDeviceTarget := false
if len(params) != 0 {
err := json.Unmarshal([]byte(params), ¶meters)
if err != nil {
@@ -1471,8 +1471,8 @@ func obsMkdir(dir string) error {
}
func paramCheckCreateTrainJob(form auth.CreateModelArtsTrainJobForm) error {
- if !strings.HasSuffix(form.BootFile, ".py") {
- log.Error("the boot file(%s) must be a python file", form.BootFile)
+ if !strings.HasSuffix(strings.TrimSpace(form.BootFile), ".py") {
+ log.Error("the boot file(%s) must be a python file", strings.TrimSpace(form.BootFile))
return errors.New("启动文件必须是python文件")
}
@@ -1489,8 +1489,8 @@ func paramCheckCreateTrainJob(form auth.CreateModelArtsTrainJobForm) error {
}
func paramCheckCreateInferenceJob(form auth.CreateModelArtsInferenceJobForm) error {
- if !strings.HasSuffix(form.BootFile, ".py") {
- log.Error("the boot file(%s) must be a python file", form.BootFile)
+ if !strings.HasSuffix(strings.TrimSpace(form.BootFile), ".py") {
+ log.Error("the boot file(%s) must be a python file", strings.TrimSpace(form.BootFile))
return errors.New("启动文件必须是python文件")
}
@@ -1803,7 +1803,7 @@ func InferenceJobCreate(ctx *context.Context, form auth.CreateModelArtsInference
description := form.Description
workServerNumber := form.WorkServerNumber
engineID := form.EngineID
- bootFile := form.BootFile
+ bootFile := strings.TrimSpace(form.BootFile)
flavorCode := form.Flavor
params := form.Params
poolID := form.PoolID
diff --git a/routers/repo/user_data_analysis.go b/routers/repo/user_data_analysis.go
index 1bebb9f3e..207727af1 100755
--- a/routers/repo/user_data_analysis.go
+++ b/routers/repo/user_data_analysis.go
@@ -4,6 +4,7 @@ import (
"fmt"
"net/http"
"net/url"
+ "os"
"time"
"code.gitea.io/gitea/models"
@@ -16,7 +17,8 @@ import (
)
const (
- PAGE_SIZE = 2000
+ PAGE_SIZE = 2000
+ Excel_File_Path = "/useranalysis/"
)
func getUserMetricsExcelHeader(ctx *context.Context) map[string]string {
@@ -27,6 +29,7 @@ func getUserMetricsExcelHeader(ctx *context.Context) map[string]string {
excelHeader = append(excelHeader, ctx.Tr("user.metrics.hasactivateuser"))
excelHeader = append(excelHeader, ctx.Tr("user.metrics.newregistnotactiveuser"))
excelHeader = append(excelHeader, ctx.Tr("user.metrics.newuseractiveindex"))
+ excelHeader = append(excelHeader, ctx.Tr("user.metrics.currentdayactivity"))
excelHeader = append(excelHeader, ctx.Tr("user.metrics.totalregistuser"))
excelHeader = append(excelHeader, ctx.Tr("user.metrics.totalactiveduser"))
excelHeader = append(excelHeader, ctx.Tr("user.metrics.totalhasactivityuser"))
@@ -65,6 +68,10 @@ func writeUserMetricsExcel(row int, xlsx *excelize.File, sheetName string, userM
}
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, value)
tmp = tmp + 1
+
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userMetrics.HasActivityUser)
+ tmp = tmp + 1
+
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userMetrics.TotalUser)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userMetrics.TotalActivateRegistUser)
@@ -104,6 +111,9 @@ func getExcelHeader(ctx *context.Context) map[string]string {
excelHeader = append(excelHeader, ctx.Tr("user.static.CollectedImage"))
excelHeader = append(excelHeader, ctx.Tr("user.static.RecommendImage"))
+ excelHeader = append(excelHeader, ctx.Tr("user.static.email"))
+ excelHeader = append(excelHeader, ctx.Tr("user.static.location"))
+
excelHeader = append(excelHeader, ctx.Tr("user.static.registdate"))
excelHeader = append(excelHeader, ctx.Tr("user.static.countdate"))
@@ -179,6 +189,13 @@ func writeExcel(row int, xlsx *excelize.File, sheetName string, userRecord *mode
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.RecommendImage)
tmp = tmp + 1
+
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.Email)
+ tmp = tmp + 1
+
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.UserLocation)
+ tmp = tmp + 1
+
formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05")
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, formatTime[0:len(formatTime)-3])
tmp = tmp + 1
@@ -186,6 +203,82 @@ func writeExcel(row int, xlsx *excelize.File, sheetName string, userRecord *mode
formatTime = userRecord.DataDate
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, formatTime)
}
+
+func writeExcelPage(row int, xlsx *excelize.File, sheetName string, userRecord *models.UserBusinessAnalysis) {
+ rows := fmt.Sprint(row)
+ var tmp byte
+ tmp = 0
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.ID)
+ tmp = tmp + 1
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.Name)
+ tmp = tmp + 1
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", userRecord.UserIndex))
+ tmp = tmp + 1
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", userRecord.UserIndexPrimitive))
+ tmp = tmp + 1
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CodeMergeCount)
+ tmp = tmp + 1
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitCount)
+ tmp = tmp + 1
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.IssueCount)
+ tmp = tmp + 1
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommentCount)
+ tmp = tmp + 1
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.FocusRepoCount)
+ tmp = tmp + 1
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.StarRepoCount)
+ tmp = tmp + 1
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.LoginCount)
+ tmp = tmp + 1
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.WatchedCount)
+ tmp = tmp + 1
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitCodeSize)
+ tmp = tmp + 1
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.SolveIssueCount)
+ tmp = tmp + 1
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.EncyclopediasCount)
+ tmp = tmp + 1
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CreateRepoCount)
+ tmp = tmp + 1
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex))
+ tmp = tmp + 1
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CloudBrainTaskNum)
+ tmp = tmp + 1
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600))
+ tmp = tmp + 1
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitDatasetNum)
+ tmp = tmp + 1
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitModelCount)
+ tmp = tmp + 1
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.FocusOtherUser)
+ tmp = tmp + 1
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectDataset)
+ tmp = tmp + 1
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectedDataset)
+ tmp = tmp + 1
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.RecommendDataset)
+ tmp = tmp + 1
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectImage)
+ tmp = tmp + 1
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectedImage)
+ tmp = tmp + 1
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.RecommendImage)
+ tmp = tmp + 1
+
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.Email)
+ tmp = tmp + 1
+
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.UserLocation)
+ tmp = tmp + 1
+
+ formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05")
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, formatTime[0:len(formatTime)-3])
+ tmp = tmp + 1
+
+ formatTime = userRecord.DataDate
+ xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, formatTime)
+}
+
func getColumn(tmp byte) string {
var tmpA byte
tmpA = 'A'
@@ -330,6 +423,23 @@ func QueryRankingList(ctx *context.Context) {
ctx.JSON(http.StatusOK, mapInterface)
}
+func DownloadUserDefineFile(ctx *context.Context) {
+ filename := ctx.Query("filename")
+ length := len(filename)
+ if filename[0:1] == "\"" {
+ filename = filename[1 : length-1]
+ }
+ allFilename := setting.AppDataPath + Excel_File_Path + filename
+ log.Info("allFilename=" + allFilename)
+ _, err := os.Stat(allFilename)
+ if err != nil { //文件不存在
+ log.Info("file not exist.")
+ ctx.JSON(http.StatusOK, "File Not Exist.")
+ } else {
+ ctx.ServeFile(allFilename, url.QueryEscape(filename))
+ }
+}
+
func QueryUserMetricsCurrentMonth(ctx *context.Context) {
currentTimeNow := time.Now()
@@ -365,6 +475,10 @@ func QueryUserMetricsCurrentWeek(ctx *context.Context) {
func QueryUserStaticCurrentWeek(ctx *context.Context) {
queryUserDataPage(ctx, "public.user_business_analysis_current_week", new(models.UserBusinessAnalysisCurrentWeek))
}
+func QueryUserStaticLastWeek(ctx *context.Context) {
+ queryUserDataPage(ctx, "public.user_business_analysis_last_week", new(models.UserBusinessAnalysisLastWeek))
+}
+
func QueryUserMetricsCurrentYear(ctx *context.Context) {
currentTimeNow := time.Now()
pageStartTime := time.Date(currentTimeNow.Year(), 1, 1, 0, 0, 0, 0, currentTimeNow.Location())
@@ -450,14 +564,13 @@ func QueryUserStaticDataPage(ctx *context.Context) {
endTime = time.Now()
} else {
startTime, _ = time.ParseInLocation("2006-01-02", startDate, time.Local)
- startTime = time.Date(startTime.Year(), startTime.Month(), startTime.Day(), 12, 0, 0, 0, startTime.Location())
+ startTime = time.Date(startTime.Year(), startTime.Month(), startTime.Day(), 0, 0, 0, 0, startTime.Location())
settingStartTime, _ := time.Parse("2006-01-02", setting.RadarMap.RecordBeginTime)
if startTime.Unix() < settingStartTime.Unix() {
startTime = settingStartTime
startDate = settingStartTime.Format("2006-01-02")
}
endTime, _ = time.ParseInLocation("2006-01-02", endDate, time.Local)
- endTime = endTime.AddDate(0, 0, 1)
endTime = time.Date(endTime.Year(), endTime.Month(), endTime.Day(), 23, 59, 59, 0, startTime.Location())
isAll = false
@@ -481,36 +594,14 @@ func QueryUserStaticDataPage(ctx *context.Context) {
}
if IsReturnFile {
- re, count := models.QueryUserStaticDataAll(pageOpts)
- log.Info("return count=" + fmt.Sprint(count))
- //writer exec file.
- xlsx := excelize.NewFile()
+ //re, count := models.QueryUserStaticDataAll(pageOpts)
+ wikiMap, _ := queryWikiCountMap(startTime, endTime)
+ re, count := models.QueryUserStaticDataForUserDefine(pageOpts, wikiMap)
sheetName := ctx.Tr("user.static.sheetname")
- index := xlsx.NewSheet(sheetName)
- xlsx.DeleteSheet("Sheet1")
-
- dataHeader := getExcelHeader(ctx)
- for k, v := range dataHeader {
- //设置单元格的值
- xlsx.SetCellValue(sheetName, k, v)
- }
-
- for i, userRecord := range re {
- row := i + 2
- writeExcel(row, xlsx, sheetName, userRecord)
- }
-
- //设置默认打开的表单
- xlsx.SetActiveSheet(index)
-
- filename := sheetName + "_" + ctx.Tr("user.static.all") + ".xlsx"
-
- ctx.Resp.Header().Set("Content-Disposition", "attachment; filename="+url.QueryEscape(filename))
- ctx.Resp.Header().Set("Content-Type", "application/octet-stream")
- if _, err := xlsx.WriteTo(ctx.Resp); err != nil {
- log.Info("writer exel error." + err.Error())
- }
-
+ filename := sheetName + "_" + startDate + "_" + endDate + ".xlsx"
+ os.Remove(setting.AppDataPath + Excel_File_Path + filename)
+ go writeFileToDisk(ctx, count, re, filename)
+ ctx.JSON(http.StatusOK, ctx.Tr("user.static.downloadinfo")+setting.AppURL+"api/v1/download_user_define_file?filename="+filename)
} else {
mapInterface := make(map[string]interface{})
re, count := models.QueryUserStaticDataPage(pageOpts)
@@ -520,22 +611,47 @@ func QueryUserStaticDataPage(ctx *context.Context) {
}
}
-func TimingCountDataByDateAndReCount(date string, isReCount bool) {
+func writeFileToDisk(ctx *context.Context, count int64, re []*models.UserBusinessAnalysis, filename string) {
+ log.Info("return count=" + fmt.Sprint(count))
+ // write excel file.
+ xlsx := excelize.NewFile()
+ sheetName := ctx.Tr("user.static.sheetname")
+ index := xlsx.NewSheet(sheetName)
+ xlsx.DeleteSheet("Sheet1")
+
+ dataHeader := getExcelHeader(ctx)
+ for k, v := range dataHeader {
+ //设置单元格的值
+ xlsx.SetCellValue(sheetName, k, v)
+ }
- t, _ := time.Parse("2006-01-02", date)
- startTime := time.Date(t.Year(), t.Month(), t.Day(), 0, 0, 0, 0, t.Location())
+ for i, userRecord := range re {
+ row := i + 2
+ writeExcelPage(row, xlsx, sheetName, userRecord)
+ }
- endTime := time.Date(t.Year(), t.Month(), t.Day(), 23, 59, 59, 0, t.Location())
+ //设置默认打开的表单
+ xlsx.SetActiveSheet(index)
- //query wiki data
- log.Info("start to time count data")
+ //ctx.Resp.Header().Set("Content-Disposition", "attachment; filename="+url.QueryEscape(filename))
+ //ctx.Resp.Header().Set("Content-Type", "application/octet-stream")
+ filename = setting.AppDataPath + Excel_File_Path + filename
+ os.Mkdir(setting.AppDataPath+Excel_File_Path, 0755)
+ if err := xlsx.SaveAs(filename); err != nil {
+ log.Info("writer excel error." + err.Error())
+ } else {
+ log.Info("write to file succeed, filepath=" + filename)
+ }
+}
+
+func queryWikiCountMap(startTime time.Time, endTime time.Time) (map[string]int, error) {
wikiMap := make(map[string]int)
warnEmailMessage := "用户统计信息入库失败,请尽快定位。"
repoList, err := models.GetAllRepositories()
if err != nil {
log.Error("query repo error." + err.Error())
mailer.SendWarnNotifyMail(setting.Warn_Notify_Mails, warnEmailMessage)
- return
+ return nil, err
}
log.Info("start to query wiki data")
for _, repoRecord := range repoList {
@@ -543,7 +659,7 @@ func TimingCountDataByDateAndReCount(date string, isReCount bool) {
time, err := git.GetLatestCommitTime(wikiPath)
if err == nil {
log.Info("last commit time:" + time.Format("2006-01-02 15:04:05") + " wikiPath=" + wikiPath)
- if time.After(startTime) {
+ if time.After(startTime) && time.Before(endTime) {
wikiRepo, _, err := FindWikiRepoCommitByWikiPath(wikiPath)
if err != nil {
log.Error("wiki not exist. wikiPath=" + wikiPath)
@@ -568,14 +684,29 @@ func TimingCountDataByDateAndReCount(date string, isReCount bool) {
}
}
}
+ return wikiMap, nil
+}
+
+func TimingCountDataByDateAndReCount(date string, isReCount bool) {
+
+ t, _ := time.Parse("2006-01-02", date)
+ startTime := time.Date(t.Year(), t.Month(), t.Day(), 0, 0, 0, 0, t.Location())
+ startTime = startTime.UTC()
+ endTime := time.Date(t.Year(), t.Month(), t.Day(), 23, 59, 59, 0, t.Location())
+ endTime = endTime.UTC()
+ log.Info("startTime time:" + startTime.Format("2006-01-02 15:04:05"))
+ log.Info("endTime time:" + endTime.Format("2006-01-02 15:04:05"))
+ warnEmailMessage := "用户统计信息入库失败,请尽快定位。"
+ //query wiki data
+ log.Info("start to time count data")
+ wikiMap, err := queryWikiCountMap(startTime, endTime)
//other user info data
err = models.CounDataByDateAndReCount(wikiMap, startTime, endTime, isReCount)
if err != nil {
log.Error("count user info error." + err.Error())
mailer.SendWarnNotifyMail(setting.Warn_Notify_Mails, warnEmailMessage)
}
- log.Info("start to count all user info data")
- //models.RefreshUserStaticAllTabel(wikiMap)
+
log.Info("end to count all user info data")
}
diff --git a/routers/routes/routes.go b/routers/routes/routes.go
index 4c3f5f472..12d0e1cf8 100755
--- a/routers/routes/routes.go
+++ b/routers/routes/routes.go
@@ -323,10 +323,11 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Get("/dashboard", routers.Dashboard)
go routers.SocketManager.Run()
m.Get("/action/notification", routers.ActionNotification)
- m.Get("/recommend/org", routers.RecommendOrgFromPromote)
- m.Get("/recommend/repo", routers.RecommendRepoFromPromote)
+ m.Get("/recommend/home", routers.RecommendHomeInfo)
+ //m.Get("/recommend/org", routers.RecommendOrgFromPromote)
+ //m.Get("/recommend/repo", routers.RecommendRepoFromPromote)
m.Get("/recommend/userrank/:index", routers.GetUserRankFromPromote)
- m.Get("/recommend/imageinfo", routers.GetImageInfoFromPromote)
+ //m.Get("/recommend/imageinfo", routers.GetImageInfoFromPromote)
m.Post("/all/search/", routers.Search)
m.Get("/all/search/", routers.EmptySearch)
m.Get("/all/dosearch/", routers.SearchApi)
diff --git a/templates/base/footer.tmpl b/templates/base/footer.tmpl
index a0f5754d5..68af56e59 100644
--- a/templates/base/footer.tmpl
+++ b/templates/base/footer.tmpl
@@ -12,7 +12,10 @@
{{template "base/footer_content" .}}
-
+
+
+
+
{{if .RequireSimpleMDE}}
@@ -37,15 +40,17 @@
{{if .RequireTribute}}
{{end}}
+
+
{{template "custom/footer" .}}
{{if .PageIsHome}}
+
+
-
-
-
+
{{end}}
+