diff --git a/custom/conf/app.ini.sample b/custom/conf/app.ini.sample index b444dd008..9fd00a763 100755 --- a/custom/conf/app.ini.sample +++ b/custom/conf/app.ini.sample @@ -385,6 +385,17 @@ CONN_MAX_LIFETIME = 3s ; Database maximum number of open connections, default is 0 meaning no maximum MAX_OPEN_CONNS = 0 +[database_statistic] +DB_TYPE = postgres +HOST = 127.0.0.1:5432 +NAME = statistic +USER = +PASSWD = +SCHEMA = +SSL_MODE = disable +CHARSET = utf8 +PATH = + [indexer] ; Issue indexer type, currently support: bleve, db or elasticsearch, default is bleve ISSUE_INDEXER_TYPE = bleve diff --git a/go.mod b/go.mod old mode 100644 new mode 100755 index c663ab2ff..9f93281c3 --- a/go.mod +++ b/go.mod @@ -52,6 +52,7 @@ require ( github.com/gogs/chardet v0.0.0-20191104214054-4b6791f73a28 github.com/gogs/cron v0.0.0-20171120032916-9f6c956d3e14 github.com/golang/protobuf v1.4.1 // indirect + github.com/gomodule/redigo v2.0.0+incompatible github.com/google/go-github/v24 v24.0.1 github.com/gorilla/context v1.1.1 github.com/hashicorp/go-retryablehttp v0.6.6 // indirect diff --git a/models/attachment.go b/models/attachment.go index 418d7c881..684a38b21 100755 --- a/models/attachment.go +++ b/models/attachment.go @@ -464,3 +464,12 @@ func CanDelAttachment(isSigned bool, user *User, attach *Attachment) bool { } return false } + +func GetAttachmentSizeByDatasetID(datasetID int64) (int64, error) { + total, err := x.Where("dataset_id = ?", datasetID).SumInt(&Attachment{}, "size") + if err != nil { + return 0, err + } + + return total, nil +} diff --git a/models/cloudbrain.go b/models/cloudbrain.go index edd5f102a..4b2bec8e6 100755 --- a/models/cloudbrain.go +++ b/models/cloudbrain.go @@ -5,11 +5,12 @@ import ( "fmt" "strings" "time" + "xorm.io/builder" "xorm.io/xorm" + "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/timeutil" - "xorm.io/builder" ) type CloudbrainStatus string @@ -59,12 +60,18 @@ type Cloudbrain struct { UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"` DeletedAt time.Time `xorm:"deleted"` CanDebug bool `xorm:"-"` + CanDel bool `xorm:"-"` Type int `xorm:"INDEX DEFAULT 0"` User *User `xorm:"-"` Repo *Repository `xorm:"-"` } +type CloudbrainInfo struct { + Cloudbrain `xorm:"extends"` + User `xorm:"extends"` +} + type CloudBrainLoginResult struct { Code string Msg string @@ -523,7 +530,7 @@ type NotebookDelResult struct { InstanceID string `json:"instance_id"` } -func Cloudbrains(opts *CloudbrainsOptions) ([]*Cloudbrain, int64, error) { +func Cloudbrains(opts *CloudbrainsOptions) ([]*CloudbrainInfo, int64, error) { sess := x.NewSession() defer sess.Close() @@ -583,8 +590,10 @@ func Cloudbrains(opts *CloudbrainsOptions) ([]*Cloudbrain, int64, error) { } sess.OrderBy("cloudbrain.created_unix DESC") - cloudbrains := make([]*Cloudbrain, 0, setting.UI.IssuePagingNum) - if err := sess.Where(cond).Find(&cloudbrains); err != nil { + cloudbrains := make([]*CloudbrainInfo, 0, setting.UI.IssuePagingNum) + if err := sess.Table(&Cloudbrain{}).Where(cond). + Join("left", "`user`", "cloudbrain.user_id = `user`.id"). + Find(&cloudbrains); err != nil { return nil, 0, fmt.Errorf("Find: %v", err) } sess.Close() @@ -620,6 +629,18 @@ func GetCloudbrainByJobID(jobID string) (*Cloudbrain, error) { return getRepoCloudBrain(cb) } +func GetCloudbrainsNeededStopByUserID(userID int64) ([]*Cloudbrain, error) { + cloudBrains := make([]*Cloudbrain, 0) + err := x.Cols("job_id", "status", "type").Where("user_id=? 
AND status !=?", userID, string(JobStopped)).Find(&cloudBrains) + return cloudBrains, err +} + +func GetCloudbrainsNeededStopByRepoID(repoID int64) ([]*Cloudbrain, error) { + cloudBrains := make([]*Cloudbrain, 0) + err := x.Cols("job_id", "status", "type").Where("repo_id=? AND status !=?", repoID, string(JobStopped)).Find(&cloudBrains) + return cloudBrains, err +} + func SetCloudbrainStatusByJobID(jobID string, status CloudbrainStatus) (err error) { cb := &Cloudbrain{JobID: jobID, Status: string(status)} _, err = x.Cols("status").Where("cloudbrain.job_id=?", jobID).Update(cb) @@ -650,3 +671,24 @@ func GetCloudbrainByName(jobName string) (*Cloudbrain, error) { cb := &Cloudbrain{JobName: jobName} return getRepoCloudBrain(cb) } + +func CanDelJob(isSigned bool, user *User, job *CloudbrainInfo) bool { + if !isSigned || job.Status != string(JobStopped) { + return false + } + repo, err := GetRepositoryByID(job.RepoID) + if err != nil { + log.Error("GetRepositoryByID failed:%v", err.Error()) + return false + } + permission, _ := GetUserRepoPermission(repo, user) + if err != nil { + log.Error("GetUserRepoPermission failed:%v", err.Error()) + return false + } + + if (user.ID == job.UserID && permission.AccessMode >= AccessModeWrite) || user.IsAdmin || permission.AccessMode >= AccessModeAdmin { + return true + } + return false +} diff --git a/models/issue_comment.go b/models/issue_comment.go old mode 100644 new mode 100755 index f7017435d..60d38452c --- a/models/issue_comment.go +++ b/models/issue_comment.go @@ -1016,3 +1016,19 @@ func UpdateCommentsMigrationsByType(tp structs.GitServiceType, originalAuthorID }) return err } + +func GetCommentCountByRepoID(repoID int64) (int64, error) { + //sql := fmt.Sprintf("select count(1) from comment where issue_id in (select id from issue where repo_id = %d) and type = %d;", repoID, CommentTypeComment) + //res, err := x.Query(sql) + //if err != nil { + // return 0, err + //} + //return int64(binary.BigEndian.Uint64(res[0]["count"])), nil + + total, err := x.Where("issue_id in (select id from issue where repo_id = ?) and type = ?", repoID, CommentTypeComment).Count(&Comment{}) + if err != nil { + return 0, err + } + + return total, nil +} diff --git a/models/models.go b/models/models.go index 2fab3d0dc..412148235 100755 --- a/models/models.go +++ b/models/models.go @@ -59,6 +59,9 @@ var ( x *xorm.Engine tables []interface{} + xStatistic *xorm.Engine + tablesStatistic []interface{} + // HasEngine specifies if we have a xorm.Engine HasEngine bool ) @@ -132,14 +135,19 @@ func init() { new(RecommendOrg), ) + tablesStatistic = append(tablesStatistic, + new(RepoStatistic), + new(UserBusinessAnalysis), + ) + gonicNames := []string{"SSL", "UID"} for _, name := range gonicNames { names.LintGonicMapper[name] = true } } -func getEngine() (*xorm.Engine, error) { - connStr, err := setting.DBConnStr() +func getEngine(database *setting.DBInfo) (*xorm.Engine, error) { + connStr, err := setting.DBConnStr(database) if err != nil { return nil, err } @@ -153,14 +161,12 @@ func getEngine() (*xorm.Engine, error) { } engine.SetSchema(setting.Database.Schema) - HasEngine = true - return engine, nil } // NewTestEngine sets a new test xorm.Engine func NewTestEngine(x *xorm.Engine) (err error) { - x, err = getEngine() + x, err = getEngine(setting.Database) if err != nil { return fmt.Errorf("Connect to database: %v", err) } @@ -171,43 +177,80 @@ func NewTestEngine(x *xorm.Engine) (err error) { return x.StoreEngine("InnoDB").Sync2(tables...) 
} -// SetEngine sets the xorm.Engine +// setEngine sets the xorm.Engine +func setEngine(engine *xorm.Engine, table []interface{}, database *setting.DBInfo) (err error) { + engine.SetMapper(names.GonicMapper{}) + // WARNING: for serv command, MUST remove the output to os.stdout, + // so use log file to instead print to stdout. + engine.SetLogger(NewXORMLogger(setting.Database.LogSQL)) + engine.ShowSQL(setting.Database.LogSQL) + engine.SetMaxOpenConns(setting.Database.MaxOpenConns) + engine.SetMaxIdleConns(setting.Database.MaxIdleConns) + engine.SetConnMaxLifetime(setting.Database.ConnMaxLifetime) + engine.Sync2(table...) + MigrateCustom(engine) + return nil +} + func SetEngine() (err error) { - x, err = getEngine() + x, err = getEngine(setting.Database) if err != nil { return fmt.Errorf("Failed to connect to database: %v", err) } + if err = setEngine(x, tables, setting.Database); err != nil { + return err + } + + xStatistic, err = getEngine(setting.DatabaseStatistic) + if err != nil { + return fmt.Errorf("Failed to connect to database: %v", err) + } + if err = setEngine(xStatistic, tablesStatistic, setting.DatabaseStatistic); err != nil { + return err + } - x.SetMapper(names.GonicMapper{}) - // WARNING: for serv command, MUST remove the output to os.stdout, - // so use log file to instead print to stdout. - x.SetLogger(NewXORMLogger(setting.Database.LogSQL)) - x.ShowSQL(setting.Database.LogSQL) - x.SetMaxOpenConns(setting.Database.MaxOpenConns) - x.SetMaxIdleConns(setting.Database.MaxIdleConns) - x.SetConnMaxLifetime(setting.Database.ConnMaxLifetime) - x.Sync2(tables...) - MigrateCustom(x) return nil } -// NewEngine initializes a new xorm.Engine func NewEngine(ctx context.Context, migrateFunc func(*xorm.Engine) error) (err error) { - if err = SetEngine(); err != nil { + x, err = getEngine(setting.Database) + if err != nil { + return fmt.Errorf("Failed to connect to database: %v", err) + } + if err = newEngine(ctx, migrateFunc, x, tables, setting.Database); err != nil { + return fmt.Errorf("newEngine failed: %v", err) + } + + xStatistic, err = getEngine(setting.DatabaseStatistic) + if err != nil { + return fmt.Errorf("Failed to connect to database: %v", err) + } + if err = newEngine(ctx, migrateFunc, xStatistic, tablesStatistic, setting.DatabaseStatistic); err != nil { + return fmt.Errorf("newEngine statistic failed: %v", err) + } + + HasEngine = true + + return nil +} + +// newEngine initializes a new xorm.Engine +func newEngine(ctx context.Context, migrateFunc func(*xorm.Engine) error, engine *xorm.Engine, table []interface{}, database *setting.DBInfo) (err error) { + if err = setEngine(engine, table, database); err != nil { return err } - x.SetDefaultContext(ctx) + engine.SetDefaultContext(ctx) - if err = x.Ping(); err != nil { + if err = engine.Ping(); err != nil { return err } - if err = migrateFunc(x); err != nil { + if err = migrateFunc(engine); err != nil { return fmt.Errorf("migrate: %v", err) } - if err = x.StoreEngine("InnoDB").Sync2(tables...); err != nil { + if err = engine.StoreEngine("InnoDB").Sync2(table...); err != nil { return fmt.Errorf("sync database struct error: %v", err) } @@ -257,6 +300,11 @@ func Ping() error { if x != nil { return x.Ping() } + + if xStatistic != nil { + return xStatistic.Ping() + } + return errors.New("database not configured") } diff --git a/models/release.go b/models/release.go old mode 100644 new mode 100755 diff --git a/models/repo.go b/models/repo.go index 2742c3e31..7f4bfebba 100755 --- a/models/repo.go +++ b/models/repo.go @@ -1424,6 +1424,12 
@@ func GetAllRepositories() ([]*Repository, error) { return getALLRepositories(x) } +func GetAllRepositoriesByFilterCols(columns ...string) ([]*Repository, error) { + repos := make([]*Repository, 0, 1000) + return repos, x.Cols(columns...).Find(&repos) + +} + func updateRepository(e Engine, repo *Repository, visibilityChanged bool) (err error) { repo.LowerName = strings.ToLower(repo.Name) diff --git a/models/repo_activity_custom.go b/models/repo_activity_custom.go new file mode 100644 index 000000000..f6cbf0331 --- /dev/null +++ b/models/repo_activity_custom.go @@ -0,0 +1,43 @@ +package models + +import "code.gitea.io/gitea/modules/git" + +func GetRepoKPIStats(repo *Repository) (*git.RepoKPIStats, error) { + wikiPath := "" + if repo.HasWiki() { + wikiPath = repo.WikiPath() + } + return git.GetRepoKPIStats(repo.RepoPath(), wikiPath) +} + +func GetAllUserKPIStats() (map[string]*git.UserKPIStats, error) { + authors := make(map[string]*git.UserKPIStats) + repositorys, err := GetAllRepositoriesByFilterCols("owner_name", "name") + if err != nil { + return nil, err + } + + for _, repository := range repositorys { + authorsOneRepo, err1 := git.GetUserKPIStats(repository.RepoPath()) + if err1 != nil { + return nil, err + } + + for key, value := range authorsOneRepo { + if _, ok := authors[key]; !ok { + authors[key] = &git.UserKPIStats{ + + Name: value.Name, + Email: value.Email, + Commits: 0, + CommitLines: 0, + } + } + authors[key].Commits += value.Commits + authors[key].CommitLines += value.CommitLines + + } + + } + return authors, nil +} diff --git a/models/repo_list.go b/models/repo_list.go index 57b6ebbd6..c4d8ee823 100755 --- a/models/repo_list.go +++ b/models/repo_list.go @@ -166,6 +166,8 @@ type SearchRepoOptions struct { Archived util.OptionalBool // only search topic name TopicOnly bool + //search by Specific TopicName + TopicName string // include description in keyword search IncludeDescription bool // None -> include has milestones AND has no milestone @@ -327,6 +329,18 @@ func SearchRepositoryCondition(opts *SearchRepoOptions) builder.Cond { } cond = cond.And(keywordCond) } + if opts.TopicName != "" { + var subQueryCond = builder.NewCond() + subQueryCond = subQueryCond.Or(builder.Eq{"topic.name": strings.ToLower(opts.TopicName)}) + subQuery := builder.Select("repo_topic.repo_id").From("repo_topic"). + Join("INNER", "topic", "topic.id = repo_topic.topic_id"). + Where(subQueryCond). 
+ GroupBy("repo_topic.repo_id") + + var topicNameCond = builder.In("id", subQuery) + cond = cond.And(topicNameCond) + + } if opts.Fork != util.OptionalBoolNone { cond = cond.And(builder.Eq{"is_fork": opts.Fork == util.OptionalBoolTrue}) diff --git a/models/repo_statistic.go b/models/repo_statistic.go new file mode 100755 index 000000000..b987f4f46 --- /dev/null +++ b/models/repo_statistic.go @@ -0,0 +1,60 @@ +package models + +import ( + "code.gitea.io/gitea/modules/timeutil" + "fmt" +) + +// RepoStatistic statistic info of all repository +type RepoStatistic struct { + ID int64 `xorm:"pk autoincr"` + RepoID int64 `xorm:"unique(s) NOT NULL"` + Date string `xorm:"unique(s) NOT NULL"` + NumWatches int64 `xorm:"NOT NULL DEFAULT 0"` + NumStars int64 `xorm:"NOT NULL DEFAULT 0"` + NumForks int64 `xorm:"NOT NULL DEFAULT 0"` + NumDownloads int64 `xorm:"NOT NULL DEFAULT 0"` + NumComments int64 `xorm:"NOT NULL DEFAULT 0"` + NumVisits int64 `xorm:"NOT NULL DEFAULT 0"` + NumClosedIssues int64 `xorm:"NOT NULL DEFAULT 0"` + NumVersions int64 `xorm:"NOT NULL DEFAULT 0"` + //develop months + NumDevMonths int64 `xorm:"NOT NULL DEFAULT 0"` + RepoSize int64 `xorm:"NOT NULL DEFAULT 0"` + DatasetSize int64 `xorm:"NOT NULL DEFAULT 0"` + NumModels int64 `xorm:"NOT NULL DEFAULT 0"` + NumWikiViews int64 `xorm:"NOT NULL DEFAULT 0"` + NumCommits int64 `xorm:"NOT NULL DEFAULT 0"` + NumIssues int64 `xorm:"NOT NULL DEFAULT 0"` + NumPulls int64 `xorm:"NOT NULL DEFAULT 0"` + IssueFixedRate float32 `xorm:"NOT NULL"` + NumContributor int64 `xorm:"NOT NULL DEFAULT 0"` + NumKeyContributor int64 `xorm:"NOT NULL DEFAULT 0"` + + CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"` + UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"` +} + +func DeleteRepoStatDaily(date string) error { + sess := xStatistic.NewSession() + defer sess.Close() + if err := sess.Begin(); err != nil { + return fmt.Errorf("Begin: %v", err) + } + + if _, err := sess.Where("date = ?", date).Delete(&RepoStatistic{}); err != nil { + return fmt.Errorf("Delete: %v", err) + } + + if err := sess.Commit(); err != nil { + sess.Close() + return fmt.Errorf("Commit: %v", err) + } + + sess.Close() + return nil +} + +func InsertRepoStat(repoStat *RepoStatistic) (int64, error) { + return xStatistic.Insert(repoStat) +} diff --git a/models/repo_watch.go b/models/repo_watch.go index 11cfa8891..6bdef9c7f 100644 --- a/models/repo_watch.go +++ b/models/repo_watch.go @@ -26,10 +26,11 @@ const ( // Watch is connection request for receiving repository notification. type Watch struct { - ID int64 `xorm:"pk autoincr"` - UserID int64 `xorm:"UNIQUE(watch)"` - RepoID int64 `xorm:"UNIQUE(watch)"` - Mode RepoWatchMode `xorm:"SMALLINT NOT NULL DEFAULT 1"` + ID int64 `xorm:"pk autoincr"` + UserID int64 `xorm:"UNIQUE(watch)"` + RepoID int64 `xorm:"UNIQUE(watch)"` + Mode RepoWatchMode `xorm:"SMALLINT NOT NULL DEFAULT 1"` + CreatedUnix int64 `xorm:"created"` } // getWatch gets what kind of subscription a user has on a given repository; returns dummy record if none found diff --git a/models/star.go b/models/star.go index 4e84a6e4d..418a76b17 100644 --- a/models/star.go +++ b/models/star.go @@ -4,11 +4,14 @@ package models +import "code.gitea.io/gitea/modules/timeutil" + // Star represents a starred repo by an user. 
type Star struct {
-	ID     int64 `xorm:"pk autoincr"`
-	UID    int64 `xorm:"UNIQUE(s)"`
-	RepoID int64 `xorm:"UNIQUE(s)"`
+	ID          int64              `xorm:"pk autoincr"`
+	UID         int64              `xorm:"UNIQUE(s)"`
+	RepoID      int64              `xorm:"UNIQUE(s)"`
+	CreatedUnix timeutil.TimeStamp `xorm:"created"`
 }
 
 // StarRepo or unstar repository.
@@ -39,7 +42,7 @@ func StarRepo(userID, repoID int64, star bool) error {
 		return nil
 	}
 
-	if _, err := sess.Delete(&Star{0, userID, repoID}); err != nil {
+	if _, err := sess.Delete(&Star{0, userID, repoID, 0}); err != nil {
 		return err
 	}
 	if _, err := sess.Exec("UPDATE `repository` SET num_stars = num_stars - 1 WHERE id = ?", repoID); err != nil {
@@ -59,7 +62,7 @@ func IsStaring(userID, repoID int64) bool {
 }
 
 func isStaring(e Engine, userID, repoID int64) bool {
-	has, _ := e.Get(&Star{0, userID, repoID})
+	has, _ := e.Get(&Star{0, userID, repoID, 0})
 	return has
 }
diff --git a/models/user.go b/models/user.go
index 38f699740..78ab4627a 100755
--- a/models/user.go
+++ b/models/user.go
@@ -1556,6 +1556,18 @@ func GetUserByActivateEmail(email string) (*User, error) {
 	if len(users) >= 1 {
 		return &users[0],nil
 	}else {
+		// Finally, check whether the email is the protected no-reply address (used when a user hides their email address)
+		if strings.HasSuffix(email, fmt.Sprintf("@%s", setting.Service.NoReplyAddress)) {
+			username := strings.TrimSuffix(email, fmt.Sprintf("@%s", setting.Service.NoReplyAddress))
+			user := &User{LowerName: username}
+			has, err := ctx.e.Get(user)
+			if err != nil {
+				return nil, err
+			}
+			if has {
+				return user, nil
+			}
+		}
 		return nil, errors.New("cannot find user by email")
 	}
 }
diff --git a/models/user_business_analysis.go b/models/user_business_analysis.go
new file mode 100644
index 000000000..a7d549cd4
--- /dev/null
+++ b/models/user_business_analysis.go
@@ -0,0 +1,381 @@
+package models
+
+import (
+	"fmt"
+	"time"
+
+	"code.gitea.io/gitea/modules/log"
+	"code.gitea.io/gitea/modules/timeutil"
+)
+
+type UserBusinessAnalysis struct {
+	ID int64 `xorm:"pk"`
+
+	CountDate int64 `xorm:"pk"`
+
+	//action :ActionMergePullRequest // 11
+	CodeMergeCount int `xorm:"NOT NULL DEFAULT 0"`
+
+	//action :ActionCommitRepo // 5
+	CommitCount int `xorm:"NOT NULL DEFAULT 0"`
+
+	//action :ActionCommentIssue // 10
+	IssueCount int `xorm:"NOT NULL DEFAULT 0"`
+
+	//comment table current date
+	CommentCount int `xorm:"NOT NULL DEFAULT 0"`
+
+	//watch table current date
+	FocusRepoCount int `xorm:"NOT NULL DEFAULT 0"`
+
+	//star table current date
+	StarRepoCount int `xorm:"NOT NULL DEFAULT 0"`
+
+	//follow table
+	WatchedCount int `xorm:"NOT NULL DEFAULT 0"`
+
+	// user table
+	GiteaAgeMonth int `xorm:"NOT NULL DEFAULT 0"`
+
+	//commit lines, from git log
+	CommitCodeSize int `xorm:"NOT NULL DEFAULT 0"`
+
+	//attachment table
+	CommitDatasetSize int `xorm:"NOT NULL DEFAULT 0"`
+
+	//reserved, currently always 0
+	CommitModelCount int `xorm:"NOT NULL DEFAULT 0"`
+
+	//issue, issueassignees
+	SolveIssueCount int `xorm:"NOT NULL DEFAULT 0"`
+
+	//encyclopedia (wiki) pages
+	EncyclopediasCount int `xorm:"NOT NULL DEFAULT 0"`
+
+	//user
+	RegistDate timeutil.TimeStamp `xorm:"NOT NULL"`
+
+	//repo
+	CreateRepoCount int `xorm:"NOT NULL DEFAULT 0"`
+
+	//login count, from elk
+	LoginCount int `xorm:"NOT NULL DEFAULT 0"`
+
+	//openi index
+	OpenIIndex int `xorm:"NOT NULL DEFAULT 0"`
+
+	//user
+	Email string `xorm:"NOT NULL"`
+
+	//user
+	Name string `xorm:"NOT NULL"`
+}
+
+func CountData(wikiCountMap map[string]int) {
+	log.Info("start to count other user info data")
+	sess := x.NewSession()
+	defer sess.Close()
+	sess.Select("`user`.*").Table("user")
+	userList := make([]*User, 0)
+	sess.Find(&userList)
+
+	currentTimeNow := time.Now()
+	log.Info("current time:" + currentTimeNow.Format("2006-01-02 15:04:05"))
+
+	yesterday := currentTimeNow.AddDate(0, 0, -1)
+	startTime := time.Date(yesterday.Year(), yesterday.Month(), yesterday.Day(), 0, 0, 0, 0, yesterday.Location())
+	start_unix := startTime.Unix()
+	log.Info("DB query time:" + startTime.Format("2006-01-02 15:04:05"))
+
+	endTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 0, 0, 0, currentTimeNow.Location())
+	end_unix := endTime.Unix()
+
+	CountDate := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 1, 0, 0, currentTimeNow.Location())
+
+	CodeMergeCountMap := queryAction(start_unix, end_unix, 11)
+	CommitCountMap := queryAction(start_unix, end_unix, 5)
+	IssueCountMap := queryAction(start_unix, end_unix, 10)
+
+	CommentCountMap := queryComment(start_unix, end_unix)
+	FocusRepoCountMap := queryWatch(start_unix, end_unix)
+	StarRepoCountMap := queryStar(start_unix, end_unix)
+	WatchedCountMap := queryFollow(start_unix, end_unix)
+
+	CommitCodeSizeMap, err := GetAllUserKPIStats()
+	if err != nil {
+		log.Info("query commit code error.")
+	} else {
+		log.Info("query commit code size, len=" + fmt.Sprint(len(CommitCodeSizeMap)))
+	}
+	CommitDatasetSizeMap := queryDatasetSize(start_unix, end_unix)
+	SolveIssueCountMap := querySolveIssue(start_unix, end_unix)
+	CreateRepoCountMap := queryUserCreateRepo(start_unix, end_unix)
+
+	for i, userRecord := range userList {
+		var dateRecord UserBusinessAnalysis
+		dateRecord.ID = userRecord.ID
+		log.Info("i=" + fmt.Sprint(i) + " userName=" + userRecord.Name)
+		dateRecord.CountDate = CountDate.Unix()
+		dateRecord.Email = userRecord.Email
+		dateRecord.RegistDate = userRecord.CreatedUnix
+		dateRecord.Name = userRecord.Name
+		dateRecord.GiteaAgeMonth = subMonth(currentTimeNow, userRecord.CreatedUnix.AsTime())
+		if _, ok := CodeMergeCountMap[dateRecord.ID]; !ok {
+			dateRecord.CodeMergeCount = 0
+		} else {
+			dateRecord.CodeMergeCount = CodeMergeCountMap[dateRecord.ID]
+		}
+
+		if _, ok := CommitCountMap[dateRecord.ID]; !ok {
+			dateRecord.CommitCount = 0
+		} else {
+			dateRecord.CommitCount = CommitCountMap[dateRecord.ID]
+		}
+
+		if _, ok := IssueCountMap[dateRecord.ID]; !ok {
+			dateRecord.IssueCount = 0
+		} else {
+			dateRecord.IssueCount = IssueCountMap[dateRecord.ID]
+		}
+
+		if _, ok := CommentCountMap[dateRecord.ID]; !ok {
+			dateRecord.CommentCount = 0
+		} else {
+			dateRecord.CommentCount = CommentCountMap[dateRecord.ID]
+		}
+
+		if _, ok := FocusRepoCountMap[dateRecord.ID]; !ok {
+			dateRecord.FocusRepoCount = 0
+		} else {
+			dateRecord.FocusRepoCount = FocusRepoCountMap[dateRecord.ID]
+		}
+
+		if _, ok := StarRepoCountMap[dateRecord.ID]; !ok {
+			dateRecord.StarRepoCount = 0
+		} else {
+			dateRecord.StarRepoCount = StarRepoCountMap[dateRecord.ID]
+		}
+
+		if _, ok := WatchedCountMap[dateRecord.ID]; !ok {
+			dateRecord.WatchedCount = 0
+		} else {
+			dateRecord.WatchedCount = WatchedCountMap[dateRecord.ID]
+		}
+
+		if _, ok := CommitCodeSizeMap[dateRecord.Email]; !ok {
+			dateRecord.CommitCodeSize = 0
+		} else {
+			dateRecord.CommitCodeSize = int(CommitCodeSizeMap[dateRecord.Email].CommitLines)
+		}
+
+		if _, ok := CommitDatasetSizeMap[dateRecord.ID]; !ok {
+			dateRecord.CommitDatasetSize = 0
+		} else {
+			dateRecord.CommitDatasetSize = CommitDatasetSizeMap[dateRecord.ID]
+		}
+
+		if _, ok := SolveIssueCountMap[dateRecord.ID]; !ok {
+			dateRecord.SolveIssueCount = 0
+		} else {
+			dateRecord.SolveIssueCount = SolveIssueCountMap[dateRecord.ID]
+		}
+
+		if _, ok := wikiCountMap[dateRecord.Name]; !ok {
+			dateRecord.EncyclopediasCount = 0
+		} else {
+			dateRecord.EncyclopediasCount = wikiCountMap[dateRecord.Name]
+		}
+
+		if _, ok := CreateRepoCountMap[dateRecord.ID]; !ok {
+			dateRecord.CreateRepoCount = 0
+		} else {
+			dateRecord.CreateRepoCount = CreateRepoCountMap[dateRecord.ID]
+		}
+
+		dateRecord.CommitModelCount = 0
+
+		statisticSess := xStatistic.NewSession()
+		statisticSess.Insert(&dateRecord)
+		statisticSess.Close()
+	}
+
+}
+
+func querySolveIssue(start_unix int64, end_unix int64) map[int64]int {
+	//select issue_assignees.* from issue_assignees,issue where issue.is_closed=true and issue.id=issue_assignees.issue_id
+	sess := x.NewSession()
+	defer sess.Close()
+	sess.Select("issue_assignees.*").Table("issue_assignees").
+		Join("inner", "issue", "issue.id=issue_assignees.issue_id").
+		Where("issue.is_closed=true and issue.closed_unix>=" + fmt.Sprint(start_unix) + " and issue.closed_unix<=" + fmt.Sprint(end_unix))
+	issueAssigneesList := make([]*IssueAssignees, 0)
+	sess.Find(&issueAssigneesList)
+	resultMap := make(map[int64]int)
+	log.Info("query IssueAssignees size=" + fmt.Sprint(len(issueAssigneesList)))
+	for _, issueAssigneesRecord := range issueAssigneesList {
+		if _, ok := resultMap[issueAssigneesRecord.AssigneeID]; !ok {
+			resultMap[issueAssigneesRecord.AssigneeID] = 1
+		} else {
+			resultMap[issueAssigneesRecord.AssigneeID] += 1
+		}
+	}
+	return resultMap
+
+}
+
+func queryAction(start_unix int64, end_unix int64, actionType int64) map[int64]int {
+	sess := x.NewSession()
+	defer sess.Close()
+	sess.Select("id,user_id,op_type,act_user_id").Table("action").Where("op_type=" + fmt.Sprint(actionType) + " and created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix))
+	actionList := make([]*Action, 0)
+	sess.Find(&actionList)
+	resultMap := make(map[int64]int)
+	log.Info("query action size=" + fmt.Sprint(len(actionList)))
+	for _, actionRecord := range actionList {
+		if _, ok := resultMap[actionRecord.UserID]; !ok {
+			resultMap[actionRecord.UserID] = 1
+		} else {
+			resultMap[actionRecord.UserID] += 1
+		}
+	}
+	return resultMap
+}
+
+func queryComment(start_unix int64, end_unix int64) map[int64]int {
+
+	sess := x.NewSession()
+	defer sess.Close()
+	sess.Select("id,type,poster_id").Table("comment").Where(" created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix))
+	commentList := make([]*Comment, 0)
+	sess.Find(&commentList)
+	resultMap := make(map[int64]int)
+	log.Info("query Comment size=" + fmt.Sprint(len(commentList)))
+	for _, commentRecord := range commentList {
+		if _, ok := resultMap[commentRecord.PosterID]; !ok {
+			resultMap[commentRecord.PosterID] = 1
+		} else {
+			resultMap[commentRecord.PosterID] += 1
+		}
+	}
+	return resultMap
+}
+
+func queryWatch(start_unix int64, end_unix int64) map[int64]int {
+
+	sess := x.NewSession()
+	defer sess.Close()
+	sess.Select("id,user_id,repo_id").Table("watch").Where(" created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix))
+	watchList := make([]*Watch, 0)
+	sess.Find(&watchList)
+	resultMap := make(map[int64]int)
+	log.Info("query Watch size=" + fmt.Sprint(len(watchList)))
+	for _, watchRecord := range watchList {
+		if _, ok := resultMap[watchRecord.UserID]; !ok {
+			resultMap[watchRecord.UserID] = 1
+		} else {
+			resultMap[watchRecord.UserID] += 1
+		}
+	}
+	return resultMap
+
+}
+
+func queryStar(start_unix int64, end_unix int64) map[int64]int {
+
+	sess := x.NewSession()
+	defer sess.Close()
+	sess.Select("id,uid,repo_id").Table("star").Where(" created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix))
+	starList := make([]*Star, 0)
+	sess.Find(&starList)
+	resultMap := make(map[int64]int)
+	log.Info("query Star size=" + fmt.Sprint(len(starList)))
+	for _, starRecord := range starList {
+		if _, ok := resultMap[starRecord.UID]; !ok {
+			resultMap[starRecord.UID] = 1
+		} else {
+			resultMap[starRecord.UID] += 1
+		}
+	}
+	return resultMap
+
+}
+
+func queryFollow(start_unix int64, end_unix int64) map[int64]int {
+
+	sess := x.NewSession()
+	defer sess.Close()
+	sess.Select("id,user_id,follow_id").Table("follow").Where(" created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix))
+	followList := make([]*Follow, 0)
+	sess.Find(&followList)
+	resultMap := make(map[int64]int)
+	log.Info("query Follow size=" + fmt.Sprint(len(followList)))
+	for _, followRecord := range followList {
+		if _, ok := resultMap[followRecord.UserID]; !ok {
+			resultMap[followRecord.UserID] = 1
+		} else {
+			resultMap[followRecord.UserID] += 1
+		}
+	}
+	return resultMap
+}
+
+func queryDatasetSize(start_unix int64, end_unix int64) map[int64]int {
+	sess := x.NewSession()
+	defer sess.Close()
+	sess.Select("id,uploader_id,size").Table("attachment").Where(" created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix))
+	attachmentList := make([]*Attachment, 0)
+	sess.Find(&attachmentList)
+	resultMap := make(map[int64]int)
+	log.Info("query Attachment size=" + fmt.Sprint(len(attachmentList)))
+	for _, attachRecord := range attachmentList {
+		if _, ok := resultMap[attachRecord.UploaderID]; !ok {
+			resultMap[attachRecord.UploaderID] = int(attachRecord.Size / (1024 * 1024)) //MB
+		} else {
+			resultMap[attachRecord.UploaderID] += int(attachRecord.Size / (1024 * 1024)) //MB
+		}
+	}
+	return resultMap
+
+}
+
+func queryUserCreateRepo(start_unix int64, end_unix int64) map[int64]int {
+	sess := x.NewSession()
+	defer sess.Close()
+	sess.Select("id,owner_id,name").Table("repository").Where(" created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix))
+	repoList := make([]*Repository, 0)
+	sess.Find(&repoList)
+	resultMap := make(map[int64]int)
+	log.Info("query Repository size=" + fmt.Sprint(len(repoList)))
+	for _, repoRecord := range repoList {
+		if _, ok := resultMap[repoRecord.OwnerID]; !ok {
+			resultMap[repoRecord.OwnerID] = 1
+		} else {
+			resultMap[repoRecord.OwnerID] += 1
+		}
+	}
+	return resultMap
+
+}
+
+func subMonth(t1, t2 time.Time) (month int) {
+	y1 := t1.Year()
+	y2 := t2.Year()
+	m1 := int(t1.Month())
+	m2 := int(t2.Month())
+	d1 := t1.Day()
+	d2 := t2.Day()
+
+	yearInterval := y1 - y2
+	// if the month-day of t1 is earlier than that of t2, decrement yearInterval to get the number of full years in between
+	if m1 < m2 || m1 == m2 && d1 < d2 {
+		yearInterval--
+	}
+	// compute the remaining month difference
+	monthInterval := (m1 + 12) - m2
+	if d1 < d2 {
+		monthInterval--
+	}
+	monthInterval %= 12
+	month = yearInterval*12 + monthInterval
+	return month
+}
diff --git a/models/user_follow.go b/models/user_follow.go
index 4bde71cb9..b63535bb9 100644
--- a/models/user_follow.go
+++ b/models/user_follow.go
@@ -4,11 +4,14 @@
 package models
 
+import "code.gitea.io/gitea/modules/timeutil"
+
 // Follow represents relations of user and his/her followers.
type Follow struct {
-	ID       int64 `xorm:"pk autoincr"`
-	UserID   int64 `xorm:"UNIQUE(follow)"`
-	FollowID int64 `xorm:"UNIQUE(follow)"`
+	ID          int64              `xorm:"pk autoincr"`
+	UserID      int64              `xorm:"UNIQUE(follow)"`
+	FollowID    int64              `xorm:"UNIQUE(follow)"`
+	CreatedUnix timeutil.TimeStamp `xorm:"created"`
 }
 
 // IsFollowing returns true if user is following followID.
diff --git a/modules/cron/tasks_basic.go b/modules/cron/tasks_basic.go
old mode 100644
new mode 100755
index f42710618..26cd16778
--- a/modules/cron/tasks_basic.go
+++ b/modules/cron/tasks_basic.go
@@ -163,6 +163,28 @@ func registerHandleBlockChainUnSuccessCommits() {
 	})
 }
 
+func registerHandleRepoStatistic() {
+	RegisterTaskFatal("handle_repo_statistic", &BaseConfig{
+		Enabled:    true,
+		RunAtStart: false,
+		Schedule:   "@daily",
+	}, func(ctx context.Context, _ *models.User, _ Config) error {
+		repo.RepoStatisticAuto()
+		return nil
+	})
+}
+
+func registerHandleUserStatistic() {
+	RegisterTaskFatal("handle_user_statistic", &BaseConfig{
+		Enabled:    true,
+		RunAtStart: false,
+		Schedule:   "@daily",
+	}, func(ctx context.Context, _ *models.User, _ Config) error {
+		repo.TimingCountData()
+		return nil
+	})
+}
+
 func initBasicTasks() {
 	registerUpdateMirrorTask()
 	registerRepoHealthCheck()
@@ -177,4 +199,7 @@ func initBasicTasks() {
 	registerHandleBlockChainUnSuccessRepos()
 	registerHandleBlockChainMergedPulls()
 	registerHandleBlockChainUnSuccessCommits()
+
+	registerHandleRepoStatistic()
+	registerHandleUserStatistic()
 }
diff --git a/modules/git/repo_commit.go b/modules/git/repo_commit.go
index c5f6d6cdd..a51a402c6 100644
--- a/modules/git/repo_commit.go
+++ b/modules/git/repo_commit.go
@@ -206,6 +206,16 @@ func (repo *Repository) GetCommitByPath(relpath string) (*Commit, error) {
 	return commits.Front().Value.(*Commit), nil
 }
 
+// GetCommitByPathAndDays returns the latest commit (if any) of the repository at relpath within the given number of days
+func (repo *Repository) GetCommitByPathAndDays(relpath string, days int) (*list.List, error) {
+	stdout, err := NewCommand("log", "-1", prettyLogFormat, "--since="+fmt.Sprint(days)+".days").RunInDirBytes(relpath)
+	if err != nil {
+		return nil, err
+	}
+
+	return repo.parsePrettyFormatLogToList(stdout)
+}
+
 // CommitsRangeSize the default commits range size
 var CommitsRangeSize = 50
 
diff --git a/modules/git/repo_stats_custom.go b/modules/git/repo_stats_custom.go
new file mode 100644
index 000000000..f7556d5c2
--- /dev/null
+++ b/modules/git/repo_stats_custom.go
@@ -0,0 +1,370 @@
+package git
+
+import (
+	"bufio"
+	"bytes"
+	"fmt"
+	"net/url"
+	"sort"
+	"strconv"
+	"strings"
+	"time"
+
+	Log "code.gitea.io/gitea/modules/log"
+)
+
+type RepoKPIStats struct {
+	Contributors        int64
+	KeyContributors     int64
+	DevelopAge          int64
+	ContributorsAdded   int64
+	CommitsAdded        int64
+	CommitLinesModified int64
+	WikiPages           int64
+	Authors             []*UserKPITypeStats
+}
+
+type UserKPIStats struct {
+	Name        string
+	Email       string
+	Commits     int64
+	CommitLines int64
+}
+type UserKPITypeStats struct {
+	UserKPIStats
+	isNewContributor bool // whether the contributor joined within the last 4 months
+}
+
+func GetRepoKPIStats(repoPath string, wikiPath string) (*RepoKPIStats, error) {
+	stats := &RepoKPIStats{}
+
+	contributors, err := GetContributors(repoPath)
+	if err != nil {
+		return nil, err
+	}
+	timeUntil := time.Now()
+	fourMonthAgo := timeUntil.AddDate(0, -4, 0)
+	recentlyContributors, err := getContributors(repoPath, fourMonthAgo)
+	newContributorsDict := make(map[string]struct{})
+	if err != nil {
+		return nil, err
+	}
+
+	if contributors != nil {
+		stats.Contributors = int64(len(contributors))
+		for _, contributor := range contributors {
+			if contributor.CommitCnt >= 3 {
+				stats.KeyContributors++
+			}
+
+			if recentlyContributors != nil {
+				for _, recentlyContributor := range recentlyContributors {
+					if recentlyContributor.Email == contributor.Email && recentlyContributor.CommitCnt == contributor.CommitCnt {
+						stats.ContributorsAdded++
+						newContributorsDict[recentlyContributor.Email] = struct{}{}
+					}
+
+				}
+			}
+
+		}
+
+	}
+
+	err = setDevelopAge(repoPath, stats)
+	if err != nil {
+		return nil, fmt.Errorf("FillFromGit: %v", err)
+	}
+	err = setRepoKPIStats(repoPath, fourMonthAgo, stats, newContributorsDict)
+
+	if err != nil {
+		return nil, fmt.Errorf("FillFromGit: %v", err)
+	}
+
+	setWikiPages(wikiPath, stats)
+	return stats, nil
+
+}
+
+func setDevelopAge(repoPath string, stats *RepoKPIStats) error {
+	args := []string{"log", "--no-merges", "--branches=*", "--format=%cd", "--date=short"}
+	stdout, err := NewCommand(args...).RunInDirBytes(repoPath)
+	if err != nil {
+		return err
+	}
+	scanner := bufio.NewScanner(bytes.NewReader(stdout))
+	scanner.Split(bufio.ScanLines)
+	developMonth := make(map[string]struct{})
+	for scanner.Scan() {
+		l := strings.TrimSpace(scanner.Text())
+		month := l[0:strings.LastIndex(l, "-")]
+		if _, ok := developMonth[month]; !ok {
+			developMonth[month] = struct{}{}
+		}
+	}
+
+	stats.DevelopAge = int64(len(developMonth))
+	return nil
+}
+
+// GetUserKPIStats returns each user's contribution stats over the past day
+func GetUserKPIStats(repoPath string) (map[string]*UserKPIStats, error) {
+	timeUntil := time.Now()
+	oneDayAgo := timeUntil.AddDate(0, 0, -1)
+	since := oneDayAgo.Format(time.RFC3339)
+	args := []string{"log", "--numstat", "--no-merges", "--branches=*", "--pretty=format:---%n%h%n%an%n%ae%n", "--date=iso", fmt.Sprintf("--since='%s'", since)}
+	stdout, err := NewCommand(args...).RunInDirBytes(repoPath)
+	if err != nil {
+		return nil, err
+	}
+	scanner := bufio.NewScanner(bytes.NewReader(stdout))
+	scanner.Split(bufio.ScanLines)
+	usersKPIStatses := make(map[string]*UserKPIStats)
+	var author string
+	p := 0
+	var email string
+	for scanner.Scan() {
+		l := strings.TrimSpace(scanner.Text())
+		if l == "---" {
+			p = 1
+		} else if p == 0 {
+			continue
+		} else {
+			p++
+		}
+		if p > 4 && len(l) == 0 {
+			continue
+		}
+		switch p {
+		case 1: // Separator
+		case 2: // Commit sha-1
+		case 3: // Author
+			author = l
+		case 4: // E-mail
+			email = strings.ToLower(l)
+			if _, ok := usersKPIStatses[email]; !ok {
+				usersKPIStatses[email] = &UserKPIStats{
+					Name:        author,
+					Email:       email,
+					Commits:     0,
+					CommitLines: 0,
+				}
+			}
+
+			usersKPIStatses[email].Commits++
+		default: // Changed file
+			if parts := strings.Fields(l); len(parts) >= 3 {
+				if parts[0] != "-" {
+					if c, err := strconv.ParseInt(strings.TrimSpace(parts[0]), 10, 64); err == nil {
+						usersKPIStatses[email].CommitLines += c
+					}
+				}
+				if parts[1] != "-" {
+					if c, err := strconv.ParseInt(strings.TrimSpace(parts[1]), 10, 64); err == nil {
+						usersKPIStatses[email].CommitLines += c
+					}
+				}
+
+			}
+		}
+	}
+
+	return usersKPIStatses, nil
+
+}
+
+func setRepoKPIStats(repoPath string, fromTime time.Time, stats *RepoKPIStats, newContributers map[string]struct{}) error {
+	since := fromTime.Format(time.RFC3339)
+	args := []string{"log", "--numstat", "--no-merges", "--branches=*", "--pretty=format:---%n%h%n%an%n%ae%n", "--date=iso", fmt.Sprintf("--since='%s'", since)}
+
+	stdout, err := NewCommand(args...).RunInDirBytes(repoPath)
+	if err != nil {
+		return err
+	}
+
+	scanner := bufio.NewScanner(bytes.NewReader(stdout))
+	scanner.Split(bufio.ScanLines)
+
+	authors := make(map[string]*UserKPITypeStats)
+
+	var author string
+	p := 0
+	var email string
+	for 
scanner.Scan() { + l := strings.TrimSpace(scanner.Text()) + if l == "---" { + p = 1 + } else if p == 0 { + continue + } else { + p++ + } + if p > 4 && len(l) == 0 { + continue + } + switch p { + case 1: // Separator + case 2: // Commit sha-1 + stats.CommitsAdded++ + case 3: // Author + author = l + case 4: // E-mail + email = strings.ToLower(l) + if _, ok := authors[email]; !ok { + authors[email] = &UserKPITypeStats{ + UserKPIStats: UserKPIStats{ + Name: author, + Email: email, + Commits: 0, + CommitLines: 0, + }, + isNewContributor: false, + } + } + if _, ok := newContributers[email]; ok { + authors[email].isNewContributor = true + } + + authors[email].Commits++ + default: // Changed file + if parts := strings.Fields(l); len(parts) >= 3 { + if parts[0] != "-" { + if c, err := strconv.ParseInt(strings.TrimSpace(parts[0]), 10, 64); err == nil { + stats.CommitLinesModified += c + authors[email].CommitLines += c + } + } + if parts[1] != "-" { + if c, err := strconv.ParseInt(strings.TrimSpace(parts[1]), 10, 64); err == nil { + stats.CommitLinesModified += c + authors[email].CommitLines += c + } + } + + } + } + } + + a := make([]*UserKPITypeStats, 0, len(authors)) + for _, v := range authors { + a = append(a, v) + } + // Sort authors descending depending on commit count + sort.Slice(a, func(i, j int) bool { + return a[i].Commits > a[j].Commits + }) + + stats.Authors = a + return nil + +} + +func getContributors(repoPath string, fromTime time.Time) ([]Contributor, error) { + since := fromTime.Format(time.RFC3339) + cmd := NewCommand("shortlog", "-sne", "--all", fmt.Sprintf("--since='%s'", since)) + stdout, err := cmd.RunInDir(repoPath) + if err != nil { + return nil, err + } + stdout = strings.Trim(stdout, "\n") + contributorRows := strings.Split(stdout, "\n") + if len(contributorRows) > 0 { + contributorsInfo := make([]Contributor, len(contributorRows)) + for i := 0; i < len(contributorRows); i++ { + var oneCount string = strings.Trim(contributorRows[i], " ") + if strings.Index(oneCount, "\t") < 0 { + continue + } + number := oneCount[0:strings.Index(oneCount, "\t")] + commitCnt, _ := strconv.Atoi(number) + committer := oneCount[strings.Index(oneCount, "\t")+1 : strings.LastIndex(oneCount, " ")] + committer = strings.Trim(committer, " ") + email := oneCount[strings.Index(oneCount, "<")+1 : strings.Index(oneCount, ">")] + contributorsInfo[i] = Contributor{ + commitCnt, committer, email, + } + } + return contributorsInfo, nil + } + return nil, nil +} + +func setWikiPages(wikiPath string, stats *RepoKPIStats) { + wikiPages := 0 + + if wikiPath == "" { + stats.WikiPages = int64(wikiPages) + return + } + + wikiRepo, commit, err := findWikiRepoCommit(wikiPath) + if err != nil { + if !IsErrNotExist(err) { + Log.Warn("GetBranchCommit", err) + } + stats.WikiPages = int64(wikiPages) + return + } + + // Get page list. 
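+	// each regular .md entry except the _Sidebar and _Footer special pages counts as one wiki page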
+	entries, err := commit.ListEntries()
+	if err != nil {
+		if wikiRepo != nil {
+			wikiRepo.Close()
+		}
+		Log.Warn("GetBranchCommit", err)
+		stats.WikiPages = int64(wikiPages)
+		return
+
+	}
+
+	for _, entry := range entries {
+		if !entry.IsRegular() {
+			continue
+		}
+
+		wikiName, err := filenameToName(entry.Name())
+		if err != nil || wikiName == "_Sidebar" || wikiName == "_Footer" {
+			continue
+		}
+
+		wikiPages++
+
+	}
+	// make sure wikiRepo is closed when the function returns
+	defer func() {
+		if wikiRepo != nil {
+			wikiRepo.Close()
+		}
+	}()
+	stats.WikiPages = int64(wikiPages)
+	return
+
+}
+
+func filenameToName(filename string) (string, error) {
+	if !strings.HasSuffix(filename, ".md") {
+		return "", fmt.Errorf("invalid file")
+	}
+	basename := filename[:len(filename)-3]
+	unescaped, err := url.QueryUnescape(basename)
+	if err != nil {
+		return "", err
+	}
+	return strings.Replace(unescaped, "-", " ", -1), nil
+}
+
+func findWikiRepoCommit(wikiPath string) (*Repository, *Commit, error) {
+	wikiRepo, err := OpenRepository(wikiPath)
+	if err != nil {
+
+		return nil, nil, err
+	}
+
+	commit, err := wikiRepo.GetBranchCommit("master")
+	if err != nil {
+		return wikiRepo, nil, err
+	}
+	return wikiRepo, commit, nil
+}
diff --git a/modules/repository/elk_pagedata.go b/modules/repository/elk_pagedata.go
new file mode 100644
index 000000000..bb027726d
--- /dev/null
+++ b/modules/repository/elk_pagedata.go
@@ -0,0 +1,305 @@
+package repository
+
+import (
+	"bytes"
+	"encoding/base64"
+	"encoding/json"
+	"fmt"
+	"io/ioutil"
+	"net/http"
+
+	"code.gitea.io/gitea/modules/setting"
+)
+
+// begin: JSON structures of the request sent to ELK
+type InputInfo struct {
+	Batch []Batch `json:"batch"`
+}
+type Fields struct {
+	Field  string `json:"field"`
+	Format string `json:"format"`
+}
+type MatchPhrase struct {
+	Message string `json:"message"`
+}
+type Should struct {
+	MatchPhrase MatchPhrase `json:"match_phrase"`
+}
+type Bool struct {
+	Should             []Should `json:"should"`
+	MinimumShouldMatch int      `json:"minimum_should_match"`
+}
+type Timestamptest struct {
+	// Gte time.Time `json:"gte"`
+	Gte    string `json:"gte"`
+	Lte    string `json:"lte"`
+	Format string `json:"format"`
+}
+type Range struct {
+	Timestamptest Timestamptest `json:"@timestamptest"`
+}
+
+type FilterMatchPhrase struct {
+	UserName    string `json:"userName.keyword,omitempty"`
+	ProjectName string `json:"projectName.keyword,omitempty"`
+	TagName     string `json:"tagName.keyword,omitempty"`
+}
+
+type Filter struct {
+	Bool              *Bool              `json:"bool,omitempty"`
+	Range             *Range             `json:"range,omitempty"`
+	FilterMatchPhrase *FilterMatchPhrase `json:"match_phrase,omitempty"`
+}
+type MustNotMatchPhrase struct {
+	ProjectName string `json:"projectName"`
+}
+type MustNot struct {
+	MustNotMatchPhrase MustNotMatchPhrase `json:"match_phrase"`
+}
+type BoolIn struct {
+	Filter  []Filter  `json:"filter"`
+	MustNot []MustNot `json:"must_not"`
+}
+type Query struct {
+	BoolIn BoolIn `json:"bool"`
+}
+type Body struct {
+	Size   int      `json:"size"`
+	Fields []Fields `json:"fields"`
+	Query  Query    `json:"query"`
+}
+type Params struct {
+	Index string `json:"index"`
+	Body  Body   `json:"body"`
+}
+type Request struct {
+	Params Params `json:"params"`
+}
+type Batch struct {
+	Request Request `json:"request"`
+}
+
+// end: JSON structures of the request sent to ELK
+
+// begin: JSON structures of the ELK response
+type Hits struct {
+	Total int `json:"total"`
+}
+type RawResponse struct {
+	Hits Hits `json:"hits"`
+}
+type Result struct {
+	RawResponse RawResponse `json:"rawResponse"`
+	Loaded      int         `json:"loaded"`
+}
+type ResultInfo struct {
+	Id     int    `json:"id"`
+	Result Result `json:"result"`
+}
+
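+// Result.Loaded reports how many result batches have arrived so far; ViewInfo below polls until it is non-zero before trusting RawResponse.Hits.Total.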
+// end: JSON structures of the ELK response
+
+// SendReqToElk sends a POST request with the given JSON body to the ELK endpoint
+func SendReqToElk(jsonStr []byte) (content string) {
+	ElkBase64Init := setting.ElkUser + ":" + setting.ElkPassword
+	ElkBase64 := base64.StdEncoding.EncodeToString([]byte(ElkBase64Init))
+	BasicElkBase64 := "Basic" + " " + ElkBase64
+	url := setting.ElkUrl
+	req, _ := http.NewRequest("POST", url, bytes.NewBuffer(jsonStr))
+	req.Header.Set("Content-Type", "application/json")
+	req.Header.Set("kbn-version", "7.13.2")
+	req.Header.Set("Authorization", BasicElkBase64)
+
+	client := &http.Client{}
+	resp, err := client.Do(req)
+	if err != nil {
+		panic(err)
+	}
+	defer resp.Body.Close()
+	body, _ := ioutil.ReadAll(resp.Body)
+	return string(body)
+}
+
+// GetResultFromElk parses the returned ELK data, keeping only totalView (the page views); loaded is the number of loaded result batches, used to judge whether the returned data is complete
+func GetResultFromElk(resultinfo ResultInfo, jobResult string) (loaded int, totalView int) {
+	var resultTest ResultInfo
+	errs := json.Unmarshal([]byte(jobResult), &resultTest)
+	if errs != nil { fmt.Println(errs) }
+	return resultTest.Result.Loaded, resultTest.Result.RawResponse.Hits.Total
+}
+
+// ProjectViewInit builds the ELK request structure for the given user name and project name over the query time range [Gte, Lte]
+func ProjectViewInit(User string, Project string, Gte string, Lte string) (projectViewInit InputInfo) {
+	var inputStruct InputInfo
+	inputStruct.Batch = make([]Batch, 1)
+	inputStruct.Batch[0].Request.Params.Index = setting.Index
+	inputStruct.Batch[0].Request.Params.Body.Size = 0
+	inputStruct.Batch[0].Request.Params.Body.Fields = make([]Fields, 1)
+	inputStruct.Batch[0].Request.Params.Body.Fields[0].Field = setting.TimeField
+	inputStruct.Batch[0].Request.Params.Body.Fields[0].Format = setting.ElkTimeFormat
+	inputStruct.Batch[0].Request.Params.Body.Query.BoolIn.Filter = make([]Filter, 3)
+	// restrict the query time range
+	var timeRange Range
+	timeRange.Timestamptest.Gte = Gte
+	timeRange.Timestamptest.Lte = Lte
+	inputStruct.Batch[0].Request.Params.Body.Query.BoolIn.Filter[0].Range = &timeRange
+	// restrict to the given user
+	var userName FilterMatchPhrase
+	userName.UserName = User
+	inputStruct.Batch[0].Request.Params.Body.Query.BoolIn.Filter[1].FilterMatchPhrase = &userName
+	// restrict to the given project
+	var projectName FilterMatchPhrase
+	projectName.ProjectName = Project
+	inputStruct.Batch[0].Request.Params.Body.Query.BoolIn.Filter[2].FilterMatchPhrase = &projectName
+	return inputStruct
+}
+
+// AllProjectViewInit builds the ELK request structure for the given message filter, excluding the given project name, over the query time range [Gte, Lte]
+func AllProjectViewInit(MessageInfo string, NotProject string, Gte string, Lte string) (allProjectViewInit InputInfo) {
+	var inputStruct InputInfo
+	inputStruct.Batch = make([]Batch, 1)
+	inputStruct.Batch[0].Request.Params.Index = setting.Index
+	inputStruct.Batch[0].Request.Params.Body.Size = 0
+	inputStruct.Batch[0].Request.Params.Body.Fields = make([]Fields, 1)
+	inputStruct.Batch[0].Request.Params.Body.Fields[0].Field = setting.TimeField
+	inputStruct.Batch[0].Request.Params.Body.Fields[0].Format = setting.ElkTimeFormat
+	inputStruct.Batch[0].Request.Params.Body.Query.BoolIn.Filter = make([]Filter, 2)
+	// restrict by message
+	var boolFilter Bool
+	boolFilter.Should = make([]Should, 1)
+	boolFilter.Should[0].MatchPhrase.Message = MessageInfo
+	boolFilter.MinimumShouldMatch = 1
+	inputStruct.Batch[0].Request.Params.Body.Query.BoolIn.Filter[0].Bool = &boolFilter
+	// restrict the query time range
+	var timeRange Range
+	timeRange.Timestamptest.Gte = Gte
+	timeRange.Timestamptest.Lte = Lte
+	inputStruct.Batch[0].Request.Params.Body.Query.BoolIn.Filter[1].Range = &timeRange
+	// exclude the given project name
+	inputStruct.Batch[0].Request.Params.Body.Query.BoolIn.MustNot = make([]MustNot, 1)
+	inputStruct.Batch[0].Request.Params.Body.Query.BoolIn.MustNot[0].MustNotMatchPhrase.ProjectName = NotProject
+	return inputStruct
+}
+
+// TagNameInit builds the ELK request structure for the given message filter and tag name over the query time range [Gte, Lte]
+func TagNameInit(MessageInfo string, Tagname string, Gte string, Lte string) (projectViewInit InputInfo) {
+	var inputStruct InputInfo
+	inputStruct.Batch = make([]Batch, 1)
+	inputStruct.Batch[0].Request.Params.Index = setting.Index
+	inputStruct.Batch[0].Request.Params.Body.Size = 0
+	inputStruct.Batch[0].Request.Params.Body.Fields = make([]Fields, 1)
+	inputStruct.Batch[0].Request.Params.Body.Fields[0].Field = setting.TimeField
+	inputStruct.Batch[0].Request.Params.Body.Fields[0].Format = setting.ElkTimeFormat
+	inputStruct.Batch[0].Request.Params.Body.Query.BoolIn.Filter = make([]Filter, 3)
+	// restrict by message
+	var boolFilter Bool
+	boolFilter.Should = make([]Should, 1)
+	boolFilter.Should[0].MatchPhrase.Message = MessageInfo
+	boolFilter.MinimumShouldMatch = 1
+	inputStruct.Batch[0].Request.Params.Body.Query.BoolIn.Filter[0].Bool = &boolFilter
+	// restrict to the given tag name
+	var tagName FilterMatchPhrase
+	tagName.TagName = Tagname
+	inputStruct.Batch[0].Request.Params.Body.Query.BoolIn.Filter[1].FilterMatchPhrase = &tagName
+	// restrict the query time range
+	var timeRange Range
+	timeRange.Timestamptest.Gte = Gte
+	timeRange.Timestamptest.Lte = Lte
+	inputStruct.Batch[0].Request.Params.Body.Query.BoolIn.Filter[2].Range = &timeRange
+	return inputStruct
+}
+
+// ViewInfo sends the initialized request to ELK and extracts only the total page views from the result
+func ViewInfo(viewInfo InputInfo) (totalView int) {
+	jsons, errs := json.Marshal(viewInfo)
+	if errs != nil {
+		fmt.Println("errs:", errs.Error())
+	}
+	// fmt.Println("viewInfoInit:",string(jsons))
+	var jsonStr = []byte(jsons)
+	var resultInfo ResultInfo
+	loaded, totalView := GetResultFromElk(resultInfo, SendReqToElk(jsonStr))
+	retries := 0
+	for loaded == 0 && retries < 100 {
+		loadedNext, totalViewNext := GetResultFromElk(resultInfo, SendReqToElk(jsonStr))
+		retries++
+		if loadedNext != 0 {
+			fmt.Println("totalView:", totalViewNext)
+			return totalViewNext
+		}
+	}
+	fmt.Println("loaded:", loaded)
+	return totalView
+}
+
+// @title AppointProjectView
+// @description get the page views of the given user and project
+// @param User string "user name"
+// @param Project string "project name"
+// @param Gte string "start time", e.g. time.Now().AddDate(0, 0, -1).Format(time.RFC3339)
+// @param Lte string "end time", e.g. time.Now().Format(time.RFC3339)
+// @return totalView int "page views"
+func AppointProjectView(User string, Project string, Gte string, Lte string) (totalView int) {
+	InitInfo := ProjectViewInit(User, Project, Gte, Lte)
+	return ViewInfo(InitInfo)
+}
+
+// page-view statistics of project-related pages
+type ProjectInfo struct {
+	/* Views of these pages are counted across all projects, with no per-project distinction; the aiforge project is used as the example in the URLs below */
+	// URL: https://git.openi.org.cn/OpenI/aiforge/datasets?type=0
+	Project_dataset_type_0 int
+	// URL: https://git.openi.org.cn/OpenI/aiforge/datasets?type=1
+	Project_dataset_type_1 int
+	// URL: https://git.openi.org.cn/OpenI/aiforge/issues
+	Project_issues int
+	// URL: https://git.openi.org.cn/OpenI/aiforge/labels
+	Project_labels int
+	// URL: https://git.openi.org.cn/OpenI/aiforge/milestones
+	Project_milestones int
+	// URL: https://git.openi.org.cn/OpenI/aiforge/pulls
+	Project_pulls int
+	// URL: https://git.openi.org.cn/OpenI/aiforge/release
+	Project_release int
+	// URL: https://git.openi.org.cn/OpenI/aiforge/wiki
+	Project_wiki int
+	// URL: https://git.openi.org.cn/OpenI/aiforge/activity
+	Project_activity int
+	// URL: https://git.openi.org.cn/OpenI/aiforge/cloudbrain
+	Project_cloudbrain int
+	// URL: https://git.openi.org.cn/OpenI/aiforge/modelarts
+	Project_modelarts int
+	// URL: https://git.openi.org.cn/OpenI/aiforge/blockchain
+	Project_blockchain int
+	// URL: https://git.openi.org.cn/OpenI/aiforge/watchers
+	Project_watchers int
+	// URL: https://git.openi.org.cn/OpenI/aiforge/stars
+	Project_stars int
+	// URL: https://git.openi.org.cn/OpenI/aiforge/forks
+	Project_forks int
+}
+
+// @title AllProjectView
+// @description get the page views of the listed pages across all projects
+// @param Gte string "start time", e.g. time.Now().AddDate(0, 0, -1).Format(time.RFC3339)
+// @param Lte string "end time"
+// @return projectInfo ProjectInfo "page views of these pages across all projects, no per-project distinction"
+func AllProjectView(Gte string, Lte string) (projectInfo ProjectInfo) {
+	projectInfo.Project_dataset_type_0 = ViewInfo(AllProjectViewInit("/datasets?type=0", "%{[request][2]}", Gte, Lte))
+	projectInfo.Project_dataset_type_1 = ViewInfo(AllProjectViewInit("/datasets?type=1", "%{[request][2]}", Gte, Lte))
+	projectInfo.Project_issues = ViewInfo(AllProjectViewInit("/issues HTTP/2.0", "%{[request][2]}", Gte, Lte))
+	projectInfo.Project_labels = ViewInfo(TagNameInit("/labels HTTP/2.0", "labels", Gte, Lte))
+	projectInfo.Project_milestones = ViewInfo(AllProjectViewInit("/milestones HTTP/2.0", "%{[request][2]}", Gte, Lte))
+	projectInfo.Project_pulls = ViewInfo(AllProjectViewInit("/pulls HTTP/2.0", "%{[request][2]}", Gte, Lte))
+	projectInfo.Project_release = ViewInfo(AllProjectViewInit("/release HTTP/2.0", "%{[request][2]}", Gte, Lte))
+	projectInfo.Project_wiki = ViewInfo(AllProjectViewInit("/wiki HTTP/2.0", "%{[request][2]}", Gte, Lte))
+	projectInfo.Project_activity = ViewInfo(AllProjectViewInit("/activity HTTP/2.0", "%{[request][2]}", Gte, Lte))
+	projectInfo.Project_cloudbrain = ViewInfo(AllProjectViewInit("/cloudbrain HTTP/2.0", "%{[request][2]}", Gte, Lte))
+	projectInfo.Project_modelarts = ViewInfo(AllProjectViewInit("/modelarts HTTP/2.0", "%{[request][2]}", Gte, Lte))
+	projectInfo.Project_blockchain = ViewInfo(AllProjectViewInit("/blockchain HTTP/2.0", "%{[request][2]}", Gte, Lte))
+	projectInfo.Project_watchers = ViewInfo(AllProjectViewInit("/watchers HTTP/2.0", "%{[request][2]}", Gte, Lte))
+	projectInfo.Project_stars = ViewInfo(AllProjectViewInit("/stars HTTP/2.0", "%{[request][2]}", Gte, Lte))
+	projectInfo.Project_forks = ViewInfo(AllProjectViewInit("/forks HTTP/2.0", "%{[request][2]}", Gte, Lte))
+	return projectInfo
+}
diff --git a/modules/setting/database.go b/modules/setting/database.go
old mode 100644
new mode 100755
index 85043e8c1..e4c3a4149
--- a/modules/setting/database.go
+++ b/modules/setting/database.go
@@ -24,111 +24,120 @@ var (
 	EnableSQLite3 bool
 
 	// Database holds the database settings
-	Database = struct {
-		Type              string
-		Host              string
-		Name              string
-		User              string
-		Passwd            string
-		Schema            string
-		SSLMode           string
-		Path              string
-		LogSQL            bool
-		Charset           string
-		Timeout           int // seconds
-		UseSQLite3        bool
-		UseMySQL          bool
-		UseMSSQL          bool
-		UsePostgreSQL     bool
-		DBConnectRetries  int
-		DBConnectBackoff  time.Duration
-		MaxIdleConns      int
-		MaxOpenConns      int
-		ConnMaxLifetime   time.Duration
-		IterateBufferSize int
-	}{
-		Timeout: 500,
-	}
+	Database          *DBInfo
+	DatabaseStatistic *DBInfo
 )
 
+type DBInfo struct {
+	Type              string
+	Host              string
+	Name              string
+	User              string
+	Passwd            string
+	Schema            string
+	SSLMode           string
+	Path              string
+	LogSQL            bool
+	Charset           string
+	Timeout           int // seconds
+	UseSQLite3        bool
+	UseMySQL          bool
+	UseMSSQL          bool
+	UsePostgreSQL     bool
+	DBConnectRetries  int
+	DBConnectBackoff  time.Duration
+	MaxIdleConns      int
+	MaxOpenConns      int
+	ConnMaxLifetime   time.Duration
+	IterateBufferSize int
+}
+
-// GetDBTypeByName returns the dataase type as it defined on XORM according the given name
+// GetDBTypeByName returns the database type as defined by XORM for the given name
 func GetDBTypeByName(name string) string {
 	return dbTypes[name]
 }
 
-// InitDBConfig loads the database settings
-func InitDBConfig() {
-	sec := Cfg.Section("database")
-	Database.Type = sec.Key("DB_TYPE").String()
-	switch Database.Type {
+// initDBConfig loads the database settings
+func initDBConfig(section string, database *DBInfo) {
+	sec := Cfg.Section(section)
+	database.Type = sec.Key("DB_TYPE").String()
+	switch database.Type {
 	case "sqlite3":
-		Database.UseSQLite3 = true
+		database.UseSQLite3 = true
 	case "mysql":
-		Database.UseMySQL = true
+		database.UseMySQL = true
 	case "postgres":
-		Database.UsePostgreSQL = true
+		database.UsePostgreSQL = true
 	case "mssql":
-		Database.UseMSSQL = true
+		database.UseMSSQL = true
 	}
 
-	Database.Host = sec.Key("HOST").String()
-	Database.Name = sec.Key("NAME").String()
-	Database.User = sec.Key("USER").String()
-	if len(Database.Passwd) == 0 {
-		Database.Passwd = sec.Key("PASSWD").String()
+	database.Host = sec.Key("HOST").String()
+	database.Name = sec.Key("NAME").String()
+	database.User = sec.Key("USER").String()
+	if len(database.Passwd) == 0 {
+		database.Passwd = sec.Key("PASSWD").String()
 	}
-	Database.Schema = sec.Key("SCHEMA").String()
-	Database.SSLMode = sec.Key("SSL_MODE").MustString("disable")
-	Database.Charset = sec.Key("CHARSET").In("utf8", []string{"utf8", "utf8mb4"})
-	Database.Path = sec.Key("PATH").MustString(filepath.Join(AppDataPath, "gitea.db"))
-	Database.Timeout = sec.Key("SQLITE_TIMEOUT").MustInt(500)
-	Database.MaxIdleConns = sec.Key("MAX_IDLE_CONNS").MustInt(2)
-	if Database.UseMySQL {
-		Database.ConnMaxLifetime = sec.Key("CONN_MAX_LIFE_TIME").MustDuration(3 * time.Second)
+	database.Schema = sec.Key("SCHEMA").String()
+	database.SSLMode = sec.Key("SSL_MODE").MustString("disable")
+	database.Charset = sec.Key("CHARSET").In("utf8", []string{"utf8", "utf8mb4"})
+	database.Path = sec.Key("PATH").MustString(filepath.Join(AppDataPath, "gitea.db"))
+	database.Timeout = sec.Key("SQLITE_TIMEOUT").MustInt(500)
+	database.MaxIdleConns = sec.Key("MAX_IDLE_CONNS").MustInt(2)
+	if database.UseMySQL {
+		database.ConnMaxLifetime = sec.Key("CONN_MAX_LIFE_TIME").MustDuration(3 * time.Second)
 	} else {
-		Database.ConnMaxLifetime = sec.Key("CONN_MAX_LIFE_TIME").MustDuration(0)
+		database.ConnMaxLifetime = sec.Key("CONN_MAX_LIFE_TIME").MustDuration(0)
 	}
-	Database.MaxOpenConns = sec.Key("MAX_OPEN_CONNS").MustInt(0)
+	database.MaxOpenConns = sec.Key("MAX_OPEN_CONNS").MustInt(0)
 
-	Database.IterateBufferSize = sec.Key("ITERATE_BUFFER_SIZE").MustInt(50)
-	Database.LogSQL = sec.Key("LOG_SQL").MustBool(true)
-	Database.DBConnectRetries = sec.Key("DB_RETRIES").MustInt(10)
-	Database.DBConnectBackoff = sec.Key("DB_RETRY_BACKOFF").MustDuration(3 * time.Second)
+	database.IterateBufferSize = sec.Key("ITERATE_BUFFER_SIZE").MustInt(50)
+	database.LogSQL = sec.Key("LOG_SQL").MustBool(true)
+	database.DBConnectRetries = sec.Key("DB_RETRIES").MustInt(10)
+	database.DBConnectBackoff = sec.Key("DB_RETRY_BACKOFF").MustDuration(3 * time.Second)
+}
+
+func InitDBConfig() {
+	Database = new(DBInfo)
+	DatabaseStatistic = new(DBInfo)
+	initDBConfig("database", Database)
+	initDBConfig("database_statistic", DatabaseStatistic)
+}
 
 // DBConnStr returns database connection string
-func DBConnStr() (string, error) {
+func DBConnStr(database *DBInfo) (string, error) {
 	connStr := ""
 	var Param = "?"
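+	// the first query-string parameter is joined with "?"; if the database name already carries one, use "&" instead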
- if strings.Contains(Database.Name, Param) { + if strings.Contains(database.Name, Param) { Param = "&" } - switch Database.Type { + switch database.Type { case "mysql": connType := "tcp" - if Database.Host[0] == '/' { // looks like a unix socket + if database.Host[0] == '/' { // looks like a unix socket connType = "unix" } - tls := Database.SSLMode + tls := database.SSLMode if tls == "disable" { // allow (Postgres-inspired) default value to work in MySQL tls = "false" } connStr = fmt.Sprintf("%s:%s@%s(%s)/%s%scharset=%s&parseTime=true&tls=%s", - Database.User, Database.Passwd, connType, Database.Host, Database.Name, Param, Database.Charset, tls) + database.User, database.Passwd, connType, database.Host, database.Name, Param, database.Charset, tls) case "postgres": - connStr = getPostgreSQLConnectionString(Database.Host, Database.User, Database.Passwd, Database.Name, Param, Database.SSLMode) + connStr = getPostgreSQLConnectionString(database.Host, database.User, database.Passwd, database.Name, Param, database.SSLMode) case "mssql": - host, port := ParseMSSQLHostPort(Database.Host) - connStr = fmt.Sprintf("server=%s; port=%s; database=%s; user id=%s; password=%s;", host, port, Database.Name, Database.User, Database.Passwd) + host, port := ParseMSSQLHostPort(database.Host) + connStr = fmt.Sprintf("server=%s; port=%s; database=%s; user id=%s; password=%s;", host, port, database.Name, database.User, database.Passwd) case "sqlite3": if !EnableSQLite3 { return "", errors.New("this binary version does not build support for SQLite3") } - if err := os.MkdirAll(path.Dir(Database.Path), os.ModePerm); err != nil { + if err := os.MkdirAll(path.Dir(database.Path), os.ModePerm); err != nil { return "", fmt.Errorf("Failed to create directories: %v", err) } - connStr = fmt.Sprintf("file:%s?cache=shared&mode=rwc&_busy_timeout=%d&_txlock=immediate", Database.Path, Database.Timeout) + connStr = fmt.Sprintf("file:%s?cache=shared&mode=rwc&_busy_timeout=%d&_txlock=immediate", database.Path, database.Timeout) default: - return "", fmt.Errorf("Unknown database type: %s", Database.Type) + return "", fmt.Errorf("Unknown database type: %s", database.Type) } return connStr, nil diff --git a/modules/setting/setting.go b/modules/setting/setting.go index 0fbc9f909..e1e7b7902 100755 --- a/modules/setting/setting.go +++ b/modules/setting/setting.go @@ -482,6 +482,14 @@ var ( PoolInfos string Flavor string FlavorInfos string + + //elk config + ElkUrl string + ElkUser string + ElkPassword string + Index string + TimeField string + ElkTimeFormat string ) // DateLang transforms standard language locale name to corresponding value in datetime plugin. 
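// (The six Elk* settings declared above are read in NewContext() — next hunk — from an [elk] section of app.ini. A minimal sketch of that section, using the key names the code reads; the values here are illustrative placeholders only, not recommendations:
//
//	[elk]
//	ELKURL = http://elk.example.com:5601/internal/bsearch
//	ELKUSER = elastic
//	ELKPASSWORD = changeme
//	INDEX = filebeat-7.3.2*
//	TIMEFIELD = @timestamp
//	ELKTIMEFORMAT = date_time
// )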
@@ -1201,6 +1209,14 @@ func NewContext() { PoolInfos = sec.Key("POOL_INFOS").MustString("") Flavor = sec.Key("FLAVOR").MustString("") FlavorInfos = sec.Key("FLAVOR_INFOS").MustString("") + + sec = Cfg.Section("elk") + ElkUrl = sec.Key("ELKURL").MustString("http://192.168.207.35:5601/internal/bsearch") + ElkUser = sec.Key("ELKUSER").MustString("Qizhi") + ElkPassword = sec.Key("ELKPASSWORD").MustString("Pcl2020") + Index = sec.Key("INDEX").MustString("filebeat-7.3.2*") + TimeField = sec.Key("TIMEFIELD").MustString(" @timestamptest") + ElkTimeFormat = sec.Key("ELKTIMEFORMAT").MustString("date_time") } func loadInternalToken(sec *ini.Section) string { diff --git a/options/locale/locale_en-US.ini b/options/locale/locale_en-US.ini index 50e85ba27..746e46463 100644 --- a/options/locale/locale_en-US.ini +++ b/options/locale/locale_en-US.ini @@ -87,7 +87,7 @@ write = Write preview = Preview loading = Loading… -error404_index = Request forbidden by administrative rules +error404_index = Request forbidden by administrative rules error500_index = Internal Server Error error404 = The page you are trying to reach either does not exist or you are not authorized to view it. error500= Sorry, the site has encountered some problems, we are trying to fix the page, please try again later. @@ -573,7 +573,7 @@ authorized_oauth2_applications_description = You've granted access to your perso revoke_key = Revoke revoke_oauth2_grant = Revoke Access revoke_oauth2_grant_description = Revoking access for this third party application will prevent this application from accessing your data. Are you sure? -revoke_oauth2_grant_success = You've revoked access successfully. +revoke_oauth2_grant_success = You have revoked access successfully. twofa_desc = Two-factor authentication enhances the security of your account. twofa_is_enrolled = Your account is currently enrolled in two-factor authentication. @@ -770,6 +770,10 @@ cloudbrain_selection = select cloudbrain cloudbrain_platform_selection = Select the cloudbrain platform you want to use: confirm_choice = confirm cloudbran1_tips = Only data in zip format can create cloudbrain tasks +cloudbrain_creator=Creator +cloudbrain_task = Task Name +cloudbrain_operate = Operate +cloudbrain_status_createtime = Status/Create Time template.items = Template Items template.git_content = Git Content (Default Branch) @@ -831,6 +835,7 @@ fork = Fork download_archive = Download Repository no_desc = No Description +no_label = No labels quick_guide = Quick Guide clone_this_repo = Clone this repository create_new_repo_command = Creating a new repository on the command line @@ -845,6 +850,7 @@ filter_branch_and_tag = Filter branch or tag branches = Branches tags = Tags issues = Issues +issues_detail = Detail pulls = Pull Requests labels = Labels org_labels_desc = Organization level labels that can be used with all repositories under this organization @@ -1241,6 +1247,11 @@ pulls.reject_count_1 = "%d change request" pulls.reject_count_n = "%d change requests" pulls.waiting_count_1 = "%d waiting review" pulls.waiting_count_n = "%d waiting reviews" +pulls.commits_count_1=This branch is %d commit behind the upstream. +pulls.commits_count_n=This branch is %d commits behind the upstream. +pulls.fetch_upstream=Fetch upstream +pulls.upstream_up_to_date=No new commits to fetch +pulls.upstream_error=Cannot get upstream info pulls.no_merge_desc = This pull request cannot be merged because all repository merge options are disabled.
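; (the new pulls.commits_count_* / pulls.fetch_upstream keys above back the fetch-upstream banner wired up in routers/repo/view.go later in this patch)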
pulls.no_merge_helper = Enable merge options in the repository settings or merge the pull request manually. diff --git a/options/locale/locale_zh-CN.ini b/options/locale/locale_zh-CN.ini index 89a6282e5..6dc0d410c 100755 --- a/options/locale/locale_zh-CN.ini +++ b/options/locale/locale_zh-CN.ini @@ -772,6 +772,10 @@ cloudbrain_selection=云脑选择 cloudbrain_platform_selection=选择您准备使用的云脑平台: confirm_choice=确定 cloudbran1_tips=只有zip格式的数据集才能发起云脑任务 +cloudbrain_creator=创建者 +cloudbrain_task=任务名称 +cloudbrain_operate=操作 +cloudbrain_status_createtime=状态/创建时间 template.items=模板选项 template.git_content=Git数据(默认分支) @@ -833,6 +837,7 @@ fork=派生 download_archive=下载此项目 no_desc=暂无描述 +no_label = 暂无标签 quick_guide=快速帮助 clone_this_repo=克隆当前项目 create_new_repo_command=从命令行创建一个新的项目 @@ -847,6 +852,7 @@ filter_branch_and_tag=过滤分支或标签 branches=分支列表 tags=标签列表 issues=任务 +issues_detail=详情 pulls=合并请求 labels=标签 org_labels_desc=组织级别的标签,可以被本组织下的 所有项目 使用 @@ -1243,6 +1249,11 @@ pulls.reject_count_1=%d 变更请求 pulls.reject_count_n=%d 变更请求 pulls.waiting_count_1=%d 个正在等待审核 pulls.waiting_count_n=%d 个正在等待审核 +pulls.commits_count_1=当前分支落后上游分支 %d 个提交 +pulls.commits_count_n=当前分支落后上游分支 %d 个提交 +pulls.fetch_upstream=拉取上游更新 +pulls.upstream_up_to_date=上游分支没有新的更新 +pulls.upstream_error=获取上游分支信息错误 pulls.no_merge_desc=由于未启用合并选项,此合并请求无法被合并。 pulls.no_merge_helper=在项目设置中启用合并选项或者手工合并请求。 diff --git a/public/img/icons.svg b/public/img/icons.svg new file mode 100644 index 000000000..3a83f8cdf --- /dev/null +++ b/public/img/icons.svg @@ -0,0 +1,1653 @@ +[1,653 lines of SVG sprite markup omitted: an icon set exported from Sketch ("切图"); the vector data did not survive extraction, and only stray text labels remain: 存量, R, M, A, Microsoft, Google, 托管, tts.wav, CSB, 共享, 默认, GPU, NEW] \ No newline at end of file diff --git a/public/img/loading.gif b/public/img/loading.gif new file mode 100644 index 000000000..2c45c7bca Binary files /dev/null and b/public/img/loading.gif differ diff --git a/public/self/labelTaskPage.js b/public/self/labelTaskPage.js index 4ce48407a..a160c3961 100644 --- a/public/self/labelTaskPage.js +++ b/public/self/labelTaskPage.js @@ -23,7 +23,7 @@ var pageSize = 10; var tableData; var tablePageData; -var preDictTaskData; +var modelListData; var dataSetTaskData; var userInfoData; @@ -35,8 +35,8 @@ console.log("repoId=" + repoId); function setDataSetTask(){ - dataset_task_list(); - display_createdatasetlabel(0); + //dataset_task_list(); + //display_createdatasetlabel(0); //getUser(); //dislpayUser(); getLabelPropertyTask(); @@ -121,46 +121,31 @@ function countLabel(){ } -function display_createdatasetlabel(sindex=-1){ - var html=""; - for (var i=0;i"+dataSetTaskData[i].task_name+ - ""; - $("#datasetlabeltaskname").attr({value:dataSetTaskData[i].task_name + "-人工标注"}); - }else{ - var row = ""; - } - html=html+row; - } - console.log(html); - document.getElementById('dataset_list').innerHTML=html; -} + function setPredictTask(){ - pre_predict_task_list(); - display_createlabel(0); - getUser(); - dislpayUser(); - getLabelPropertyTask(); - displayLabelPropertyTask(); + //dataset_task_list(); + //display_createdatasetlabel(1); + + + get_model_list(); + + displayModelTask(); + $(".ui.predict.modal").modal("show"); } -function pre_predict_task_list(){ +function get_model_list(){ $.ajax({ type:"GET", - url:ip + "/api/pre-predict-taskforLabel/", + url:ip + "/api/queryAlgModelForAutoLabel/", headers: { authorization:token, }, dataType:"json", async:false, success:function(json){ - preDictTaskData = json; + modelListData = json; console.log(json); // return json.token; }, @@ -170,12 +155,26 @@ }); } -function sele_Change(sele){ - var predictTaskName = $('#pre_predict_task_for_label option:selected').text(); - console.log("select predictTaskName =" + predictTaskName); - $("#labeltaskname").attr({value:predictTaskName+"-人工标注"}); +function displayModelTask(){ + var html=""; + for (var i=0;i"+modelListData[i].model_name + + ""; + }else{ + var row = ""; + } + html=html+row; + }
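	// (the loop above renders one option entry per model returned by /api/queryAlgModelForAutoLabel/, pre-selecting the first; the markup is injected into the #model_list select just below)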
console.log(html); + + document.getElementById('model_list').innerHTML=html; } + function sele_export_Change(sele){ var isNeedPicture = $('#isNeedPicture option:selected').val(); @@ -201,27 +200,13 @@ function dataset_sele_Change(sele){ $("#datasetlabeltaskname").attr({value:dataset_listName+"-人工标注"}); } - - -function display_createlabel(sindex=-1){ - var html=""; - for (var i=0;i"+preDictTaskData[i].task_name+ - ""; - $("#labeltaskname").attr({value:preDictTaskData[i].task_name + "-人工标注"}); - }else{ - var row = ""; - } - html=html+row; - } - console.log(html); - document.getElementById('pre_predict_task_for_label').innerHTML=html; +function dataset_auto_sele_Change(sele){ + var dataset_listName = $('#dataset_list_auto option:selected').text(); + console.log("select dataset_list_auto =" + dataset_listName); + $("#autolabeltaskname").attr({value:dataset_listName+"-自动标注"}); } + var createsucced; function submit_datasettask(){ @@ -242,7 +227,7 @@ function submit_datasettask(){ } var labelpropertytaskid = $('#labelpropertytask_dataset option:selected').val(); createsucced = true; - label_task_create(task_name, relate_task_id, 2,assign_user_id,labelpropertytaskid); + label_task_create(task_name, relate_task_id, 2,assign_user_id,labelpropertytaskid,-1); if(createsucced){ $(".ui.dataset.modal").modal("hide"); //$("#labelDataModal").modal('hide'); @@ -250,25 +235,30 @@ function submit_datasettask(){ page(0,pageSize); } -function submit_labeltask(){ - console.log($('#labeltaskname').val()); - var task_name = $('#labeltaskname').val(); +function submit_autolabeltask(){ + console.log($('#autolabeltaskname').val()); + var task_name = $('#autolabeltaskname').val(); if (isEmpty(task_name) || task_name.length > 32){ - alert("人工标注任务名称不能为空或者不能超过32个字符。"); + alert("自动标注任务名称不能为空或者不能超过32个字符。"); return; } - var relate_task_id = $('#pre_predict_task_for_label option:selected').val(); + var model_id = $('#model_list option:selected').val(); + if(isEmpty(model_id)){ + alert("标注模型不能为空。"); + return; + } + var relate_task_id = $('#dataset_list_auto option:selected').val(); if(isEmpty(relate_task_id)){ - alert("关联的自动标注任务不能为空。"); + alert("数据集对象不能为空。"); return; } var assign_user_id = $('#label_assign_user option:selected').val(); if(isEmpty(assign_user_id)){ assign_user_id = token; } - var labelpropertytaskid = $('#labelpropertytask_dataset option:selected').val(); + var labelpropertytaskid = $('#labelpropertytask_auto option:selected').val(); createsucced = true; - label_task_create(task_name, relate_task_id, 1,assign_user_id,labelpropertytaskid); + label_task_create(task_name, relate_task_id, 1,assign_user_id,labelpropertytaskid,model_id); if(createsucced){ $("#labelModal").modal('hide'); } @@ -276,10 +266,10 @@ function submit_labeltask(){ } -function label_task_create(task_name, relate_task_id, taskType,assign_user_id,labelpropertytaskid){ +function label_task_create(task_name, relate_task_id, taskType,assign_user_id,labelpropertytaskid,model_id){ - var task_flow_type = $('#task_flow_type option:selected').val(); + var task_flow_type = $('#task_flow_type option:selected').val(); var relate_other_label_task = []; if(task_flow_type == 2){ @@ -309,20 +299,21 @@ function label_task_create(task_name, relate_task_id, taskType,assign_user_id,la 'assign_user_id':assign_user_id, 'task_flow_type':task_flow_type, 'relate_task_id':relate_task_id,//task id - 'relate_other_label_task': relate_other_label_task_jsonstr, + 'relate_other_label_task': relate_other_label_task_jsonstr, "taskType": taskType, - "appid": repoId, - 
"createUserName":userName, - "labelPropertyTaskId":labelpropertytaskid + "appid": repoId, + "createUserName":userName, + "labelPropertyTaskId":labelpropertytaskid, + "modelId":model_id }), success:function(res){ console.log(res); if(res.code == 0){ - alert("人工标注任务创建成功!"); + alert("自动标注任务创建成功!"); createsucced = true; } else{ - alert("创建人工标注任务失败," + res.message); + alert("创建自动标注任务失败," + res.message); createsucced = false; } }, @@ -433,11 +424,11 @@ function delete_labeltask(){ return; } var Check = $("table[id='label_task_list'] input[type=checkbox]:checked");//在table中找input下类型为checkbox属性为选中状态的数据 -       Check.each(function () {//遍历 -             var row = $(this).parent("td").parent("tr");//获取选中行 -             var id = row.find("[id='labeltask_id']").html();//获取name='Sid'的值 -             delete_labeltask_byid(id); -         }); + Check.each(function () {//遍历 + var row = $(this).parent("td").parent("tr");//获取选中行 + var id = row.find("[id='labeltask_id']").html();//获取name='Sid'的值 + delete_labeltask_byid(id); + }); page(0,pageSize); } @@ -480,13 +471,13 @@ function delete_labeltask_byid(label_task_id){ function getTaskTypeDesc(task_type){ if(task_type == 1){ - return "自动标注结果"; + return "图片-自动标注"; }else if(task_type == 2){ - return "原始数据集-图片"; + return "图片"; }else if(task_type == 3){ - return "原始数据集-CT影像"; + return "CT影像"; }else if(task_type == 4){ - return "原始数据集-视频"; + return "视频"; } return "其它"; } @@ -508,6 +499,12 @@ function getTaskSataus(task_status,task_status_desc){ else if(task_status == -1){ return "关联的数据集已经被删除。" } + else if(task_status == 20){ + return "自动标注进行中:" + task_status_desc; + }else if(task_status == 21){ + return task_status_desc; + } + return ""; } function getVerify(task_status,id,task_type){ @@ -522,7 +519,7 @@ function getVerify(task_status,id,task_type){ } function getLabel(task_status,id,task_type,task_flow_type){ - if(task_status == 0 && (userType == 1 || userType == 0)){ + if((task_status == 0 || task_status == 21) && (userType == 1 || userType == 0)){ return "" + getLabelDesc(task_flow_type) + "标注
"; }else{ return ""; @@ -570,8 +567,59 @@ function display_list(){ $('#label_task_list tr').find('td:eq(1)').hide(); $('#label_task_list tr').find('th:eq(1)').hide(); -} + isNeedToRefresh = false; + taskNeedIntervalToRefresh(); +} + +var isNeedToRefresh; +var refreshTimeId = []; +var refreshCount; +var refreshMaxTime; + +function taskNeedIntervalToRefresh(){ + var isNeedToSetInterval = false; + refreshMaxTime= 1; + if(!isEmpty(tableData)){ + for (var i=0;i 10) { + if task.Status == string(models.JobRunning) && (timestamp-int64(task.Cloudbrain.CreatedUnix) > 10) { ciTasks[i].CanDebug = true } else { ciTasks[i].CanDebug = false } + + ciTasks[i].CanDel = models.CanDelJob(ctx.IsSigned, ctx.User, task) } pager := context.NewPagination(int(count), setting.UI.IssuePagingNum, page, 5) @@ -361,6 +366,78 @@ func CloudBrainStop(ctx *context.Context) { ctx.Redirect(setting.AppSubURL + ctx.Repo.RepoLink + "/cloudbrain") } +func StopJobsByUserID(userID int64) { + cloudBrains, err := models.GetCloudbrainsNeededStopByUserID(userID) + if err != nil { + log.Warn("Failed to get cloudBrain info", err) + return + } + StopJobs(cloudBrains) + +} + +func StopJobsByRepoID(repoID int64) { + cloudBrains, err := models.GetCloudbrainsNeededStopByRepoID(repoID) + if err != nil { + log.Warn("Failed to get cloudBrain info", err) + return + } + StopJobs(cloudBrains) +} + +/** + + */ +func StopJobs(cloudBrains []*models.Cloudbrain) { + + for _, taskInfo := range cloudBrains { + + if taskInfo.Type == models.TypeCloudBrainOne { + err := retry(3, time.Second*30, func() error { + return cloudbrain.StopJob(taskInfo.JobID) + }) + + logErrorAndUpdateJobStatus(err, taskInfo) + } else { + param := models.NotebookAction{ + Action: models.ActionStop, + } + err := retry(3, time.Second*30, func() error { + _, err := modelarts.StopJob(taskInfo.JobID, param) + return err + }) + logErrorAndUpdateJobStatus(err, taskInfo) + } + + } +} + +func retry(attempts int, sleep time.Duration, f func() error) (err error) { + for i := 0; i < attempts; i++ { + if i > 0 { + log.Warn("retrying after error:", err) + time.Sleep(sleep) + } + err = f() + if err == nil { + return nil + } + } + return fmt.Errorf("after %d attempts, last error: %s", attempts, err) +} + +func logErrorAndUpdateJobStatus(err error, taskInfo *models.Cloudbrain) { + if err != nil { + log.Warn("Failed to stop cloudBrain job:"+taskInfo.JobID, err) + } else { + taskInfo.Status = string(models.JobStopped) + err = models.UpdateJob(taskInfo) + if err != nil { + log.Warn("UpdateJob failed", err) + } + } +} + func CloudBrainDel(ctx *context.Context) { var jobID = ctx.Params(":jobid") task, err := models.GetCloudbrainByJobID(jobID) diff --git a/routers/repo/modelarts.go b/routers/repo/modelarts.go index 2dc327d33..080c36377 100755 --- a/routers/repo/modelarts.go +++ b/routers/repo/modelarts.go @@ -57,6 +57,8 @@ func ModelArtsIndex(ctx *context.Context) { } else { ciTasks[i].CanDebug = false } + + ciTasks[i].CanDel = models.CanDelJob(ctx.IsSigned, ctx.User, task) } pager := context.NewPagination(int(count), setting.UI.IssuePagingNum, page, 5) diff --git a/routers/repo/repo_statistic.go b/routers/repo/repo_statistic.go new file mode 100755 index 000000000..ceb410958 --- /dev/null +++ b/routers/repo/repo_statistic.go @@ -0,0 +1,122 @@ +package repo + +import ( + "time" + + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/log" +) + +//auto daily or manually +func RepoStatisticAuto() { + log.Info("", time.Now()) + yesterday := time.Now().AddDate(0, 0, -1).Format("2006-01-02") + 
RepoStatisticDaily(yesterday) +} + +func RepoStatisticDaily(date string) { + log.Info("start RepoStatisticDaily: %s", date) + if err := models.DeleteRepoStatDaily(date); err != nil { + log.Error("DeleteRepoStatDaily failed: %v", err.Error()) + return + } + + repos, err := models.GetAllRepositories() + if err != nil { + log.Error("GetAllRepositories failed: %v", err.Error()) + return + } + + for _, repo := range repos { + log.Info("start statistic: %s", repo.Name) + repoGitStat, err := models.GetRepoKPIStats(repo) + if err != nil { + log.Error("GetRepoKPIStats failed: %s", repo.Name) + log.Error("failed statistic: %s", repo.Name) + continue + } + + var issueFixedRate float32 + if repo.NumIssues != 0 { + issueFixedRate = float32(repo.NumClosedIssues) / float32(repo.NumIssues) + } + + numVersions, err := models.GetReleaseCountByRepoID(repo.ID, models.FindReleasesOptions{}) + if err != nil { + log.Error("GetReleaseCountByRepoID failed: %s", repo.Name) + log.Error("failed statistic: %s", repo.Name) + continue + } + + datasetSize, err := getDatasetSize(repo) + if err != nil { + log.Error("getDatasetSize failed: %s", repo.Name) + log.Error("failed statistic: %s", repo.Name) + continue + } + + numComments, err := models.GetCommentCountByRepoID(repo.ID) + if err != nil { + log.Error("GetCommentCountByRepoID failed: %s", repo.Name) + log.Error("failed statistic: %s", repo.Name) + continue + } + + //beginTime, endTime := getStatTime(date) + //numVisits := repository.AppointProjectView(repo.OwnerName, repo.Name, beginTime, endTime) + numVisits := 0 + + repoStat := models.RepoStatistic{ + RepoID: repo.ID, + Date: date, + NumWatches: int64(repo.NumWatches), + NumStars: int64(repo.NumStars), + NumDownloads: repo.CloneCnt, + NumComments: numComments, + NumVisits: int64(numVisits), + NumClosedIssues: int64(repo.NumClosedIssues), + NumVersions: numVersions, + NumDevMonths: repoGitStat.DevelopAge, + RepoSize: repo.Size, + DatasetSize: datasetSize, + NumModels: 0, + NumWikiViews: repoGitStat.WikiPages, + NumCommits: repo.NumCommit, + NumIssues: int64(repo.NumIssues), + NumPulls: int64(repo.NumPulls), + IssueFixedRate: issueFixedRate, + NumContributor: repoGitStat.Contributors, + NumKeyContributor: repoGitStat.KeyContributors, + } + + if _, err = models.InsertRepoStat(&repoStat); err != nil { + log.Error("InsertRepoStat failed: %s", repo.Name) + log.Error("failed statistic: %s", repo.Name) + continue + } + + log.Info("finish statistic: %s", repo.Name) + } + +} + +func getDatasetSize(repo *models.Repository) (int64, error) { + dataset, err := models.GetDatasetByRepo(repo) + if err != nil { + return 0, err + } + + return models.GetAttachmentSizeByDatasetID(dataset.ID) +} + +//getStatTime maps a "2006-01-02" date string to that calendar day in UTC+8: begin is 00:00 CST and end is 24:00 CST, both returned as RFC3339 strings for the ELK visit query +func getStatTime(timeStr string) (string, string) { + t, _ := time.Parse("2006-01-02", timeStr) + timeNumber := t.Unix() + beginTimeNumber := timeNumber - 8*60*60 + endTimeNumber := timeNumber + 16*60*60 + beginTime := time.Unix(beginTimeNumber, 0).Format(time.RFC3339) + endTime := time.Unix(endTimeNumber, 0).Format(time.RFC3339) + log.Info("%s, %s", beginTime, endTime) + + return beginTime, endTime +} diff --git a/routers/repo/setting.go b/routers/repo/setting.go index b2ca042de..bf11f9e5a 100644 --- a/routers/repo/setting.go +++ b/routers/repo/setting.go @@ -440,6 +440,7 @@ func SettingsPost(ctx *context.Context, form auth.RepoSettingForm) { return } log.Trace("Repository deleted: %s/%s", ctx.Repo.Owner.Name, repo.Name) + go StopJobsByRepoID(repo.ID) ctx.Flash.Success(ctx.Tr("repo.settings.deletion_success")) ctx.Redirect(ctx.Repo.Owner.DashboardLink()) diff --git
a/routers/repo/user_data_analysis.go b/routers/repo/user_data_analysis.go new file mode 100755 index 000000000..3260780ac --- /dev/null +++ b/routers/repo/user_data_analysis.go @@ -0,0 +1,59 @@ +package repo + +import ( + "time" + + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/log" +) + +func TimingCountData() { + //query wiki data + log.Info("start to time count data") + wikiMap := make(map[string]int) + + currentTimeNow := time.Now() + log.Info("current time:" + currentTimeNow.Format("2006-01-02 15:04:05")) + + yesterday := currentTimeNow.AddDate(0, 0, -1) + + repoList, err := models.GetAllRepositories() + if err != nil { + log.Error("query repo error: %v", err) + return + } + log.Info("start to query wiki data") + for _, repoRecord := range repoList { + wikiPath := models.WikiPath(repoRecord.OwnerName, repoRecord.Name) + latestCommitTime, err := git.GetLatestCommitTime(wikiPath) + if err == nil { + log.Info("last commit time:" + latestCommitTime.Format("2006-01-02 15:04:05") + " wikiPath=" + wikiPath) + if latestCommitTime.After(yesterday) { + wikiRepo, _, err := FindWikiRepoCommitByWikiPath(wikiPath) + if err != nil { + log.Error("wiki not exist. wikiPath=" + wikiPath) + } else { + log.Info("wiki exist, wikiPath=" + wikiPath) + list, err := wikiRepo.GetCommitByPathAndDays(wikiPath, 1) + if err != nil { + log.Info("err=%v", err) + } else { + for logEntry := list.Front(); logEntry != nil; logEntry = logEntry.Next() { + commit := logEntry.Value.(*git.Commit) + log.Info("commit msg=" + commit.CommitMessage + " time=" + commit.Committer.When.Format("2006-01-02 15:04:05") + " user=" + commit.Committer.Name) + if _, ok := wikiMap[commit.Committer.Name]; !ok { + wikiMap[commit.Committer.Name] = 1 + } else { + wikiMap[commit.Committer.Name] += 1 + } + } + } + + } + } + } + } + //other user info data + models.CountData(wikiMap) +} diff --git a/routers/repo/view.go b/routers/repo/view.go index 1546f53b7..9477b27dd 100644 --- a/routers/repo/view.go +++ b/routers/repo/view.go @@ -790,6 +790,46 @@ func renderCode(ctx *context.Context) { } } + //if this repository is a fork + if ctx.Repo.Repository.IsFork { + //build the compare parameters used by fetchUpstream + /* + // 1. /{:baseOwner}/{:baseRepoName}/compare/{:baseBranch}...{:headBranch} + // 2. /{:baseOwner}/{:baseRepoName}/compare/{:baseBranch}...{:headOwner}:{:headBranch} + // 3. /{:baseOwner}/{:baseRepoName}/compare/{:baseBranch}...{:headOwner}/{:headRepoName}:{:headBranch} + */ + baseGitRepo, err := git.OpenRepository(ctx.Repo.Repository.BaseRepo.RepoPath()) + if err != nil { + log.Error("error open baseRepo:%s", ctx.Repo.Repository.BaseRepo.RepoPath()) + ctx.Data["FetchUpstreamCnt"] = -1 // minus value indicates error + } else { + defer baseGitRepo.Close() + if _, err := baseGitRepo.GetBranch(ctx.Repo.BranchName); err == nil { + //base repo has the same branch, then compare between current repo branch and base repo's branch + compareUrl := ctx.Repo.BranchName + "..." + ctx.Repo.Repository.BaseRepo.OwnerName + "/" + ctx.Repo.Repository.BaseRepo.Name + ":" + ctx.Repo.BranchName + ctx.SetParams("*", compareUrl) + ctx.Data["UpstreamSameBranchName"] = true + } else { + //else, compare between current repo branch and base repo's default branch + compareUrl := ctx.Repo.BranchName + "..."
+ ctx.Repo.Repository.BaseRepo.OwnerName + "/" + ctx.Repo.Repository.BaseRepo.Name + ":" + ctx.Repo.Repository.BaseRepo.DefaultBranch + ctx.SetParams("*", compareUrl) + ctx.Data["UpstreamSameBranchName"] = false + } + _, _, headGitRepo, compareInfo, _, _ := ParseCompareInfo(ctx) + if headGitRepo != nil { + defer headGitRepo.Close() + } + if compareInfo != nil { + if compareInfo.Commits != nil { + log.Info("compareInfo commits count: %d", compareInfo.Commits.Len()) + ctx.Data["FetchUpstreamCnt"] = compareInfo.Commits.Len() + } else { + log.Info("compareInfo nothing different") + ctx.Data["FetchUpstreamCnt"] = 0 + } + } else { + ctx.Data["FetchUpstreamCnt"] = -1 // minus value indicates error + } + } } ctx.Data["Paths"] = paths ctx.Data["TreeLink"] = treeLink ctx.Data["TreeNames"] = treeNames diff --git a/routers/repo/wiki.go b/routers/repo/wiki.go index 5da01f21a..03ab42036 100644 --- a/routers/repo/wiki.go +++ b/routers/repo/wiki.go @@ -82,6 +82,20 @@ func findEntryForFile(commit *git.Commit, target string) (*git.TreeEntry, error) return commit.GetTreeEntryByPath(unescapedTarget) } +func FindWikiRepoCommitByWikiPath(wikiPath string) (*git.Repository, *git.Commit, error) { + wikiRepo, err := git.OpenRepository(wikiPath) + if err != nil { + log.Info("get wiki error: %v", err) + return nil, nil, err + } + + commit, err := wikiRepo.GetBranchCommit("master") + if err != nil { + return wikiRepo, nil, err + } + return wikiRepo, commit, nil +} + func findWikiRepoCommit(ctx *context.Context) (*git.Repository, *git.Commit, error) { wikiRepo, err := git.OpenRepository(ctx.Repo.Repository.WikiPath()) if err != nil { @@ -150,6 +164,7 @@ func renderViewPage(ctx *context.Context) (*git.Repository, *git.TreeEntry) { if !entry.IsRegular() { continue } + wikiName, err := wiki_service.FilenameToName(entry.Name()) if err != nil { if models.IsErrWikiInvalidFileName(err) { diff --git a/routers/secure/user.go b/routers/secure/user.go index 8567dc9e6..d5b303d5e 100755 --- a/routers/secure/user.go +++ b/routers/secure/user.go @@ -7,6 +7,7 @@ package secure import ( "net/http" + "net/mail" "strings" "code.gitea.io/gitea/models" @@ -63,6 +64,14 @@ func CreateUser(ctx *context.Context, form api.CreateUserOption) { // "422": // "$ref": "#/responses/validationError" + _, err1 := mail.ParseAddress(form.Email) + if err1 != nil { + ctx.JSON(http.StatusBadRequest, map[string]string{ + "error_msg": "Invalid email address format.", + }) + return + } + u := &models.User{ Name: form.Username, FullName: form.FullName, diff --git a/routers/user/auth.go b/routers/user/auth.go index 13e338565..126d0a4c8 100755 --- a/routers/user/auth.go +++ b/routers/user/auth.go @@ -11,6 +11,8 @@ import ( "net/http" "strings" + "code.gitea.io/gitea/routers/repo" + "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/auth" "code.gitea.io/gitea/modules/auth/oauth2" @@ -1056,6 +1058,7 @@ func SignOut(ctx *context.Context) { }) } HandleSignOut(ctx) + go repo.StopJobsByUserID(ctx.User.ID) ctx.Redirect(setting.AppSubURL + "/") } diff --git a/templates/base/head_navbar.tmpl b/templates/base/head_navbar.tmpl index 0dac7824c..c22cb9fa7 100755 --- a/templates/base/head_navbar.tmpl +++ b/templates/base/head_navbar.tmpl @@ -154,7 +154,7 @@ {{svg "octicon-person" 16}} {{.i18n.Tr "register"}} {{end}} - + {{svg "octicon-sign-in" 16}} {{.i18n.Tr "sign_in"}} diff --git a/templates/base/head_navbar_home.tmpl b/templates/base/head_navbar_home.tmpl index 2379c1782..e6729da62 100644 --- a/templates/base/head_navbar_home.tmpl +++ b/templates/base/head_navbar_home.tmpl @@ -154,7 +154,7 @@ {{svg "octicon-person" 16}}
{{.i18n.Tr "register"}} {{end}} - + {{svg "octicon-sign-in" 16}} {{.i18n.Tr "sign_in"}} diff --git a/templates/explore/dataset_search.tmpl b/templates/explore/dataset_search.tmpl index 6c973026a..aad8f5083 100644 --- a/templates/explore/dataset_search.tmpl +++ b/templates/explore/dataset_search.tmpl @@ -1,12 +1,12 @@
[dataset_search.tmpl hunk body lost in extraction: only the -/+ markers survived]
diff --git a/templates/explore/repo_left.tmpl b/templates/explore/repo_left.tmpl index af32851c0..ca6a3b3dd 100755 --- a/templates/explore/repo_left.tmpl +++ b/templates/explore/repo_left.tmpl @@ -6,67 +6,67 @@ 全部领域 - + 大模型 - + AI开发工具 - + 计算机视觉 - + 自然语言处理 - + 机器学习 - + 神经网络 - + 自动驾驶 - + 机器人 - + 联邦学习 - + 数据挖掘 - + diff --git a/templates/explore/repo_list.tmpl b/templates/explore/repo_list.tmpl index a9eaaaccf..f27990701 100755 --- a/templates/explore/repo_list.tmpl +++ b/templates/explore/repo_list.tmpl @@ -40,20 +40,20 @@
@@ -143,7 +143,7 @@ {{if .Topics }}
{{range .Topics}} - {{if ne . "" }}
{{.}}
{{end}} + {{if ne . "" }}
{{.}}
{{end}} {{end}}
{{end}} diff --git a/templates/explore/repo_search.tmpl b/templates/explore/repo_search.tmpl index 7ed853fc8..d30ad5625 100644 --- a/templates/explore/repo_search.tmpl +++ b/templates/explore/repo_search.tmpl @@ -9,7 +9,7 @@
[repo_search.tmpl hunk body lost in extraction: only the -/+ markers survived]
diff --git a/templates/explore/search.tmpl b/templates/explore/search.tmpl index 11e89d50a..4353f7302 100644 --- a/templates/explore/search.tmpl +++ b/templates/explore/search.tmpl @@ -1,11 +1,11 @@
[search.tmpl hunk bodies lost in extraction: only the -/+ markers survived]
diff --git a/templates/org/repo_list.tmpl b/templates/org/repo_list.tmpl index 2e047d209..eab03c591 100644 --- a/templates/org/repo_list.tmpl +++ b/templates/org/repo_list.tmpl @@ -58,7 +58,7 @@ {{if .Topics }}
{{range .Topics}} - {{if ne . "" }}
{{.}}
{{end}} + {{if ne . "" }}
{{.}}
{{end}} {{end}}
{{end}} diff --git a/templates/repo/cloudbrain/index.tmpl b/templates/repo/cloudbrain/index.tmpl index 532a8f4ff..3af96998c 100755 --- a/templates/repo/cloudbrain/index.tmpl +++ b/templates/repo/cloudbrain/index.tmpl @@ -187,6 +187,12 @@ cursor: pointer; pointer-events: none; } + .time-show{ + font-size: 10px; + margin-top: 0.4rem; + display: inline-block; + } + @@ -235,45 +241,91 @@
[cloudbrain/index.tmpl: old markup lost in extraction; an HTML comment block ("-->") closed in this span]
+
+
+
+ {{$.i18n.Tr "repo.cloudbrain_task"}} +
+
+ {{$.i18n.Tr "repo.cloudbrain_status_createtime"}} +
+
+ {{$.i18n.Tr "repo.cloudbrain_creator"}} +
+
+ {{$.i18n.Tr "repo.cloudbrain_operate"}} +
+ +
+ +
{{range .Tasks}}
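{{/* one row per cloudbrain task follows: task name plus status/create time, creator avatar, and operate buttons, matching the header columns rendered above */}}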
-
- - {{svg "octicon-tasklist" 16}} - {{.JobName}} +
- - {{.Status}} + + + + {{.Status}} + + - {{TimeSinceUnix .CreatedUnix $.Lang}} + {{TimeSinceUnix .Cloudbrain.CreatedUnix $.Lang}}
+
+ {{if .User.Name}} + + {{else}} + + {{end}} +
-
+
{{if and (ne .Status "WAITING") (ne .JobType "DEBUG")}} 评分 @@ -304,10 +356,10 @@
- -
+ + {{$.CsrfTokenHtml}} - + 删除
@@ -436,15 +488,18 @@ $(document).ready(loadJobStatus); function loadJobStatus() { $(".job-status").each((index, job) => { + console.log("---------",index,job) const jobID = job.dataset.jobid; const repoPath = job.dataset.repopath; if (job.textContent.trim() == 'STOPPED') { + return } $.get(`/api/v1/repos/${repoPath}/cloudbrain/${jobID}`, (data) => { const jobID = data.JobID const status = data.JobStatus + console.log("status",status) if (status != job.textContent.trim()) { //$('#' + jobID).text(status) //if (status == 'STOPPED') { diff --git a/templates/repo/datasets/dataset_list.tmpl b/templates/repo/datasets/dataset_list.tmpl index cf6c47926..5cb35aa77 100755 --- a/templates/repo/datasets/dataset_list.tmpl +++ b/templates/repo/datasets/dataset_list.tmpl @@ -1,63 +1,52 @@ + {{if .Attachments}} {{range .Attachments}}
-
+ -
- {{.Size | FileSize}} -
-
- {{svg "octicon-flame" 16}} {{(.DownloadCount | PrettyNumber)}} -
- -
- {{svg "octicon-file" 16}} -
- -
- {{svg "octicon-file-binary" 16}} -
- - - {{if $.IsSigned}} -
- +
+
+ {{svg "octicon-flame" 16}} {{(.DownloadCount | PrettyNumber)}} + + {{svg "octicon-file-binary" 16}}
- {{end}} - {{if not .CanDel}} -
- {{if .IsPrivate}} {{$.i18n.Tr "dataset.private"}} {{else}} {{$.i18n.Tr "dataset.public"}} {{end}} + {{if ne .DecompressState 0}} + - {{else}} - {{if $.Permission.CanWrite $.UnitTypeDatasets}} + {{end}} + {{if not .CanDel}} + {{$.i18n.Tr "dataset.delete"}} + {{if .IsPrivate}} {{$.i18n.Tr "dataset.private"}} {{else}} {{$.i18n.Tr "dataset.public"}} {{end}} + {{else}} + {{if $.Permission.CanWrite $.UnitTypeDatasets}} + {{$.i18n.Tr "dataset.delete"}} {{if $.Repository.IsPrivate}} - - {{ else }} -
- + {{end}} + {{else}} + {{$.i18n.Tr "dataset.delete"}} + {{if .IsPrivate}} {{$.i18n.Tr "dataset.private"}} {{else}} {{$.i18n.Tr "dataset.public"}} {{end}} {{end}} - - {{else}} - {{end}} - {{end}} +
+
{{end}} diff --git a/templates/repo/datasets/index.tmpl b/templates/repo/datasets/index.tmpl index 6fa6ccb69..167b1ef44 100755 --- a/templates/repo/datasets/index.tmpl +++ b/templates/repo/datasets/index.tmpl @@ -37,7 +37,7 @@
{{if .Permission.CanWrite $.UnitTypeDatasets}} @@ -66,7 +66,7 @@
{{.i18n.Tr "cancel"}} - +
@@ -80,7 +80,7 @@
-

{{if eq .Type 0}}{{.i18n.Tr "repo.cloudbrain1"}}{{else}}{{.i18n.Tr "repo.cloudbrain2"}}{{end}}-{{.i18n.Tr "datasets"}}

+ {{if eq .Type 0}}{{.i18n.Tr "repo.cloudbrain1"}}{{else}}{{.i18n.Tr "repo.cloudbrain2"}}{{end}}-{{.i18n.Tr "datasets"}}