diff --git a/cmd/serv.go b/cmd/serv.go index 7c2be5157..4785315c8 100644 --- a/cmd/serv.go +++ b/cmd/serv.go @@ -6,6 +6,7 @@ package cmd import ( + "code.gitea.io/gitea/services/repository" "encoding/json" "fmt" "net/http" @@ -208,6 +209,21 @@ func runServ(c *cli.Context) error { os.Setenv(models.ProtectedBranchPRID, fmt.Sprintf("%d", 0)) os.Setenv(models.EnvIsDeployKey, fmt.Sprintf("%t", results.IsDeployKey)) os.Setenv(models.EnvKeyID, fmt.Sprintf("%d", results.KeyID)) + //set environment for pre-receive hook script + if verb == "git-receive-pack" { + os.Setenv(models.EnvRepoMaxFileSize, fmt.Sprint(setting.Repository.Upload.FileMaxSize)) + os.Setenv(models.EnvRepoMaxSize, fmt.Sprint(setting.Repository.RepoMaxSize)) + os.Setenv(models.EnvPushSizeCheckFlag, fmt.Sprint(setting.Repository.Upload.ShellFlag)) + if setting.Repository.Upload.ShellFlag == repository.SHELL_FLAG_ON { + env, _ := private.GetHookConfig(username, reponame) + if env != nil && len(env) > 0 { + repoSize := env[models.EnvRepoSize] + if repoSize != "" { + os.Setenv(models.EnvRepoSize, repoSize) + } + } + } + } //LFS token authentication if verb == lfsAuthenticateVerb { diff --git a/models/dbsql/repo_foreigntable_for_es.sql b/models/dbsql/repo_foreigntable_for_es.sql index 7e06fd99e..e927eb7c2 100644 --- a/models/dbsql/repo_foreigntable_for_es.sql +++ b/models/dbsql/repo_foreigntable_for_es.sql @@ -523,17 +523,21 @@ DROP TRIGGER IF EXISTS es_udpate_repository_lang on public.language_stat; CREATE OR REPLACE FUNCTION public.udpate_repository_lang() RETURNS trigger AS $def$ + DECLARE + privateValue bigint; BEGIN if (TG_OP = 'UPDATE') then - update public.repository_es SET lang=(select array_to_string(array_agg(language order by percentage desc),',') from public.language_stat where repo_id=NEW.repo_id) where id=NEW.repo_id; + select into privateValue updated_unix from public.repository where id=NEW.repo_id; + update public.repository_es SET updated_unix=privateValue,lang=(select array_to_string(array_agg(language order by percentage desc),',') from public.language_stat where repo_id=NEW.repo_id) where id=NEW.repo_id; elsif (TG_OP = 'INSERT') then - update public.repository_es SET lang=(select array_to_string(array_agg(language order by percentage desc),',') from public.language_stat where repo_id=NEW.repo_id) where id=NEW.repo_id; + select into privateValue updated_unix from public.repository where id=NEW.repo_id; + update public.repository_es SET updated_unix=privateValue,lang=(select array_to_string(array_agg(language order by percentage desc),',') from public.language_stat where repo_id=NEW.repo_id) where id=NEW.repo_id; elsif (TG_OP = 'DELETE') then if exists(select 1 from public.repository where id=OLD.repo_id) then update public.repository_es SET lang=(select array_to_string(array_agg(language order by percentage desc),',') from public.language_stat where repo_id=OLD.repo_id) where id=OLD.repo_id; end if; end if; - return null; + return NEW; END; $def$ LANGUAGE plpgsql; diff --git a/models/repo.go b/models/repo.go index 25bfb4a74..db2694617 100755 --- a/models/repo.go +++ b/models/repo.go @@ -1554,6 +1554,11 @@ func GetAllMirrorRepositoriesCount() (int64, error) { return x.Where("is_mirror = ?", true).Count(repo) } +func GetAllOrgRepositoriesCount() (int64, error) { + repo := new(Repository) + return x.Table("repository").Join("INNER", []string{"\"user\"", "u"}, "repository.owner_id = u.id and u.type=1").Count(repo) +} + func GetAllForkRepositoriesCount() (int64, error) { repo := new(Repository) return x.Where("is_fork = 
?", true).Count(repo) diff --git a/models/summary_statistic.go b/models/summary_statistic.go index e5cf54b75..4e73e2c54 100644 --- a/models/summary_statistic.go +++ b/models/summary_statistic.go @@ -2,6 +2,8 @@ package models import ( "fmt" + "strconv" + "time" "code.gitea.io/gitea/modules/timeutil" ) @@ -45,6 +47,7 @@ type SummaryStatistic struct { NumRepoFork int64 `xorm:"NOT NULL DEFAULT 0"` NumRepoMirror int64 `xorm:"NOT NULL DEFAULT 0"` NumRepoSelf int64 `xorm:"NOT NULL DEFAULT 0"` + NumRepoOrg int64 `xorm:"NOT NULL DEFAULT 0"` CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"` UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"` } @@ -69,6 +72,51 @@ func DeleteSummaryStatisticDaily(date string) error { return nil } +func GetLatest2SummaryStatistic() ([]*SummaryStatistic, error) { + summaryStatistics := make([]*SummaryStatistic, 0) + err := xStatistic.Desc("created_unix").Limit(2).Find(&summaryStatistics) + return summaryStatistics, err +} + +func GetSummaryStatisticByTimeCount(beginTime time.Time, endTime time.Time) (int64, error) { + summaryStatistics := new(SummaryStatistic) + total, err := xStatistic.Asc("created_unix").Where("created_unix>=" + strconv.FormatInt(beginTime.Unix(), 10) + " and created_unix<" + strconv.FormatInt(endTime.Unix(), 10)).Count(summaryStatistics) + return total, err +} + +func GetSummaryStatisticByDateCount(dates []string) (int64, error) { + summaryStatistics := new(SummaryStatistic) + total, err := xStatistic.Asc("created_unix").In("date", dates).Count(summaryStatistics) + return total, err +} + + +func GetAllSummaryStatisticByTime(beginTime time.Time, endTime time.Time) ([]*SummaryStatistic, error) { + summaryStatistics := make([]*SummaryStatistic, 0) + err := xStatistic.Asc("created_unix").Where("created_unix>=" + strconv.FormatInt(beginTime.Unix(), 10) + " and created_unix<" + strconv.FormatInt(endTime.Unix(), 10)).Find(&summaryStatistics) + + return summaryStatistics, err +} + +func GetSummaryStatisticByTime(beginTime time.Time, endTime time.Time, page int, pageSize int) ([]*SummaryStatistic, error) { + summaryStatistics := make([]*SummaryStatistic, 0) + err := xStatistic.Asc("created_unix").Limit(pageSize+1, (page-1)*pageSize).Where("created_unix>=" + strconv.FormatInt(beginTime.Unix(), 10) + " and created_unix<" + strconv.FormatInt(endTime.Unix(), 10)).Find(&summaryStatistics) + + return summaryStatistics, err +} + +func GetAllSummaryStatisticByDates(dates []string) ([]*SummaryStatistic, error) { + summaryStatistics := make([]*SummaryStatistic, 0) + err := xStatistic.Asc("created_unix").In("date", dates).Find(&summaryStatistics) + return summaryStatistics, err +} + +func GetSummaryStatisticByDates(dates []string, page int, pageSize int) ([]*SummaryStatistic, error) { + summaryStatistics := make([]*SummaryStatistic, 0) + err := xStatistic.Asc("created_unix").In("date", dates).Limit(pageSize+1, (page-1)*pageSize).Find(&summaryStatistics) + return summaryStatistics, err +} + func InsertSummaryStatistic(summaryStatistic *SummaryStatistic) (int64, error) { return xStatistic.Insert(summaryStatistic) } diff --git a/models/user_business_analysis.go b/models/user_business_analysis.go index 2d7592baf..4de0c6d1a 100644 --- a/models/user_business_analysis.go +++ b/models/user_business_analysis.go @@ -4,6 +4,7 @@ import ( "fmt" "sort" "strconv" + "strings" "time" "code.gitea.io/gitea/modules/log" @@ -227,7 +228,27 @@ func getLastCountDate() int64 { return pageStartTime.Unix() } -func QueryMetrics(start int64, end int64) ([]*UserMetrics, int64) { +func 
QueryMetricsPage(start int64, end int64, page int, pageSize int) ([]*UserMetrics, int64) { + + statictisSess := xStatistic.NewSession() + defer statictisSess.Close() + cond := "count_date >" + fmt.Sprint(start) + " and count_date<" + fmt.Sprint(end) + allCount, err := statictisSess.Where(cond).Count(new(UserMetrics)) + if err != nil { + log.Info("query error." + err.Error()) + return nil, 0 + } + userMetricsList := make([]*UserMetrics, 0) + //.Limit(pageSize, page*pageSize) + if err := statictisSess.Table(new(UserMetrics)).Where(cond).OrderBy("count_date desc"). + Find(&userMetricsList); err != nil { + return nil, 0 + } + postDeal(userMetricsList) + return userMetricsList, allCount +} + +func QueryMetrics(start int64, end int64) ([]*UserMetrics, int) { statictisSess := xStatistic.NewSession() defer statictisSess.Close() userMetricsList := make([]*UserMetrics, 0) @@ -235,7 +256,76 @@ func QueryMetrics(start int64, end int64) ([]*UserMetrics, int64) { Find(&userMetricsList); err != nil { return nil, 0 } - return userMetricsList, int64(len(userMetricsList)) + postDeal(userMetricsList) + return userMetricsList, len(userMetricsList) +} + +func postDeal(userMetricsList []*UserMetrics) { + for _, userMetrics := range userMetricsList { + userMetrics.DisplayDate = userMetrics.DataDate + userMetrics.TotalRegistUser = userMetrics.ActivateRegistUser + userMetrics.NotActivateRegistUser + userMetrics.TotalNotActivateRegistUser = userMetrics.TotalUser - userMetrics.TotalActivateRegistUser + } +} + +func QueryMetricsForAll() []*UserMetrics { + statictisSess := xStatistic.NewSession() + defer statictisSess.Close() + userMetricsList := make([]*UserMetrics, 0) + if err := statictisSess.Table(new(UserMetrics)).OrderBy("count_date desc"). + Find(&userMetricsList); err != nil { + return nil + } + return makeResultForMonth(userMetricsList, len(userMetricsList)) +} + +func QueryMetricsForYear() []*UserMetrics { + currentTimeNow := time.Now() + currentYearEndTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 23, 59, 59, 0, currentTimeNow.Location()) + currentYearStartTime := time.Date(currentTimeNow.Year(), 1, 1, 0, 0, 0, 0, currentTimeNow.Location()) + allUserInfo, count := QueryMetrics(currentYearStartTime.Unix(), currentYearEndTime.Unix()) + + return makeResultForMonth(allUserInfo, count) +} + +func makeResultForMonth(allUserInfo []*UserMetrics, count int) []*UserMetrics { + monthMap := make(map[string]*UserMetrics) + if count > 0 { + for _, userMetrics := range allUserInfo { + dateTime := time.Unix(userMetrics.CountDate, 0) + month := fmt.Sprint(dateTime.Year()) + "-" + fmt.Sprint(int(dateTime.Month())) + if _, ok := monthMap[month]; !ok { + monthUserMetrics := &UserMetrics{ + DisplayDate: month, + ActivateRegistUser: userMetrics.ActivateRegistUser, + NotActivateRegistUser: userMetrics.NotActivateRegistUser, + TotalUser: userMetrics.TotalUser, + TotalNotActivateRegistUser: userMetrics.TotalUser - userMetrics.TotalActivateRegistUser, + TotalActivateRegistUser: userMetrics.TotalActivateRegistUser, + TotalHasActivityUser: userMetrics.TotalHasActivityUser, + HasActivityUser: userMetrics.HasActivityUser, + DaysForMonth: 1, + TotalRegistUser: userMetrics.ActivateRegistUser + userMetrics.NotActivateRegistUser, + } + monthMap[month] = monthUserMetrics + } else { + value := monthMap[month] + value.ActivateRegistUser += userMetrics.ActivateRegistUser + value.NotActivateRegistUser += userMetrics.NotActivateRegistUser + value.HasActivityUser += userMetrics.HasActivityUser + 
value.TotalRegistUser += userMetrics.TotalRegistUser + value.DaysForMonth += 1 + } + } + } + result := make([]*UserMetrics, 0) + for _, value := range monthMap { + result = append(result, value) + } + sort.Slice(result, func(i, j int) bool { + return strings.Compare(result[i].DisplayDate, result[j].DisplayDate) > 0 + }) + return result } func QueryRankList(key string, tableName string, limit int) ([]*UserBusinessAnalysisAll, int64) { @@ -540,6 +630,7 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS if minUserIndex > dateRecordAll.UserIndexPrimitive { minUserIndex = dateRecordAll.UserIndexPrimitive } + dateRecordBatch = append(dateRecordBatch, dateRecordAll) if len(dateRecordBatch) >= BATCH_INSERT_SIZE { insertTable(dateRecordBatch, tableName, statictisSess) @@ -695,7 +786,7 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time, log.Info("query user error. return.") return err } - + userNewAddActivity := make(map[int64]map[int64]int64) ParaWeight := getParaWeight() userMetrics := make(map[string]int) var indexTotal int64 @@ -767,6 +858,9 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time, dateRecord.UserIndexPrimitive = getUserIndex(dateRecord, ParaWeight) setUserMetrics(userMetrics, userRecord, start_unix, end_unix, dateRecord) + if getUserActivate(dateRecord) > 0 { + addUserToMap(userNewAddActivity, userRecord.CreatedUnix, dateRecord.ID) + } _, err = statictisSess.Insert(&dateRecord) if err != nil { log.Info("insert daterecord failed." + err.Error()) @@ -785,18 +879,71 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time, //insert userMetrics table var useMetrics UserMetrics useMetrics.CountDate = CountDate.Unix() + useMetrics.DataDate = DataDate statictisSess.Delete(&useMetrics) useMetrics.ActivateRegistUser = getMapKeyStringValue("ActivateRegistUser", userMetrics) useMetrics.HasActivityUser = getMapKeyStringValue("HasActivityUser", userMetrics) + useMetrics.RegistActivityUser = 0 useMetrics.NotActivateRegistUser = getMapKeyStringValue("NotActivateRegistUser", userMetrics) useMetrics.TotalActivateRegistUser = getMapKeyStringValue("TotalActivateRegistUser", userMetrics) useMetrics.TotalHasActivityUser = getMapKeyStringValue("TotalHasActivityUser", userMetrics) - statictisSess.Insert(&useMetrics) + count, err = sess.Where("type=0").Count(new(User)) + if err != nil { + log.Info("query user error. 
return.") + } + useMetrics.TotalUser = int(count) + if useMetrics.ActivateRegistUser+useMetrics.NotActivateRegistUser == 0 { + useMetrics.ActivateIndex = 0 + } else { + useMetrics.ActivateIndex = float64(useMetrics.ActivateRegistUser) / float64(useMetrics.ActivateRegistUser+useMetrics.NotActivateRegistUser) + } + statictisSess.Insert(&useMetrics) + //update new user activity + updateNewUserAcitivity(userNewAddActivity, statictisSess) return nil } +func updateNewUserAcitivity(currentUserActivity map[int64]map[int64]int64, statictisSess *xorm.Session) { + for key, value := range currentUserActivity { + useMetrics := &UserMetrics{CountDate: key} + has, err := statictisSess.Get(useMetrics) + if err == nil && has { + userIdArrays := strings.Split(useMetrics.HasActivityUserJson, ",") + for _, userIdStr := range userIdArrays { + userIdInt, err := strconv.ParseInt(userIdStr, 10, 64) + if err == nil { + value[userIdInt] = userIdInt + } + } + userIdArray := "" + for _, tmpValue := range value { + userIdArray += fmt.Sprint(tmpValue) + "," + } + useMetrics.HasActivityUser = len(value) + if len(userIdArray) > 0 { + useMetrics.HasActivityUserJson = userIdArray[0 : len(userIdArray)-1] + } + updateSql := "update public.user_metrics set has_activity_user_json=" + useMetrics.HasActivityUserJson + ",regist_activity_user=" + fmt.Sprint(useMetrics.HasActivityUser) + " where count_date=" + fmt.Sprint(key) + statictisSess.Exec(updateSql) + } + } +} + +func addUserToMap(currentUserActivity map[int64]map[int64]int64, registDate timeutil.TimeStamp, userId int64) { + CountDateTime := time.Date(registDate.Year(), registDate.AsTime().Month(), registDate.AsTime().Day(), 0, 1, 0, 0, registDate.AsTime().Location()) + CountDate := CountDateTime.Unix() + if _, ok := currentUserActivity[CountDate]; !ok { + userIdMap := make(map[int64]int64, 0) + userIdMap[userId] = userId + currentUserActivity[CountDate] = userIdMap + } else { + currentUserActivity[CountDate][userId] = userId + } + +} + func setUserMetrics(userMetrics map[string]int, user *User, start_time int64, end_time int64, dateRecord UserBusinessAnalysis) { //ActivateRegistUser int `xorm:"NOT NULL DEFAULT 0"` //NotActivateRegistUser int `xorm:"NOT NULL DEFAULT 0"` diff --git a/models/user_business_struct.go b/models/user_business_struct.go index 86aecd545..70f806c78 100644 --- a/models/user_business_struct.go +++ b/models/user_business_struct.go @@ -400,10 +400,19 @@ type UserAnalysisPara struct { } type UserMetrics struct { - CountDate int64 `xorm:"pk"` - ActivateRegistUser int `xorm:"NOT NULL DEFAULT 0"` - NotActivateRegistUser int `xorm:"NOT NULL DEFAULT 0"` - HasActivityUser int `xorm:"NOT NULL DEFAULT 0"` - TotalActivateRegistUser int `xorm:"NOT NULL DEFAULT 0"` - TotalHasActivityUser int `xorm:"NOT NULL DEFAULT 0"` + CountDate int64 `xorm:"pk"` + ActivateRegistUser int `xorm:"NOT NULL DEFAULT 0"` + NotActivateRegistUser int `xorm:"NOT NULL DEFAULT 0"` + ActivateIndex float64 `xorm:"NOT NULL DEFAULT 0"` + RegistActivityUser int `xorm:"NOT NULL DEFAULT 0"` + HasActivityUser int `xorm:"NOT NULL DEFAULT 0"` + TotalUser int `xorm:"NOT NULL DEFAULT 0"` + TotalRegistUser int `xorm:"-"` + TotalActivateRegistUser int `xorm:"NOT NULL DEFAULT 0"` + TotalNotActivateRegistUser int `xorm:"-"` + TotalHasActivityUser int `xorm:"NOT NULL DEFAULT 0"` + DisplayDate string `xorm:"-"` + DataDate string `xorm:"NULL"` + DaysForMonth int `xorm:"NOT NULL DEFAULT 0"` + HasActivityUserJson string `xorm:"text NULL"` } diff --git a/modules/auth/repo_form.go b/modules/auth/repo_form.go index 
c113aa890..ab724e923 100755 --- a/modules/auth/repo_form.go +++ b/modules/auth/repo_form.go @@ -740,3 +740,9 @@ type CreateCourseForm struct { func (f *CreateCourseForm) Validate(ctx *macaron.Context, errs binding.Errors) binding.Errors { return validate(errs, ctx.Data, f, ctx.Locale) } + +// RenameRepoFileForm form for renaming repository file +type RenameRepoFileForm struct { + TreePath string `binding:"Required;MaxSize(500)"` + LastCommit string +} diff --git a/modules/private/hook.go b/modules/private/hook.go index 010fc4d72..fa367f150 100644 --- a/modules/private/hook.go +++ b/modules/private/hook.go @@ -50,6 +50,11 @@ type HookPostReceiveBranchResult struct { URL string } +// HookEnvResult contains the environment config returned for hook scripts +type HookEnvResult struct { + Config map[string]string +} + // HookPreReceive check whether the provided commits are allowed func HookPreReceive(ownerName, repoName string, opts HookOptions) (int, string) { reqURL := setting.LocalURL + fmt.Sprintf("api/internal/hook/pre-receive/%s/%s", @@ -122,3 +127,28 @@ func SetDefaultBranch(ownerName, repoName, branch string) error { } return nil } + +// GetHookConfig gets the hook config used to set environment variables for hook scripts +func GetHookConfig(ownerName, repoName string) (map[string]string, string) { + reqURL := setting.LocalURL + fmt.Sprintf("api/internal/hook/env/%s/%s", + url.PathEscape(ownerName), + url.PathEscape(repoName), + ) + + req := newInternalRequest(reqURL, "GET") + req = req.Header("Content-Type", "application/json") + req.SetTimeout(60*time.Second, time.Duration(60)*time.Second) + resp, err := req.Response() + if err != nil { + return nil, fmt.Sprintf("Unable to contact gitea: %v", err.Error()) + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, decodeJSONError(resp).Err + } + res := &HookEnvResult{} + _ = json.NewDecoder(resp.Body).Decode(res) + + return res.Config, "" +} diff --git a/modules/repofiles/temp_repo.go b/modules/repofiles/temp_repo.go index 89f9b0b20..66f7e3487 100644 --- a/modules/repofiles/temp_repo.go +++ b/modules/repofiles/temp_repo.go @@ -109,6 +109,34 @@ func (t *TemporaryUploadRepository) LsFiles(filenames ...string) ([]string, erro return filelist, nil } +// LsFilesStage lists all files in the index in stage format for the given paths: +// if a given path is a directory, all files under it are returned; +// if a given path is a file, only that file is returned +func (t *TemporaryUploadRepository) LsFilesStage(paths ...string) ([]string, error) { + stdOut := new(bytes.Buffer) + stdErr := new(bytes.Buffer) + + cmdArgs := []string{"ls-files", "-z", "-s", "--"} + for _, arg := range paths { + if arg != "" { + cmdArgs = append(cmdArgs, arg) + } + } + + if err := git.NewCommand(cmdArgs...).RunInDirPipeline(t.basePath, stdOut, stdErr); err != nil { + log.Error("Unable to run git ls-files for temporary repo: %s (%s) Error: %v\nstdout: %s\nstderr: %s", t.repo.FullName(), t.basePath, err, stdOut.String(), stdErr.String()) + err = fmt.Errorf("Unable to run git ls-files for temporary repo of: %s Error: %v\nstdout: %s\nstderr: %s", t.repo.FullName(), err, stdOut.String(), stdErr.String()) + return nil, err + } + + filelist := make([]string, 0) + for _, line := range bytes.Split(stdOut.Bytes(), []byte{'\000'}) { + filelist = append(filelist, string(line)) + } + + return filelist, nil +} + // RemoveFilesFromIndex removes the given files from the index func (t *TemporaryUploadRepository) RemoveFilesFromIndex(filenames ...string) error { stdOut := new(bytes.Buffer) diff --git a/modules/repofiles/update.go 
b/modules/repofiles/update.go index d65f61c84..d7751d50e 100644 --- a/modules/repofiles/update.go +++ b/modules/repofiles/update.go @@ -756,3 +756,210 @@ func createCommitRepoActions(repo *models.Repository, gitRepo *git.Repository, o } return actions, nil } + +// RenameRepoFileOptions holds the options for renaming a repository file +type RenameRepoFileOptions struct { + LastCommitID string + BranchName string + TreePath string + FromTreePath string + Message string + Author *IdentityOptions + Committer *IdentityOptions +} + +// RenameRepoFile renames a file in the given repository +func RenameRepoFile(repo *models.Repository, doer *models.User, opts *RenameRepoFileOptions) error { + + // Branch must exist for this operation + if _, err := repo_module.GetBranch(repo, opts.BranchName); err != nil { + return err + } + + // make sure the user can commit to the given branch + if err := checkBranchProtection(doer, repo, opts.BranchName, opts.TreePath); err != nil { + return err + } + + // Check that the path given in opts.treePath is valid (not a git path) + treePath := CleanUploadFileName(opts.TreePath) + if treePath == "" { + return models.ErrFilenameInvalid{ + Path: opts.TreePath, + } + } + // If there is a fromTreePath (we are moving it), also clean it up + fromTreePath := CleanUploadFileName(opts.FromTreePath) + if fromTreePath == "" && opts.FromTreePath != "" { + return models.ErrFilenameInvalid{ + Path: opts.FromTreePath, + } + } + + author, committer := GetAuthorAndCommitterUsers(opts.Author, opts.Committer, doer) + + t, err := NewTemporaryUploadRepository(repo) + if err != nil { + log.Error("%v", err) + } + defer t.Close() + if err := t.Clone(opts.BranchName); err != nil { + return err + } + if err := t.SetDefaultIndex(); err != nil { + return err + } + + // Get the commit of the original branch + commit, err := t.GetBranchCommit(opts.BranchName) + if err != nil { + return err // Couldn't get a commit for the branch + } + + lastCommitID, err := t.gitRepo.ConvertToSHA1(opts.LastCommitID) + if err != nil { + return fmt.Errorf("RenameRepoFile: Invalid last commit ID: %v", err) + } + opts.LastCommitID = lastCommitID.String() + + if opts.LastCommitID == "" { + // When updating a file, a lastCommitID needs to be given to make sure other commits + // haven't been made. We throw an error if one wasn't provided. + return models.ErrSHAOrCommitIDNotProvided{} + } + + // if fromTreePath does not exist, return an error + _, err = commit.GetTreeEntryByPath(fromTreePath) + if err != nil { + return err + } + + // If a lastCommitID was given and it doesn't match the commitID of the head of the branch throw + // an error. 
+ if commit.ID.String() != opts.LastCommitID { + if changed, err := commit.FileChangedSinceCommit(fromTreePath, opts.LastCommitID); err != nil { + return err + } else if changed { + return models.ErrCommitIDDoesNotMatch{ + GivenCommitID: opts.LastCommitID, + CurrentCommitID: opts.LastCommitID, + } + } + } + + // if treePath already exists, return an error + _, err = commit.GetTreeEntryByPath(treePath) + if err == nil || !git.IsErrNotExist(err) { + // means a file already exists at the new path + return models.ErrFilePathInvalid{ + Message: fmt.Sprintf("a file exists where you’re trying to create a subdirectory [path: %s]", treePath), + Path: treePath, + Name: treePath, + Type: git.EntryModeBlob, + } + } + + // move the files and add them to the index + if err = moveAndAddFiles(fromTreePath, treePath, t); err != nil { + return err + } + + // Now write the tree + treeHash, err := t.WriteTree() + if err != nil { + return err + } + + // Now commit the tree + message := strings.TrimSpace(opts.Message) + commitHash, err := t.CommitTree(author, committer, treeHash, message) + if err != nil { + return err + } + + // Then push this commit to the branch + if err := t.Push(doer, commitHash, opts.BranchName); err != nil { + log.Error("%T %v", err, err) + return err + } + + return nil +} + +func checkBranchProtection(doer *models.User, repo *models.Repository, branchName, treePath string) error { + // make sure the user can commit to the given branch + protectedBranch, err := repo.GetBranchProtection(branchName) + if err != nil { + return err + } + if protectedBranch != nil { + if !protectedBranch.CanUserPush(doer.ID) { + return models.ErrUserCannotCommit{ + UserName: doer.LowerName, + } + } + if protectedBranch.RequireSignedCommits { + _, _, err := repo.SignCRUDAction(doer, repo.RepoPath(), branchName) + if err != nil { + if !models.IsErrWontSign(err) { + return err + } + return models.ErrUserCannotCommit{ + UserName: doer.LowerName, + } + } + } + patterns := protectedBranch.GetProtectedFilePatterns() + for _, pat := range patterns { + if pat.Match(strings.ToLower(treePath)) { + return models.ErrFilePathProtected{ + Path: treePath, + } + } + } + } + return nil +} + +func moveAndAddFiles(oldTreePath, newTreePath string, t *TemporaryUploadRepository) error { + array, err := t.LsFilesStage(oldTreePath) + if err != nil { + return err + } + if len(array) == 0 { + return git.ErrNotExist{RelPath: oldTreePath} + } + stdOut := new(bytes.Buffer) + stdErr := new(bytes.Buffer) + stdIn := new(bytes.Buffer) + // write all files in stage format to stdin; + // for each file, remove the old tree path and add the new tree path. + // See the git update-index documentation at https://git-scm.com/docs/git-update-index, + // especially the "USING --INDEX-INFO" section. + for _, v := range array { + if v == "" { + continue + } + // example of v (mode SHA-1 stage path): + //100755 d294c88235ac05d3dece028d8a65590f28ec46ac 0 custom/conf/app.ini + v = strings.ReplaceAll(v, "0\t", "") + tmpArray := strings.Split(v, " ") + oldPath := tmpArray[2] + newPath := newTreePath + strings.TrimPrefix(oldPath, oldTreePath) + // mode 0 means remove file + stdIn.WriteString("0 0000000000000000000000000000000000000000\t") + stdIn.WriteString(oldPath) + stdIn.WriteByte('\000') + stdIn.WriteString(tmpArray[0] + " ") + stdIn.WriteString(tmpArray[1] + "\t") + stdIn.WriteString(newPath) + stdIn.WriteByte('\000') + } + + if err := git.NewCommand("update-index", "--replace", "-z", "--index-info").RunInDirFullPipeline(t.basePath, stdOut, stdErr, stdIn); err != nil { + log.Error("Unable to 
update-index for temporary repo: %s (%s) Error: %v\nstdout: %s\nstderr: %s", t.repo.FullName(), t.basePath, err, stdOut.String(), stdErr.String()) + return fmt.Errorf("Unable to update-index for temporary repo: %s Error: %v\nstdout: %s\nstderr: %s", t.repo.FullName(), err, stdOut.String(), stdErr.String()) + } + + return nil +} diff --git a/modules/repository/hooks.go b/modules/repository/hooks.go index 7bcc5b550..82d02b3f1 100644 --- a/modules/repository/hooks.go +++ b/modules/repository/hooks.go @@ -36,7 +36,7 @@ func getHookTemplates() (hookNames, hookTpls, giteaHookTpls, sizeLimitTpls []str fmt.Sprintf("#!/usr/bin/env %s\n\"%s\" hook --config='%s' post-receive\n", setting.ScriptType, setting.AppPath, setting.CustomConf), } sizeLimitTpls = []string{ - fmt.Sprintf("#!/usr/bin/env %s\n\n\nset -o pipefail\n\nreadonly DEFAULT_FILE_MAXSIZE_MB=\"30\" \nreadonly CONFIG_NAME=\"hooks.maxfilesize\"\nreadonly NULLSHA=\"0000000000000000000000000000000000000000\"\nreadonly EXIT_SUCCESS=0\nreadonly EXIT_FAILURE=1\nreadonly DEFAULT_REPO_MAXSIZE_MB=\"1024\" \nreadonly CHECK_FLAG_ON=1\n\n\nstatus=\"$EXIT_SUCCESS\"\n\n# skip this hook entirely if shell check is not open\ncheck_flag=${PUSH_SIZE_CHECK_FLAG}\nif [[ $check_flag != $CHECK_FLAG_ON ]]; then\nexit $EXIT_SUCCESS\nfi\n\n\n#######################################\n# check the file max size limit\n#######################################\n\n# get maximum filesize (from repository-specific config)\nmaxsize_mb=\"${REPO_MAX_FILE_SIZE}\"\n\nif [[ \"$?\" != $EXIT_SUCCESS ]]; then\necho \"failed to get ${CONFIG_NAME} from config\"\nexit \"$EXIT_FAILURE\"\nfi\n\npush_size=\"0\"\n# read lines from stdin (format: \" \\n\")\nwhile read oldref newref refname; do\n# skip branch deletions\nif [[ \"$newref\" == \"$NULLSHA\" ]]; then\n continue\nfi\n\n# find large objects\n# check all objects from $oldref (possible $NULLSHA) to $newref, but\n# skip all objects that have already been accepted (i.e. 
are referenced by\n# another branch or tag).\n\nif [[ \"$oldref\" == \"$NULLSHA\" ]]; then\n target=\"$newref\"\nelse\n target=\"${oldref}..${newref}\"\nfi\nmaxsize=`expr $maxsize_mb \\* 1048576` \n\n# find objects in this push_size\n# print like:\n# 08da8e2ab9ae4095bf94dd71ac913132b880b463 commit 214\n# 43e993b768ede5740e8c65de2ed6edec25053ea1 tree 185\n# 4476971d76569039df7569af1b8d03c288f6b193 blob 20167318 b0417e6593a1.zip\nfiles=\"$(git rev-list --objects \"$target\" --tags=\\* | \\\n git cat-file $'--batch-check=%%(objectname) %%(objecttype) %%(objectsize) %%(rest)')\"\n \nif [[ \"$?\" != $EXIT_SUCCESS ]]; then\n echo \"failed to check for large files in ref ${refname}\"\n continue\nfi\n\n# rewrite IFS to seperate line in $files\nIFS=$'\\n'\nfor file in $files; do\n # if don't unset IFS,temp_array=(${file}) will get error answer\n unset IFS\n temp_array=(${file})\n # add all commit files size\n push_size=`expr $push_size + ${temp_array[2]}`\n if [[ ${temp_array[2]} -gt $maxsize ]]; then\n\t if [[ \"$status\" == $EXIT_SUCCESS ]]; then\n\t\techo -e \"Error: Your push was rejected because it contains files larger than $(numfmt --to=iec \"$maxsize_mb\") Mb\"\n\t\techo \"oversize files:\"\n\t\tstatus=\"$EXIT_FAILURE\"\n\t fi\n\t echo -e \"\\033[31m- ${temp_array[3]} \\033[0m (ref: ${refname}) \"\n fi\ndone\n\nif [[ \"$status\" != $EXIT_SUCCESS ]]; then\n\texit \"$status\"\nfi\n\ndone\n\n#######################################\n# check the repo max size limit\n#######################################\nif [[ $push_size -eq \"0\" ]]; then\n\texit $EXIT_SUCCESS\nfi\n\n\nsizelimit_mb=\"${REPO_MAX_SIZE}\"\nlet sizelimit_b=$sizelimit_mb*1024*1024\n\n# repo size at here means the size of repo directory in server \nreposize_b=${REPO_CURRENT_SIZE}\n\ntotal=`expr $push_size + $reposize_b`\n\nif [ $total -gt $sizelimit_b ]; then\n echo \"Error: Your push was rejected because the repository size is large than $sizelimit_mb Mb\"\n exit $EXIT_FAILURE\nfi\n\n\nexit $EXIT_SUCCESS\n", setting.ScriptType, setting.CustomConf), + fmt.Sprintf("#!/usr/bin/env %s\n\n\nset -o pipefail\n\nreadonly DEFAULT_FILE_MAXSIZE_MB=\"30\" \nreadonly CONFIG_NAME=\"hooks.maxfilesize\"\nreadonly NULLSHA=\"0000000000000000000000000000000000000000\"\nreadonly EXIT_SUCCESS=0\nreadonly EXIT_FAILURE=1\nreadonly DEFAULT_REPO_MAXSIZE_MB=\"1024\" \nreadonly CHECK_FLAG_ON=1\n\n\nstatus=\"$EXIT_SUCCESS\"\n\n# skip this hook entirely if shell check is not open\ncheck_flag=${PUSH_SIZE_CHECK_FLAG}\nif [[ $check_flag != $CHECK_FLAG_ON ]]; then\nexit $EXIT_SUCCESS\nfi\n\n\n#######################################\n# check the file max size limit\n#######################################\n\n# get maximum filesize (from repository-specific config)\nmaxsize_mb=\"${REPO_MAX_FILE_SIZE}\"\n\nif [[ \"$?\" != $EXIT_SUCCESS ]]; then\necho \"failed to get ${CONFIG_NAME} from config\"\nexit \"$EXIT_FAILURE\"\nfi\n\npush_size=\"0\"\n# read lines from stdin (format: \" \\n\")\nwhile read oldref newref refname; do\n# skip branch deletions\nif [[ \"$newref\" == \"$NULLSHA\" ]]; then\n continue\nfi\n\n# find large objects\n# check all objects from $oldref (possible $NULLSHA) to $newref, but\n# skip all objects that have already been accepted (i.e. 
are referenced by\n# another branch or tag).\n\nnew_branch_flag=0\nif [[ \"$oldref\" == \"$NULLSHA\" ]]; then\n target=\"$newref\"\n new_branch_flag=1\n echo \"You are creating a new remote branch,openI will check all files in commit history to find oversize files\"\nelse\n target=\"${oldref}..${newref}\"\nfi\nmaxsize=`expr $maxsize_mb \\* 1048576` \n\n# find objects in this push_size\n# print like:\n# 08da8e2ab9ae4095bf94dd71ac913132b880b463 commit 214\n# 43e993b768ede5740e8c65de2ed6edec25053ea1 tree 185\n# 4476971d76569039df7569af1b8d03c288f6b193 blob 20167318 b0417e6593a1.zip\nfiles=\"$(git rev-list --objects \"$target\" | \\\n git cat-file $'--batch-check=%%(objectname) %%(objecttype) %%(objectsize) %%(rest)' | \\\n awk -F ' ' -v maxbytes=\"$maxsize\" 'BEGIN {totalIn=0} {if( $3 > maxbytes && $2 == \"blob\") { totalIn+=$3; print $4} else { totalIn+=$3}} END { printf (\"totalIn=\\t%%s\",totalIn)}' )\"\n \nif [[ \"$?\" != $EXIT_SUCCESS ]]; then\n echo \"failed to check for large files in ref ${refname}\"\n continue\nfi\n\nIFS=$'\\n'\n# rewrite IFS to seperate line in $files\nfor file in $files; do\n # if don't unset IFS,temp_array=(${file}) will get error answer\n \n if [[ ${file} == totalIn=* ]]; then\n\tIFS=$'\\t'\n\ttemp_array=(${file})\n\tpush_size=${temp_array[1]}\n\tcontinue\n fi\n\tunset IFS\n if [[ \"$status\" == $EXIT_SUCCESS ]]; then\n\t\techo -e \"Error: Your push was rejected because it contains files larger than $(numfmt --to=iec \"$maxsize_mb\") Mb\"\n\t\techo \"help document -- https://git.openi.org.cn/zeizei/OpenI_Learning/src/branch/master/docs/git/repository_capacity_help.md\"\n\t\techo \"oversize files:\"\n\t\tstatus=\"$EXIT_FAILURE\"\t\n fi\n echo -e \"\\033[31m- ${file}\\033[0m \"\ndone\n\nif [[ \"$status\" != $EXIT_SUCCESS ]]; then\n\texit \"$status\"\nfi\n\ndone\n\n#######################################\n# check the repo max size limit\n#######################################\nif [[ $push_size -eq \"0\" ]]; then\n\texit $EXIT_SUCCESS\nfi\n\n# if create new branch or tag,use count-objects -v to get pack size\nif [[ $new_branch_flag -eq 1 ]]; then\n size_kb=`git count-objects -v | grep 'size-pack' | sed 's/.*\\(size-pack:\\).//'`\n size_pack_kb=`git count-objects -v | grep 'size:' | sed 's/.*\\(size:\\).//'`\n\ttotal_kb=`expr $size_kb + $size_pack_kb`\n\tlet push_size=$total_kb*1024\nfi\n\nsizelimit_mb=\"${REPO_MAX_SIZE}\"\nlet sizelimit_b=$sizelimit_mb*1024*1024\n\n# repo size at here means the size of repo directory in server \nreposize_b=${REPO_CURRENT_SIZE}\n\ntotal=`expr $push_size + $reposize_b`\n\nif [ $total -gt $sizelimit_b ]; then\n echo \"Error: Your push was rejected because the repository size is large than $sizelimit_mb Mb\"\n echo \"see the help document--https://git.openi.org.cn/zeizei/OpenI_Learning/src/branch/master/docs/git/repository_capacity_help.md\"\n exit $EXIT_FAILURE\nfi\n\n\nexit $EXIT_SUCCESS", setting.ScriptType), fmt.Sprintf(""), fmt.Sprintf(""), } diff --git a/modules/ssh/ssh.go b/modules/ssh/ssh.go index ac590a057..fe13d5484 100644 --- a/modules/ssh/ssh.go +++ b/modules/ssh/ssh.go @@ -5,6 +5,7 @@ package ssh import ( + "code.gitea.io/gitea/services/repository" "crypto/rand" "crypto/rsa" "crypto/x509" @@ -74,7 +75,7 @@ func sessionHandler(session ssh.Session) { models.EnvPushSizeCheckFlag+"="+fmt.Sprint(setting.Repository.Upload.ShellFlag), ) - if strings.HasPrefix(command, "git-receive-pack") { + if strings.HasPrefix(command, "git-receive-pack") && setting.Repository.Upload.ShellFlag == repository.SHELL_FLAG_ON { repo := 
getRepoFromCommandStr(command) if repo != nil { cmd.Env = append(cmd.Env, models.EnvRepoSize+"="+fmt.Sprint(repo.Size)) diff --git a/options/locale/locale_en-US.ini b/options/locale/locale_en-US.ini index 6fdffa45e..7cd1d8486 100755 --- a/options/locale/locale_en-US.ini +++ b/options/locale/locale_en-US.ini @@ -237,6 +237,8 @@ page_recommend_repo_desc=Excellent AI projects recommendation. To show your proj page_recommend_repo_commit=Click here to submit. page_recommend_repo_go=Click here to page_recommend_repo_more=explore more projects. +page_recommend_activity=Community Activities +page_recommend_activity_desc=The community has prepared a wealth of activities, waiting for you to participate! page_dev_env=Collaborative Development Environment page_dev_env_desc=Provide a collaborative development environment for AI development, which is the biggest highlight that distinguishes the OpenI AI Collaboration Platform from other traditional Git platforms. page_dev_env_desc_title=Unified Management of Development Elements @@ -261,7 +263,7 @@ search_issue=Issue search_pr=Pull Request search_user=User search_org=Organization -search_finded=Find +search_finded=Find search_related=related search_maybe=maybe search_ge= @@ -274,7 +276,7 @@ use_plt__fuction = To use the AI collaboration functions provided by this platfo provide_resoure = Computing resources of CPU/GPU/NPU are provided freely for various types of AI tasks. activity = Activity no_events = There are no events related -or_t = or +or_t = or [explore] repos = Repositories @@ -525,6 +527,19 @@ static.public.user_business_analysis_last30_day=Last_30_day static.public.user_business_analysis_last_month=Last_Month static.public.user_business_analysis_yesterday=Yesterday static.public.user_business_analysis_all=All + +metrics.sheetname=User Trend Analysis +metrics.date=Count Date +metrics.newregistuser=New registered user +metrics.newregistandactiveuser=New activated +metrics.hasactivateuser=New contributing activities +metrics.newregistnotactiveuser=New inactive +metrics.averageuser=Average new users +metrics.newuseractiveindex=Activation rate of new users +metrics.totalregistuser=Cumulative registered users +metrics.totalactiveduser=Cumulative activated users +metrics.totalhasactivityuser=Cumulative active users + [settings] profile = Profile account = Account @@ -945,6 +960,15 @@ model_manager = Model model_noright=No right model_rename=Duplicate model name, please modify model name. +date=Date +repo_add=Project Increment +repo_total=Project Total +repo_public_add=Public Project Increment +repo_private_add=Private Project Increment +repo_fork_add=Fork Project Increment +repo_mirror_add=Mirror Project Increment +repo_self_add=Custom Project Increment + debug=Debug debug_again=Restart stop=Stop @@ -1009,7 +1033,9 @@ get_repo_stat_error=Can not get the statistics of the repository. get_repo_info_error=Can not get the information of the repository. generate_statistic_file_error=Failed to generate file. 
repo_stat_inspect=ProjectAnalysis +repo_stat_develop=ProjectGrowthAnalysis all=All +current_year=Current_Year computing.all = All computing.Introduction=Introduction @@ -1305,6 +1331,8 @@ editor.require_signed_commit = Branch requires a signed commit editor.repo_too_large = Repository can not exceed %d MB editor.repo_file_invalid = Upload files are invalid editor.upload_file_too_much = Can not upload more than %d files at a time +editor.rename = rename "%s" to "%s" +editor.file_changed_while_renaming=The version of the file or folder to be renamed has changed. Please refresh the page and try again commits.desc = Browse source code change history. @@ -1369,7 +1397,7 @@ issues.add_milestone_at = `added this to the %s milestone %s` issues.change_milestone_at = `modified the milestone from %s to %s %s` issues.remove_milestone_at = `removed this from the %s milestone %s` -issues.add_branch_at=`added this to the %s branch %s` +issues.add_branch_at=`added this to the %s branch %s` issues.add_tag_at =`added this to the %s tag %s` issues.change_branch_tag_at= `modified the branch/tag from %s to %s %s` issues.remove_branch_at=`removed this from the %s branch %s` @@ -3022,4 +3050,4 @@ BRAINSCORE = BENCHMARK TRAIN = TRAIN INFERENCE = INFERENCE BENCHMARK = BENCHMARK -brain_area = Brain Area \ No newline at end of file +brain_area = Brain Area diff --git a/options/locale/locale_zh-CN.ini b/options/locale/locale_zh-CN.ini index ea65d8fc8..dd2e656d9 100755 --- a/options/locale/locale_zh-CN.ini +++ b/options/locale/locale_zh-CN.ini @@ -239,6 +239,8 @@ page_recommend_repo_desc=优秀的AI项目推荐;你的项目也想展示到 page_recommend_repo_commit=点此提交 page_recommend_repo_go=。进入 page_recommend_repo_more=项目广场 +page_recommend_activity=社区活动 +page_recommend_activity_desc=社区准备了丰富的活动,等你来参加! page_dev_env=协同开发环境 page_dev_env_desc=启智AI协作开发平台与传统git平台最大的不同就在于提供了面向AI开发的协同开发环境 page_dev_env_desc_title=开发要素统一管理 @@ -269,7 +271,7 @@ search_maybe=约为 search_ge=个 wecome_AI_plt=欢迎来到启智AI协作平台! 
-explore_AI = 探索更好的AI,来这里发现更有意思的 +explore_AI = 探索更好的AI,来这里发现更有意思的 datasets = 数据集 repositories = 项目 use_plt__fuction = 使用本平台提供的AI协作功能,如:托管代码、共享数据、调试算法或训练模型,请先 @@ -277,7 +279,7 @@ provide_resoure = 平台目前免费提供CPU、GPU、NPU的算力资源,可 create_pro = 创建项目 activity = 活动 no_events = 还没有与您相关的活动 -or_t = 或 +or_t = 或 [explore] @@ -530,6 +532,19 @@ static.public.user_business_analysis_last30_day=近30天 static.public.user_business_analysis_last_month=上月 static.public.user_business_analysis_yesterday=昨天 static.public.user_business_analysis_all=所有 + +metrics.sheetname=用户趋势分析 +metrics.date=日期 +metrics.newregistuser=新增注册用户 +metrics.newregistandactiveuser=新增已激活 +metrics.hasactivateuser=新增有贡献活动 +metrics.newregistnotactiveuser=新增未激活 +metrics.averageuser=平均新增用户 +metrics.newuseractiveindex=新增用户激活率 +metrics.totalregistuser=累计注册用户 +metrics.totalactiveduser=累计已激活 +metrics.totalhasactivityuser=累计有贡献活动 + [settings] profile=个人信息 account=账号 @@ -946,6 +961,16 @@ model_manager = 模型 model_noright=无权限操作 model_rename=模型名称重复,请修改模型名称 + +date=日期 +repo_add=新增项目 +repo_total=累计项目 +repo_public_add=新增公开项目 +repo_private_add=新增私有项目 +repo_fork_add=新增派生项目 +repo_mirror_add=新增镜像项目 +repo_self_add=新增自建项目 + debug=调试 debug_again=再次调试 stop=停止 @@ -1017,7 +1042,9 @@ get_repo_stat_error=查询当前仓库的统计信息失败。 get_repo_info_error=查询当前仓库信息失败。 generate_statistic_file_error=生成文件失败。 repo_stat_inspect=项目分析 +repo_stat_develop=项目增长趋势 all=所有 +current_year=今年 computing.all=全部 computing.Introduction=简介 @@ -1316,6 +1343,8 @@ editor.require_signed_commit=分支需要签名提交 editor.repo_too_large = 代码仓总大小不能超过%dMB editor.repo_file_invalid = 提交的文件非法 editor.upload_file_too_much = 不能同时提交超过%d个文件 +editor.rename = 重命名"%s"为"%s" +editor.file_changed_while_renaming=待重命名的文件或文件夹版本已发生变化,请您刷新页面后重试 commits.desc=浏览代码修改历史 commits.commits=次代码提交 diff --git a/public/home/home.js b/public/home/home.js index d8e423def..3b2a34f06 100755 --- a/public/home/home.js +++ b/public/home/home.js @@ -6,6 +6,7 @@ if(isEmpty(token)){ token = meta.attr("content"); } } + var swiperNewMessage = new Swiper(".newslist", { direction: "vertical", slidesPerView: 10, @@ -15,6 +16,18 @@ var swiperNewMessage = new Swiper(".newslist", { disableOnInteraction: false, }, }); +var swiperEvent = new Swiper(".event-list", { + slidesPerView: 2, + spaceBetween: 30, + pagination: { + el: ".swiper-pagination", + clickable: true, + }, + autoplay: { + delay: 2500, + disableOnInteraction: false, + }, +}); var swiperRepo = new Swiper(".homepro-list", { slidesPerView: 1, slidesPerColumn: 2, @@ -433,6 +446,38 @@ function queryRecommendData(){ } }); + $.ajax({ + type:"GET", + url:"/recommend/imageinfo", + headers: { + authorization:token, + }, + dataType:"json", + async:false, + success:function(json){ + displayActivity(json); + }, + error:function(response) { + } + }); +} + +function displayActivity(json){ + var activityDiv = document.getElementById("recommendactivity"); + var html = ""; + if (json != null && json.length > 0){ + for(var i = 0; i < json.length;i++){ + var record = json[i] + html += "
"; + html += ""; + html += "
" + html += "
"; + html += "
"; + } + } + activityDiv.innerHTML = html; + swiperEvent.updateSlides(); + swiperEvent.updateProgress(); } function displayRepo(json){ diff --git a/routers/api/v1/api.go b/routers/api/v1/api.go index 2b070a4b8..d2c6e3633 100755 --- a/routers/api/v1/api.go +++ b/routers/api/v1/api.go @@ -535,6 +535,9 @@ func RegisterRoutes(m *macaron.Macaron) { m.Get("/restoreFork", repo.RestoreForkNumber) m.Get("/downloadAll", repo.ServeAllProjectsPeriodStatisticsFile) m.Get("/downloadAllOpenI", repo.ServeAllProjectsOpenIStatisticsFile) + m.Get("/summary", repo.GetLatestProjectsSummaryData) + m.Get("/summary/period", repo.GetProjectsSummaryData) + m.Get("/summary/download", repo.GetProjectsSummaryDataFile) m.Group("/project", func() { m.Get("", repo.GetAllProjectsPeriodStatistics) m.Get("/numVisit", repo.ProjectNumVisit) @@ -547,7 +550,15 @@ func RegisterRoutes(m *macaron.Macaron) { }) }, operationReq) - m.Get("/query_user_metrics", operationReq, repo_ext.QueryMetrics) + m.Get("/query_metrics_current_month", operationReq, repo_ext.QueryUserMetricsCurrentMonth) + m.Get("/query_metrics_current_week", operationReq, repo_ext.QueryUserMetricsCurrentWeek) + m.Get("/query_metrics_current_year", operationReq, repo_ext.QueryUserMetricsCurrentYear) + m.Get("/query_metrics_last30_day", operationReq, repo_ext.QueryUserMetricsLast30Day) + m.Get("/query_metrics_last_month", operationReq, repo_ext.QueryUserMetricsLastMonth) + m.Get("/query_metrics_yesterday", operationReq, repo_ext.QueryUserMetricsYesterday) + m.Get("/query_metrics_all", operationReq, repo_ext.QueryUserMetricsAll) + m.Get("/query_user_metrics_page", operationReq, repo_ext.QueryUserMetricDataPage) + m.Get("/query_user_rank_list", operationReq, repo_ext.QueryRankingList) m.Get("/query_user_static_page", operationReq, repo_ext.QueryUserStaticDataPage) m.Get("/query_user_current_month", operationReq, repo_ext.QueryUserStaticCurrentMonth) diff --git a/routers/api/v1/repo/repo_dashbord.go b/routers/api/v1/repo/repo_dashbord.go index 2c42f8a16..95c0e399e 100644 --- a/routers/api/v1/repo/repo_dashbord.go +++ b/routers/api/v1/repo/repo_dashbord.go @@ -20,8 +20,10 @@ import ( const DEFAULT_PAGE_SIZE = 10 const DATE_FORMAT = "2006-01-02" +const MONTH_FORMAT = "2006-01" const EXCEL_DATE_FORMAT = "20060102" const CREATE_TIME_FORMAT = "2006/01/02 15:04:05" +const UPDATE_TIME_FORMAT = "2006-01-02 15:04:05" type ProjectsPeriodData struct { RecordBeginTime string `json:"recordBeginTime"` @@ -60,6 +62,38 @@ type ProjectLatestData struct { Top10 []UserInfo `json:"top10"` } +type ProjectSummaryBaseData struct { + NumReposAdd int64 `json:"numReposAdd"` + NumRepoPublicAdd int64 `json:"numRepoPublicAdd"` + NumRepoPrivateAdd int64 `json:"numRepoPrivateAdd"` + NumRepoForkAdd int64 `json:"numRepoForkAdd"` + NumRepoMirrorAdd int64 `json:"numRepoMirrorAdd"` + NumRepoSelfAdd int64 `json:"numRepoSelfAdd"` + NumRepos int64 `json:"numRepos"` + CreatTime string `json:"creatTime"` +} + +type ProjectSummaryData struct { + ProjectSummaryBaseData + NumRepoPublic int64 `json:"numRepoPublic"` + NumRepoPrivate int64 `json:"numRepoPrivate"` + NumRepoFork int64 `json:"numRepoFork"` + NumRepoMirror int64 `json:"numRepoMirror"` + NumRepoSelf int64 `json:"numRepoSelf"` + + NumRepoOrgAdd int64 `json:"numRepoOrgAdd"` + NumRepoNotOrgAdd int64 `json:"numRepoNotOrgAdd"` + + NumRepoOrg int64 `json:"numRepoOrg"` + NumRepoNotOrg int64 `json:"numRepoNotOrg"` +} + +type ProjectSummaryPeriodData struct { + RecordBeginTime string `json:"recordBeginTime"` + TotalCount int64 `json:"totalCount"` + PageRecords 
[]*ProjectSummaryBaseData `json:"pageRecords"` +} + func RestoreForkNumber(ctx *context.Context) { repos, err := models.GetAllRepositories() if err != nil { @@ -73,6 +107,146 @@ func RestoreForkNumber(ctx *context.Context) { ctx.JSON(http.StatusOK, struct{}{}) } +func GetLatestProjectsSummaryData(ctx *context.Context) { + stat, err := models.GetLatest2SummaryStatistic() + data := ProjectSummaryData{} + if err == nil && len(stat) > 0 { + data.NumRepos = stat[0].NumRepos + data.NumRepoOrg = stat[0].NumRepoOrg + data.NumRepoNotOrg = stat[0].NumRepos - stat[0].NumRepoOrg + data.NumRepoFork = stat[0].NumRepoFork + data.NumRepoMirror = stat[0].NumRepoMirror + data.NumRepoSelf = stat[0].NumRepoSelf + data.NumRepoPrivate = stat[0].NumRepoPrivate + data.NumRepoPublic = stat[0].NumRepoPublic + data.CreatTime = stat[0].CreatedUnix.Format(UPDATE_TIME_FORMAT) + if len(stat) == 2 { + data.NumReposAdd = stat[0].NumRepos - stat[1].NumRepos + data.NumRepoOrgAdd = stat[0].NumRepoOrg - stat[1].NumRepoOrg + data.NumRepoNotOrgAdd = (stat[0].NumRepos - stat[0].NumRepoOrg) - (stat[1].NumRepos - stat[1].NumRepoOrg) + data.NumRepoForkAdd = stat[0].NumRepoFork - stat[1].NumRepoFork + data.NumRepoMirrorAdd = stat[0].NumRepoMirror - stat[1].NumRepoMirror + data.NumRepoSelfAdd = stat[0].NumRepoSelf - stat[1].NumRepoSelf + data.NumRepoPrivateAdd = stat[0].NumRepoPrivate - stat[1].NumRepoPrivate + data.NumRepoPublicAdd = stat[0].NumRepoPublic - stat[1].NumRepoPublic + } + } + ctx.JSON(200, data) +} + +func GetProjectsSummaryData(ctx *context.Context) { + + var datas = make([]*ProjectSummaryBaseData, 0) + + recordBeginTime, err := getRecordBeginTime() + if err != nil { + log.Error("Can not get record begin time", err) + ctx.Error(http.StatusBadRequest, ctx.Tr("repo.record_begintime_get_err")) + return + } + beginTime, endTime, err := getTimePeroid(ctx, recordBeginTime) + + beginTime = beginTime.AddDate(0, 0, -1) + + queryType := ctx.QueryTrim("type") + + var count int64 + + if queryType == "all" || queryType == "current_year" { + dates := getEndOfMonthDates(beginTime, endTime) + count, _ = models.GetSummaryStatisticByDateCount(dates) + stats, err := models.GetAllSummaryStatisticByDates(dates) + if err != nil { + log.Warn("can not get summary data", err) + } else { + + for i, v := range stats { + if i == 0 { + continue + } + data := ProjectSummaryBaseData{} + setStatisticsData(&data, v, stats[i-1]) + createTime, _ := time.Parse(DATE_FORMAT, v.Date) + data.CreatTime = createTime.Format(MONTH_FORMAT) + datas = append(datas, &data) + } + } + + } else { + count, _ = models.GetSummaryStatisticByTimeCount(beginTime, endTime) + stats, err := models.GetAllSummaryStatisticByTime(beginTime, endTime) + if err != nil { + log.Warn("can not get summary data", err) + } else { + + for i, v := range stats { + if i == 0 { + continue + } + data := ProjectSummaryBaseData{} + setStatisticsData(&data, v, stats[i-1]) + data.CreatTime = v.Date + datas = append(datas, &data) + } + } + + } + + + projectSummaryPeriodData := ProjectSummaryPeriodData{ + TotalCount: count - 1, + RecordBeginTime: recordBeginTime.Format(DATE_FORMAT), + PageRecords: reverse(datas), + } + + ctx.JSON(200, projectSummaryPeriodData) + +} + +func reverse(datas []*ProjectSummaryBaseData ) []*ProjectSummaryBaseData { + for i := 0; i < len(datas)/2; i++ { + j := len(datas) - i - 1 + datas[i], datas[j] = datas[j], datas[i] + } + return datas +} + + + +func setStatisticsData(data *ProjectSummaryBaseData, v *models.SummaryStatistic, stats *models.SummaryStatistic) { + 
data.NumReposAdd = v.NumRepos - stats.NumRepos + data.NumRepoPublicAdd = v.NumRepoPublic - stats.NumRepoPublic + data.NumRepoPrivateAdd = v.NumRepoPrivate - stats.NumRepoPrivate + data.NumRepoMirrorAdd = v.NumRepoMirror - stats.NumRepoMirror + data.NumRepoForkAdd = v.NumRepoFork - stats.NumRepoFork + data.NumRepoSelfAdd = v.NumRepoSelf - stats.NumRepoSelf + + data.NumRepos = v.NumRepos +} + +func getEndOfMonthDates(beginTime time.Time, endTime time.Time) []string { + var dates = []string{} + date := endOfMonth(beginTime.AddDate(0, -1, 0)) + dates = append(dates, date.Format(DATE_FORMAT)) + + tempDate := endOfMonth(beginTime) + + for { + if tempDate.Before(endTime) { + dates = append(dates, tempDate.Format(DATE_FORMAT)) + tempDate = endOfMonth(tempDate.AddDate(0, 0, 1)) + } else { + break + } + } + + return dates +} + +func endOfMonth(date time.Time) time.Time { + return date.AddDate(0, 1, -date.Day()) +} + func GetAllProjectsPeriodStatistics(ctx *context.Context) { recordBeginTime, err := getRecordBeginTime() @@ -210,6 +384,122 @@ func ServeAllProjectsPeriodStatisticsFile(ctx *context.Context) { } +func GetProjectsSummaryDataFile(ctx *context.Context) { + + recordBeginTime, err := getRecordBeginTime() + if err != nil { + log.Error("Can not get record begin time", err) + ctx.Error(http.StatusBadRequest, ctx.Tr("repo.record_begintime_get_err")) + return + } + beginTime, endTime, err := getTimePeroid(ctx, recordBeginTime) + beginTime = beginTime.AddDate(0, 0, -1) + if err != nil { + log.Error("Parameter is wrong", err) + ctx.Error(http.StatusBadRequest, ctx.Tr("repo.parameter_is_wrong")) + return + } + + page := ctx.QueryInt("page") + if page <= 0 { + page = 1 + } + pageSize := 100 + + if err != nil { + log.Error("Can not query the last updated time.", err) + ctx.Error(http.StatusBadRequest, ctx.Tr("repo.last_update_time_error")) + return + } + + var projectAnalysis = ctx.Tr("repo.repo_stat_develop") + fileName := getSummaryFileName(ctx, beginTime, endTime, projectAnalysis) + + f := excelize.NewFile() + + index := f.NewSheet(projectAnalysis) + f.DeleteSheet("Sheet1") + + for k, v := range allProjectsPeriodSummaryHeader(ctx) { + f.SetCellValue(projectAnalysis, k, v) + } + + var total int64 + queryType := ctx.QueryTrim("type") + + var datas = make([]*ProjectSummaryBaseData, 0) + + if queryType == "all" || queryType == "current_year" { + dates := getEndOfMonthDates(beginTime, endTime) + total, _ = models.GetSummaryStatisticByDateCount(dates) + totalPage := getTotalPage(total, pageSize) + + for i := 0; i < totalPage; i++ { + + stats, err := models.GetSummaryStatisticByDates(dates, i+1, pageSize) + if err != nil { + log.Warn("can not get summary data", err) + } else { + for j, v := range stats { + if j == 0 { + continue + } + data := ProjectSummaryBaseData{} + setStatisticsData(&data, v, stats[j-1]) + createTime, _ := time.Parse(DATE_FORMAT, v.Date) + data.CreatTime = createTime.Format(MONTH_FORMAT) + + datas = append(datas, &data) + + } + + } + + } + + } else { + total, _ = models.GetSummaryStatisticByTimeCount(beginTime, endTime) + totalPage := getTotalPage(total, pageSize) + + for i := 0; i < totalPage; i++ { + + stats, err := models.GetSummaryStatisticByTime(beginTime, endTime, i+1, pageSize) + if err != nil { + log.Warn("can not get summary data", err) + } else { + for j, v := range stats { + if j == 0 { + continue + } + data := ProjectSummaryBaseData{} + setStatisticsData(&data, v, stats[j-1]) + data.CreatTime = v.Date + datas = append(datas, &data) + + } + + } + + } + } + row := 2 + datas = 
reverse(datas) + for _, data := range datas { + for k, v := range allProjectsPeriodSummaryValues(row, data, ctx) { + f.SetCellValue(projectAnalysis, k, v) + } + row++ + } + + f.SetActiveSheet(index) + + ctx.Resp.Header().Set("Content-Disposition", "attachment; filename="+url.QueryEscape(fileName)) + ctx.Resp.Header().Set("Content-Type", "application/octet-stream") + + f.WriteTo(ctx.Resp) + +} + func ServeAllProjectsOpenIStatisticsFile(ctx *context.Context) { page := ctx.QueryInt("page") @@ -290,6 +580,20 @@ func getFileName(ctx *context.Context, beginTime time.Time, endTime time.Time, p return frontName } +func getSummaryFileName(ctx *context.Context, beginTime time.Time, endTime time.Time, projectAnalysis string) string { + baseName := projectAnalysis + "_" + + if ctx.QueryTrim("type") == "all" { + baseName = baseName + ctx.Tr("repo.all") + } else if ctx.QueryTrim("type") == "current_year" { + baseName = baseName + ctx.Tr("repo.current_year") + } else { + baseName = baseName + beginTime.Format(EXCEL_DATE_FORMAT) + "_" + endTime.AddDate(0, 0, -1).Format(EXCEL_DATE_FORMAT) + } + frontName := baseName + ".xlsx" + return frontName +} + func allProjectsPeroidHeader(ctx *context.Context) map[string]string { return map[string]string{"A1": ctx.Tr("admin.repos.id"), "B1": ctx.Tr("admin.repos.projectName"), "C1": ctx.Tr("repo.owner"), "D1": ctx.Tr("admin.repos.isPrivate"), "E1": ctx.Tr("admin.repos.openi"), "F1": ctx.Tr("admin.repos.visit"), "G1": ctx.Tr("admin.repos.download"), "H1": ctx.Tr("admin.repos.pr"), "I1": ctx.Tr("admin.repos.commit"), @@ -297,6 +601,19 @@ func allProjectsPeroidHeader(ctx *context.Context) map[string]string { } +func allProjectsPeriodSummaryHeader(ctx *context.Context) map[string]string { + + return map[string]string{"A1": ctx.Tr("repo.date"), "B1": ctx.Tr("repo.repo_add"), "C1": ctx.Tr("repo.repo_total"), "D1": ctx.Tr("repo.repo_public_add"), "E1": ctx.Tr("repo.repo_private_add"), "F1": ctx.Tr("repo.repo_self_add"), "G1": ctx.Tr("repo.repo_fork_add"), "H1": ctx.Tr("repo.repo_mirror_add")} + +} + +func allProjectsPeriodSummaryValues(row int, rs *ProjectSummaryBaseData, ctx *context.Context) map[string]string { + + return map[string]string{getCellName("A", row): rs.CreatTime, getCellName("B", row): strconv.FormatInt(rs.NumReposAdd, 10), getCellName("C", row): strconv.FormatInt(rs.NumRepos, 10), getCellName("D", row): strconv.FormatInt(rs.NumRepoPublicAdd, 10), getCellName("E", row): strconv.FormatInt(rs.NumRepoPrivateAdd, 10), + getCellName("F", row): strconv.FormatInt(rs.NumRepoSelfAdd, 10), getCellName("G", row): strconv.FormatInt(rs.NumRepoForkAdd, 10), getCellName("H", row): strconv.FormatInt(rs.NumRepoMirrorAdd, 10), + } +} + func allProjectsPeroidValues(row int, rs *models.RepoStatistic, ctx *context.Context) map[string]string { return map[string]string{getCellName("A", row): strconv.FormatInt(rs.RepoID, 10), getCellName("B", row): rs.DisplayName(), getCellName("C", row): rs.OwnerName, getCellName("D", row): getBoolDisplay(rs.IsPrivate, ctx), getCellName("E", row): strconv.FormatFloat(rs.RadarTotal, 'f', 2, 64), getCellName("F", row): strconv.FormatInt(rs.NumVisits, 10), getCellName("G", row): strconv.FormatInt(rs.NumDownloads, 10), getCellName("H", row): strconv.FormatInt(rs.NumPulls, 10), getCellName("I", row): strconv.FormatInt(rs.NumCommits, 10), diff --git a/routers/home.go b/routers/home.go index 500ffbbd6..e37cacb01 100755 --- a/routers/home.go +++ b/routers/home.go @@ -8,6 +8,7 @@ package routers import ( "bytes" "net/http" + "strconv" "strings" 
"code.gitea.io/gitea/services/repository" @@ -92,6 +93,8 @@ func setRecommendURL(ctx *context.Context) { ctx.Data["page_dev_yunlao_desc3"] = ctx.Tr("home.page_dev_yunlao_desc3") ctx.Data["page_dev_yunlao_desc4"] = ctx.Tr("home.page_dev_yunlao_desc4") ctx.Data["page_dev_yunlao_apply"] = ctx.Tr("home.page_dev_yunlao_apply") + ctx.Data["page_recommend_activity"] = ctx.Tr("home.page_recommend_activity") + ctx.Data["page_recommend_activity_desc"] = ctx.Tr("home.page_recommend_activity_desc") } func Dashboard(ctx *context.Context) { @@ -602,7 +605,32 @@ func ExploreImages(ctx *context.Context) { ctx.HTML(200, tplExploreImages) } +func ExploreDataAnalysisUserTrend(ctx *context.Context) { + ctx.Data["url_params"]="UserTrend" + ctx.HTML(200, tplExploreExploreDataAnalysis) +} +func ExploreDataAnalysisUserAnalysis(ctx *context.Context) { + ctx.Data["url_params"]="UserAnalysis" + ctx.HTML(200, tplExploreExploreDataAnalysis) +} +func ExploreDataAnalysisProTrend(ctx *context.Context) { + ctx.Data["url_params"]="ProTrend" + ctx.HTML(200, tplExploreExploreDataAnalysis) +} +func ExploreDataAnalysisProAnalysis(ctx *context.Context) { + ctx.Data["url_params"]="ProAnalysis" + ctx.HTML(200, tplExploreExploreDataAnalysis) +} +func ExploreDataAnalysisOverview(ctx *context.Context) { + ctx.Data["url_params"]="Overview" + ctx.HTML(200, tplExploreExploreDataAnalysis) +} +func ExploreDataAnalysisBrainAnalysis(ctx *context.Context) { + ctx.Data["url_params"]="BrainAnalysis" + ctx.HTML(200, tplExploreExploreDataAnalysis) +} func ExploreDataAnalysis(ctx *context.Context) { + ctx.Data["url_params"]="" ctx.HTML(200, tplExploreExploreDataAnalysis) } @@ -640,6 +668,87 @@ func GetRecommendOrg() ([]map[string]interface{}, error) { } return resultOrg, nil } +func GetImageInfo() ([]map[string]interface{}, error) { + url := setting.RecommentRepoAddr + "picture_info" + result, err := repository.RecommendFromPromote(url) + + if err != nil { + return nil, err + } + imageInfo := make([]map[string]interface{}, 0) + for i := 0; i < (len(result) - 1); i++ { + line := result[i] + imageMap := make(map[string]interface{}) + if line[0:4] == "url=" { + url := line[4:] + imageMap["url"] = url + if result[i+1][0:11] == "image_link=" { + image_link := result[i+1][11:] + imageMap["image_link"] = image_link + } + } + imageInfo = append(imageInfo, imageMap) + i = i + 1 + } + return imageInfo, nil +} + +func GetRankUser(index string) ([]map[string]interface{}, error) { + url := setting.RecommentRepoAddr + "user_rank_" + index + result, err := repository.RecommendFromPromote(url) + + if err != nil { + return nil, err + } + resultOrg := make([]map[string]interface{}, 0) + for _, userRank := range result { + tmpIndex := strings.Index(userRank, " ") + userName := userRank + score := 0 + if tmpIndex != -1 { + userName = userRank[0:tmpIndex] + tmpScore, err := strconv.Atoi(userRank[tmpIndex+1:]) + if err != nil { + log.Info("convert to int error.") + } + score = tmpScore + } + user, err := models.GetUserByName(userName) + if err == nil { + userMap := make(map[string]interface{}) + userMap["Name"] = user.Name + userMap["Description"] = user.Description + userMap["FullName"] = user.FullName + userMap["HomeLink"] = user.HomeLink() + userMap["ID"] = user.ID + userMap["Avatar"] = user.RelAvatarLink() + userMap["Score"] = score + resultOrg = append(resultOrg, userMap) + } else { + log.Info("query user error," + err.Error()) + } + } + return resultOrg, nil +} + +func GetImageInfoFromPromote(ctx *context.Context) { + imageInfo, err := GetImageInfo() + if err 
!= nil { + ctx.ServerError("500", err) + return + } + ctx.JSON(200, imageInfo) +} + +func GetUserRankFromPromote(ctx *context.Context) { + index := ctx.Params("index") + resultUserRank, err := GetRankUser(index) + if err != nil { + ctx.ServerError("500", err) + return + } + ctx.JSON(200, resultUserRank) +} func RecommendOrgFromPromote(ctx *context.Context) { resultOrg, err := GetRecommendOrg() diff --git a/routers/private/hook.go b/routers/private/hook.go index 38abd4953..05d7ec1b0 100755 --- a/routers/private/hook.go +++ b/routers/private/hook.go @@ -199,7 +199,6 @@ func HookPreReceive(ctx *macaron.Context, opts private.HookOptions) { env = append(env, private.GitQuarantinePath+"="+opts.GitQuarantinePath) } - for i := range opts.OldCommitIDs { oldCommitID := opts.OldCommitIDs[i] newCommitID := opts.NewCommitIDs[i] @@ -368,6 +367,24 @@ func HookPreReceive(ctx *macaron.Context, opts private.HookOptions) { ctx.PlainText(http.StatusOK, []byte("ok")) } +// HookEnv +func HookEnv(ctx *macaron.Context) { + ownerName := ctx.Params(":owner") + repoName := ctx.Params(":repo") + log.Info("try to get hook env.ownerName=%s repoName=%s", ownerName, repoName) + repo, err := models.GetRepositoryByOwnerAndName(ownerName, repoName) + if err != nil { + log.Error("Unable to get repository: %s/%s Error: %v", ownerName, repoName, err) + ctx.JSON(http.StatusInternalServerError, map[string]interface{}{ + "err": err.Error(), + }) + return + } + result := make(map[string]string, 1) + result[models.EnvRepoSize] = fmt.Sprint(repo.Size) + ctx.JSON(http.StatusOK, &private.HookEnvResult{Config: result}) +} + // HookPostReceive updates services and users func HookPostReceive(ctx *macaron.Context, opts private.HookOptions) { ownerName := ctx.Params(":owner") diff --git a/routers/private/internal.go b/routers/private/internal.go index ace25c809..a0c2843be 100755 --- a/routers/private/internal.go +++ b/routers/private/internal.go @@ -38,6 +38,7 @@ func RegisterRoutes(m *macaron.Macaron) { m.Post("/hook/pre-receive/:owner/:repo", bind(private.HookOptions{}), HookPreReceive) m.Post("/hook/post-receive/:owner/:repo", bind(private.HookOptions{}), HookPostReceive) m.Post("/hook/set-default-branch/:owner/:repo/:branch", SetDefaultBranch) + m.Get("/hook/env/:owner/:repo", HookEnv) m.Get("/serv/none/:keyid", ServNoCommand) m.Get("/serv/command/:keyid/:owner/:repo", ServCommand) m.Post("/manager/shutdown", Shutdown) diff --git a/routers/repo/editor.go b/routers/repo/editor.go index 8e13735df..b389759f5 100644 --- a/routers/repo/editor.go +++ b/routers/repo/editor.go @@ -5,10 +5,12 @@ package repo import ( + "code.gitea.io/gitea/routers/response" repo_service "code.gitea.io/gitea/services/repository" "encoding/json" "fmt" "io/ioutil" + "net/http" "path" "path/filepath" "strings" @@ -795,3 +797,102 @@ func GetClosestParentWithFiles(treePath string, commit *git.Commit) string { } return treePath } + +// RenameFilePost response for editing file +func RenameFilePost(ctx *context.Context, form auth.RenameRepoFileForm) { + renameFilePost(ctx, form) +} + +func renameFilePost(ctx *context.Context, form auth.RenameRepoFileForm) { + if form.TreePath == "" || form.LastCommit == "" { + ctx.JSON(http.StatusOK, response.ServerError("param error")) + return + } + if form.TreePath == ctx.Repo.TreePath { + ctx.JSON(http.StatusOK, response.Success()) + return + } + + canCommit := renderCommitRights(ctx) + branchName := ctx.Repo.BranchName + if ctx.HasError() { + ctx.JSON(http.StatusOK, response.ServerError(ctx.Flash.ErrorMsg)) + return + } + + // 
Cannot commit to an existing branch if the user doesn't have rights
+    if branchName == ctx.Repo.BranchName && !canCommit {
+        ctx.Data["Err_NewBranchName"] = true
+        ctx.Data["commit_choice"] = frmCommitChoiceNewBranch
+        ctx.JSON(http.StatusOK, response.ServerError(ctx.Tr("repo.editor.cannot_commit_to_protected_branch", branchName)))
+        return
+    }
+
+    message := ctx.Tr("repo.editor.rename", ctx.Repo.TreePath, form.TreePath)
+
+    if err := repofiles.RenameRepoFile(ctx.Repo.Repository, ctx.User, &repofiles.RenameRepoFileOptions{
+        LastCommitID: form.LastCommit,
+        BranchName:   branchName,
+        FromTreePath: ctx.Repo.TreePath,
+        TreePath:     form.TreePath,
+        Message:      message,
+    }); err != nil {
+        // This is where we handle all the errors thrown by repofiles.RenameRepoFile
+        if git.IsErrNotExist(err) {
+            ctx.JSON(http.StatusOK, response.ServerError(ctx.Tr("repo.editor.file_editing_no_longer_exists", ctx.Repo.TreePath)))
+        } else if models.IsErrLFSFileLocked(err) {
+            ctx.JSON(http.StatusOK, response.ServerError(ctx.Tr("repo.editor.file_editing_no_longer_exists", ctx.Tr("repo.editor.upload_file_is_locked", err.(models.ErrLFSFileLocked).Path, err.(models.ErrLFSFileLocked).UserName))))
+        } else if models.IsErrFilenameInvalid(err) {
+            ctx.JSON(http.StatusOK, response.ServerError(ctx.Tr("repo.editor.filename_is_invalid", form.TreePath)))
+        } else if models.IsErrFilePathInvalid(err) {
+            if fileErr, ok := err.(models.ErrFilePathInvalid); ok {
+                switch fileErr.Type {
+                case git.EntryModeSymlink:
+                    ctx.JSON(http.StatusOK, response.ServerError(ctx.Tr("repo.editor.file_is_a_symlink", fileErr.Path)))
+                case git.EntryModeTree:
+                    ctx.JSON(http.StatusOK, response.ServerError(ctx.Tr("repo.editor.filename_is_a_directory", fileErr.Path)))
+                case git.EntryModeBlob:
+                    ctx.JSON(http.StatusOK, response.ServerError(ctx.Tr("repo.editor.directory_is_a_file", fileErr.Path)))
+                default:
+                    ctx.JSON(http.StatusOK, response.ServerError(err.Error()))
+                }
+            } else {
+                ctx.JSON(http.StatusOK, response.ServerError(err.Error()))
+            }
+        } else if models.IsErrRepoFileAlreadyExists(err) {
+            ctx.JSON(http.StatusOK, response.ServerError(ctx.Tr("repo.editor.file_already_exists", form.TreePath)))
+        } else if git.IsErrBranchNotExist(err) {
+            // For when a user adds/updates a file to a branch that no longer exists
+            if branchErr, ok := err.(git.ErrBranchNotExist); ok {
+                ctx.JSON(http.StatusOK, response.ServerError(ctx.Tr("repo.editor.branch_does_not_exist", branchErr.Name)))
+            } else {
+                ctx.JSON(http.StatusOK, response.ServerError(err.Error()))
+            }
+        } else if models.IsErrBranchAlreadyExists(err) {
+            // For when a user specifies a new branch that already exists
+            ctx.Data["Err_NewBranchName"] = true
+            if branchErr, ok := err.(models.ErrBranchAlreadyExists); ok {
+                ctx.JSON(http.StatusOK, response.ServerError(ctx.Tr("repo.editor.branch_already_exists", branchErr.BranchName)))
+            } else {
+                ctx.JSON(http.StatusOK, response.ServerError(err.Error()))
+            }
+        } else if models.IsErrCommitIDDoesNotMatch(err) {
+            ctx.JSON(http.StatusOK, response.ServerError(ctx.Tr("repo.editor.file_changed_while_renaming")))
+        } else if git.IsErrPushOutOfDate(err) {
+            ctx.JSON(http.StatusOK, response.ServerError(ctx.Tr("repo.editor.file_changed_while_renaming")))
+        } else if git.IsErrPushRejected(err) {
+            errPushRej := err.(*git.ErrPushRejected)
+            if len(errPushRej.Message) == 0 {
+                ctx.JSON(http.StatusOK, response.ServerError(ctx.Tr("repo.editor.push_rejected_no_message")))
+            } else {
+                ctx.JSON(http.StatusOK,
response.ServerError(ctx.Tr("repo.editor.push_rejected", utils.SanitizeFlashErrorString(errPushRej.Message)))) + } + } else { + ctx.JSON(http.StatusOK, response.ServerError(ctx.Tr("repo.editor.fail_to_update_file", form.TreePath, utils.SanitizeFlashErrorString(err.Error())))) + } + return + } + ctx.JSON(http.StatusOK, response.Success()) + +} diff --git a/routers/repo/repo_summary_statistic.go b/routers/repo/repo_summary_statistic.go index 3af31737c..65ba2cf0b 100644 --- a/routers/repo/repo_summary_statistic.go +++ b/routers/repo/repo_summary_statistic.go @@ -60,6 +60,12 @@ func SummaryStatisticDaily(date string) { } selfRepositoryNumber := repositoryNumer - mirrorRepositoryNumber - forkRepositoryNumber + organizationRepoNumber, err := models.GetAllOrgRepositoriesCount() + if err != nil { + log.Error("can not get org repository number", err) + organizationRepoNumber = 0 + } + //repository size repositorySize, err := models.GetAllRepositoriesSize() if err != nil { @@ -99,6 +105,7 @@ func SummaryStatisticDaily(date string) { NumRepoPrivate: privateRepositoryNumer, NumRepoPublic: publicRepositoryNumer, NumRepoSelf: selfRepositoryNumber, + NumRepoOrg: organizationRepoNumber, NumRepoBigModel: topicsCount[0], NumRepoAI: topicsCount[1], NumRepoVision: topicsCount[2], diff --git a/routers/repo/user_data_analysis.go b/routers/repo/user_data_analysis.go index 2280e8288..2823f9c87 100755 --- a/routers/repo/user_data_analysis.go +++ b/routers/repo/user_data_analysis.go @@ -19,6 +19,57 @@ const ( PAGE_SIZE = 2000 ) +func getUserMetricsExcelHeader(ctx *context.Context) map[string]string { + excelHeader := make([]string, 0) + excelHeader = append(excelHeader, ctx.Tr("user.metrics.date")) + excelHeader = append(excelHeader, ctx.Tr("user.metrics.newregistuser")) + excelHeader = append(excelHeader, ctx.Tr("user.metrics.newregistandactiveuser")) + excelHeader = append(excelHeader, ctx.Tr("user.metrics.hasactivateuser")) + excelHeader = append(excelHeader, ctx.Tr("user.metrics.newregistnotactiveuser")) + excelHeader = append(excelHeader, ctx.Tr("user.metrics.averageuser")) + excelHeader = append(excelHeader, ctx.Tr("user.metrics.newuseractiveindex")) + excelHeader = append(excelHeader, ctx.Tr("user.metrics.totalregistuser")) + excelHeader = append(excelHeader, ctx.Tr("user.metrics.totalactiveduser")) + excelHeader = append(excelHeader, ctx.Tr("user.metrics.totalhasactivityuser")) + + excelHeaderMap := make(map[string]string, 0) + var i byte + i = 0 + for _, value := range excelHeader { + excelColumn := getColumn(i) + fmt.Sprint(1) + excelHeaderMap[excelColumn] = value + i++ + } + return excelHeaderMap +} + +func writeUserMetricsExcel(row int, xlsx *excelize.File, sheetName string, userMetrics *models.UserMetrics) { + rows := fmt.Sprint(row) + var tmp byte + tmp = 0 + dateTime := time.Unix(userMetrics.CountDate, 0) + //dateTime.Format("2006-01-02 15:04:05") + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, dateTime.Format("2006-01-02 15:04:05")) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userMetrics.ActivateRegistUser+userMetrics.NotActivateRegistUser) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userMetrics.ActivateRegistUser) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userMetrics.RegistActivityUser) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userMetrics.NotActivateRegistUser) + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, "") + tmp = tmp + 1 + xlsx.SetCellValue(sheetName, 
getColumn(tmp)+rows, fmt.Sprintf("%.2f", userMetrics.ActivateIndex))
+    tmp = tmp + 1
+    xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userMetrics.TotalUser)
+    tmp = tmp + 1
+    xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userMetrics.TotalActivateRegistUser)
+    tmp = tmp + 1
+    xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userMetrics.TotalHasActivityUser)
+}
+
 func getExcelHeader(ctx *context.Context) map[string]string {
     excelHeader := make([]string, 0)
     excelHeader = append(excelHeader, ctx.Tr("user.static.id"))
@@ -200,16 +251,61 @@ func queryUserDataPage(ctx *context.Context, tableName string, queryObj interfac
     }
 }
-func QueryMetrics(ctx *context.Context) {
-    startDate := ctx.Query("startDate")
-    endDate := ctx.Query("endDate")
-    startTime, _ := time.ParseInLocation("2006-01-02", startDate, time.Local)
-    endTime, _ := time.ParseInLocation("2006-01-02", endDate, time.Local)
-    result, count := models.QueryMetrics(startTime.Unix(), endTime.Unix())
-    mapInterface := make(map[string]interface{})
-    mapInterface["data"] = result
-    mapInterface["count"] = count
-    ctx.JSON(http.StatusOK, mapInterface)
+func queryMetrics(ctx *context.Context, tableName string, startTime time.Time, endTime time.Time) {
+
+    page := ctx.QueryInt("page")
+    if page <= 0 {
+        page = 1
+    }
+    pageSize := ctx.QueryInt("pageSize")
+    if pageSize <= 0 {
+        pageSize = setting.UI.IssuePagingNum
+    }
+    IsReturnFile := ctx.QueryBool("IsReturnFile")
+
+    var count int64
+    result := make([]*models.UserMetrics, 0)
+    if tableName == "public.user_business_analysis_current_year" {
+        result = models.QueryMetricsForYear()
+        count = int64(len(result))
+    } else if tableName == "public.user_business_analysis_all" {
+        result = models.QueryMetricsForAll()
+        count = int64(len(result))
+    } else {
+        result, count = models.QueryMetricsPage(startTime.Unix(), endTime.Unix(), page, pageSize)
+    }
+    if IsReturnFile {
+        // write the result to an excel file
+        xlsx := excelize.NewFile()
+        sheetName := ctx.Tr("user.metrics.sheetname")
+        index := xlsx.NewSheet(sheetName)
+        xlsx.DeleteSheet("Sheet1")
+        dataHeader := getUserMetricsExcelHeader(ctx)
+        for k, v := range dataHeader {
+            // set the header cell value
+            xlsx.SetCellValue(sheetName, k, v)
+        }
+        row := 1
+        log.Info("return count=" + fmt.Sprint(count))
+        for _, userRecord := range result {
+            row++
+            writeUserMetricsExcel(row, xlsx, sheetName, userRecord)
+        }
+        // set the sheet that is opened by default
+        xlsx.SetActiveSheet(index)
+        filename := sheetName + "_" + ctx.Tr("user.static."+tableName) + ".xlsx"
+        ctx.Resp.Header().Set("Content-Disposition", "attachment; filename="+url.QueryEscape(filename))
+        ctx.Resp.Header().Set("Content-Type", "application/octet-stream")
+        if _, err := xlsx.WriteTo(ctx.Resp); err != nil {
+            log.Info("write excel error."
+ err.Error()) + } + } else { + mapInterface := make(map[string]interface{}) + mapInterface["data"] = result + mapInterface["count"] = count + ctx.JSON(http.StatusOK, mapInterface) + } + } func QueryRankingList(ctx *context.Context) { @@ -224,34 +320,97 @@ func QueryRankingList(ctx *context.Context) { ctx.JSON(http.StatusOK, mapInterface) } +func QueryUserMetricsCurrentMonth(ctx *context.Context) { + currentTimeNow := time.Now() + pageEndTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 23, 59, 59, 0, currentTimeNow.Location()) + pageStartTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), 1, 0, 0, 0, 0, currentTimeNow.Location()) + queryMetrics(ctx, "public.user_business_analysis_current_month", pageStartTime, pageEndTime) +} func QueryUserStaticCurrentMonth(ctx *context.Context) { queryUserDataPage(ctx, "public.user_business_analysis_current_month", new(models.UserBusinessAnalysisCurrentMonth)) } - +func QueryUserMetricsCurrentWeek(ctx *context.Context) { + currentTimeNow := time.Now() + offset := int(time.Monday - currentTimeNow.Weekday()) + if offset > 0 { + offset = -6 + } + pageStartTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 0, 0, 0, time.Local).AddDate(0, 0, offset) + pageEndTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 23, 59, 59, 0, currentTimeNow.Location()) + queryMetrics(ctx, "public.user_business_analysis_current_week", pageStartTime, pageEndTime) +} func QueryUserStaticCurrentWeek(ctx *context.Context) { queryUserDataPage(ctx, "public.user_business_analysis_current_week", new(models.UserBusinessAnalysisCurrentWeek)) } - +func QueryUserMetricsCurrentYear(ctx *context.Context) { + currentTimeNow := time.Now() + pageStartTime := time.Date(currentTimeNow.Year(), 1, 1, 0, 0, 0, 0, currentTimeNow.Location()) + pageEndTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 23, 59, 59, 0, currentTimeNow.Location()) + queryMetrics(ctx, "public.user_business_analysis_current_year", pageStartTime, pageEndTime) +} func QueryUserStaticCurrentYear(ctx *context.Context) { queryUserDataPage(ctx, "public.user_business_analysis_current_year", new(models.UserBusinessAnalysisCurrentYear)) } - +func QueryUserMetricsLast30Day(ctx *context.Context) { + currentTimeNow := time.Now() + pageStartTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 0, 0, 0, time.Local).AddDate(0, 0, -30) + pageEndTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 23, 59, 59, 0, currentTimeNow.Location()) + queryMetrics(ctx, "public.user_business_analysis_last30_day", pageStartTime, pageEndTime) +} func QueryUserStaticLast30Day(ctx *context.Context) { queryUserDataPage(ctx, "public.user_business_analysis_last30_day", new(models.UserBusinessAnalysisLast30Day)) } - +func QueryUserMetricsLastMonth(ctx *context.Context) { + currentTimeNow := time.Now() + thisMonth := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), 1, 0, 0, 0, 0, currentTimeNow.Location()) + pageStartTime := thisMonth.AddDate(0, -1, 0) + pageEndTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), 1, 23, 59, 59, 0, currentTimeNow.Location()).AddDate(0, 0, -1) + queryMetrics(ctx, "public.user_business_analysis_last_month", pageStartTime, pageEndTime) +} func QueryUserStaticLastMonth(ctx *context.Context) { queryUserDataPage(ctx, "public.user_business_analysis_last_month", 
new(models.UserBusinessAnalysisLastMonth)) } - +func QueryUserMetricsYesterday(ctx *context.Context) { + currentTimeNow := time.Now() + pageStartTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 0, 0, 0, 0, time.Local) + pageEndTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 23, 59, 59, 0, currentTimeNow.Location()) + queryMetrics(ctx, "public.user_business_analysis_yesterday", pageStartTime, pageEndTime) +} func QueryUserStaticYesterday(ctx *context.Context) { queryUserDataPage(ctx, "public.user_business_analysis_yesterday", new(models.UserBusinessAnalysisYesterday)) } - +func QueryUserMetricsAll(ctx *context.Context) { + currentTimeNow := time.Now() + pageStartTime := time.Date(2022, 4, 5, 0, 0, 0, 0, currentTimeNow.Location()) + pageEndTime := time.Date(currentTimeNow.Year(), currentTimeNow.Month(), currentTimeNow.Day(), 23, 59, 59, 0, currentTimeNow.Location()) + queryMetrics(ctx, "public.user_business_analysis_all", pageStartTime, pageEndTime) +} func QueryUserStaticAll(ctx *context.Context) { queryUserDataPage(ctx, "public.user_business_analysis_all", new(models.UserBusinessAnalysisAll)) } +func QueryUserMetricDataPage(ctx *context.Context) { + startDate := ctx.Query("startDate") + endDate := ctx.Query("endDate") + startTime, _ := time.ParseInLocation("2006-01-02", startDate, time.Local) + endTime, _ := time.ParseInLocation("2006-01-02", endDate, time.Local) + + page := ctx.QueryInt("page") + if page <= 0 { + page = 1 + } + pageSize := ctx.QueryInt("pageSize") + if pageSize <= 0 { + pageSize = setting.UI.IssuePagingNum + } + result, count := models.QueryMetricsPage(startTime.Unix(), endTime.Unix(), page, pageSize) + + mapInterface := make(map[string]interface{}) + mapInterface["data"] = result + mapInterface["count"] = count + ctx.JSON(http.StatusOK, mapInterface) +} + func QueryUserStaticDataPage(ctx *context.Context) { startDate := ctx.Query("startDate") endDate := ctx.Query("endDate") diff --git a/routers/repo/view.go b/routers/repo/view.go index b28e21aa1..6880d5261 100755 --- a/routers/repo/view.go +++ b/routers/repo/view.go @@ -608,6 +608,11 @@ func getContributorInfo(contributorInfos []*ContributorInfo, email string) *Cont // Home render repository home page func Home(ctx *context.Context) { + if ctx.Repo.CanEnableEditor() { + ctx.Data["CanEditFile"] = true + } else { + ctx.Data["CanEditFile"] = false + } if len(ctx.Repo.Units) > 0 { //get repo contributors info contributors, err := git.GetContributors(ctx.Repo.Repository.RepoPath(), ctx.Repo.BranchName) diff --git a/routers/response/response.go b/routers/response/response.go new file mode 100644 index 000000000..edd3b9cca --- /dev/null +++ b/routers/response/response.go @@ -0,0 +1,32 @@ +package response + +const ( + RESPONSE_CODE_SUCCESS = 0 + RESPONSE_MSG_SUCCESS = "ok" + RESPONSE_CODE_ERROR_DEFAULT = 99 +) + +type AiforgeResponse struct { + Code int + Msg string + Data interface{} +} + +func Success() *AiforgeResponse { + return &AiforgeResponse{Code: RESPONSE_CODE_SUCCESS, Msg: RESPONSE_MSG_SUCCESS} +} + +func Error(code int, msg string) *AiforgeResponse { + return &AiforgeResponse{Code: code, Msg: msg} +} + +func ServerError(msg string) *AiforgeResponse { + return &AiforgeResponse{Code: RESPONSE_CODE_ERROR_DEFAULT, Msg: msg} +} + +func SuccessWithData(data interface{}) *AiforgeResponse { + return &AiforgeResponse{Code: RESPONSE_CODE_ERROR_DEFAULT, Msg: RESPONSE_MSG_SUCCESS, Data: data} +} +func ErrorWithData(code int, msg string, data 
interface{}) *AiforgeResponse { + return &AiforgeResponse{Code: code, Msg: msg, Data: data} +} diff --git a/routers/routes/routes.go b/routers/routes/routes.go index 912ed6238..4c3f5f472 100755 --- a/routers/routes/routes.go +++ b/routers/routes/routes.go @@ -325,6 +325,8 @@ func RegisterRoutes(m *macaron.Macaron) { m.Get("/action/notification", routers.ActionNotification) m.Get("/recommend/org", routers.RecommendOrgFromPromote) m.Get("/recommend/repo", routers.RecommendRepoFromPromote) + m.Get("/recommend/userrank/:index", routers.GetUserRankFromPromote) + m.Get("/recommend/imageinfo", routers.GetImageInfoFromPromote) m.Post("/all/search/", routers.Search) m.Get("/all/search/", routers.EmptySearch) m.Get("/all/dosearch/", routers.SearchApi) @@ -344,6 +346,13 @@ func RegisterRoutes(m *macaron.Macaron) { m.Get("/code", routers.ExploreCode) m.Get("/images", routers.ExploreImages) m.Get("/data_analysis", routers.ExploreDataAnalysis) + m.Get("/data_analysis/UserTrend", routers.ExploreDataAnalysisUserTrend) + m.Get("/data_analysis/UserAnalysis", routers.ExploreDataAnalysisUserAnalysis) + m.Get("/data_analysis/ProAnalysis", routers.ExploreDataAnalysisProAnalysis) + m.Get("/data_analysis/ProTrend", routers.ExploreDataAnalysisProTrend) + m.Get("/data_analysis/Overview", routers.ExploreDataAnalysisOverview) + m.Get("/data_analysis/BrainAnalysis", routers.ExploreDataAnalysisBrainAnalysis) + }, ignSignIn) m.Combo("/install", routers.InstallInit).Get(routers.Install). Post(bindIgnErr(auth.InstallForm{}), routers.InstallPost) @@ -933,6 +942,7 @@ func RegisterRoutes(m *macaron.Macaron) { m.Combo("/_upload/*", repo.MustBeAbleToUpload). Get(repo.UploadFile). Post(bindIgnErr(auth.UploadRepoFileForm{}), repo.UploadFilePost) + m.Post("/_rename/*", bindIgnErr(auth.RenameRepoFileForm{}), repo.RenameFilePost) }, context.RepoRefByType(context.RepoRefBranch), repo.MustBeEditable) m.Group("", func() { m.Post("/upload-file", repo.UploadFileToServer) diff --git a/services/repository/repository.go b/services/repository/repository.go index d0cd52653..b9abbeb6f 100644 --- a/services/repository/repository.go +++ b/services/repository/repository.go @@ -19,6 +19,8 @@ import ( pull_service "code.gitea.io/gitea/services/pull" ) +const SHELL_FLAG_ON = 1 + // CreateRepository creates a repository for the user/organization. func CreateRepository(doer, owner *models.User, opts models.CreateRepoOptions) (*models.Repository, error) { repo, err := repo_module.CreateRepository(doer, owner, opts) diff --git a/templates/explore/data_analysis.tmpl b/templates/explore/data_analysis.tmpl index 34ad3f018..5c878787d 100755 --- a/templates/explore/data_analysis.tmpl +++ b/templates/explore/data_analysis.tmpl @@ -1,15 +1,21 @@ {{template "base/head_fluid" .}} +
-[markup elided]
+[markup elided]
 {{template "base/footer_fluid" .}}
-[markup elided]
+[markup elided]
\ No newline at end of file
diff --git a/templates/explore/repo_right.tmpl b/templates/explore/repo_right.tmpl
index 12a3ffadb..5e05e797b 100644
--- a/templates/explore/repo_right.tmpl
+++ b/templates/explore/repo_right.tmpl
@@ -1,4 +1,4 @@
-[markup elided]
+[markup elided]
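Review note on the new JSON envelope: renameFilePost above always answers HTTP 200 and reports success or failure through the AiforgeResponse struct from routers/response/response.go (Code 0 with Msg "ok" on success, Code 99 with a message otherwise). The Go sketch below shows one way a client might drive the new rename route and interpret that envelope; the endpoint shape and the form field names are assumptions made for illustration, not values taken from this patch.

package example // illustrative sketch, not part of the patch

import (
	"encoding/json"
	"fmt"
	"net/http"
	"net/url"
)

// aiforgeResponse mirrors routers/response.AiforgeResponse as defined in this diff:
// Code 0 means success, 99 is the default error code, Msg carries the reason.
type aiforgeResponse struct {
	Code int
	Msg  string
	Data interface{}
}

// renameFile posts a rename request and interprets the envelope. The route shape
// ("/_rename/<branch>/<path>") and the form field names are assumed for illustration.
func renameFile(client *http.Client, base, owner, repo, branch, oldPath, newPath, lastCommit string) error {
	endpoint := fmt.Sprintf("%s/%s/%s/_rename/%s/%s", base, owner, repo, branch, oldPath)
	form := url.Values{}
	form.Set("tree_path", newPath)      // assumed binding for RenameRepoFileForm.TreePath
	form.Set("last_commit", lastCommit) // assumed binding for RenameRepoFileForm.LastCommit

	resp, err := client.PostForm(endpoint, form)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	// renameFilePost always replies with HTTP 200; the outcome travels in the envelope.
	var r aiforgeResponse
	if err := json.NewDecoder(resp.Body).Decode(&r); err != nil {
		return err
	}
	if r.Code != 0 {
		return fmt.Errorf("rename rejected: %s", r.Msg)
	}
	return nil
}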
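A second sketch, for the new internal route GET /hook/env/:owner/:repo registered in routers/private/internal.go: HookEnv answers with private.HookEnvResult, a Config map that carries the repository's current size under the models.EnvRepoSize key. The caller below is illustrative only; it assumes the usual internal-API base URL, omits the authorization header that Gitea's private endpoints require, and does not hard-code the size key because its literal value is not shown in this diff.

package example // illustrative sketch, not part of the patch

import (
	"encoding/json"
	"fmt"
	"net/http"
	"net/url"
)

// hookEnvResult mirrors private.HookEnvResult as used by HookEnv in this diff:
// a map of environment-style keys (for example, the repository size) for hook-related callers.
type hookEnvResult struct {
	Config map[string]string
}

// fetchHookEnv calls the internal endpoint and returns the config map.
// The internal-API authentication header is omitted here for brevity.
func fetchHookEnv(client *http.Client, internalBase, owner, repo string) (map[string]string, error) {
	u := fmt.Sprintf("%s/hook/env/%s/%s", internalBase, url.PathEscape(owner), url.PathEscape(repo))
	resp, err := client.Get(u)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("hook env: unexpected status %d", resp.StatusCode)
	}
	var r hookEnvResult
	if err := json.NewDecoder(resp.Body).Decode(&r); err != nil {
		return nil, err
	}
	return r.Config, nil
}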