diff --git a/README.md b/README.md
index 061ece70c..99f6a6e8c 100644
--- a/README.md
+++ b/README.md
@@ -41,6 +41,7 @@
## License
This project is licensed under the MIT open-source license; the full license text is available in the [LICENSE](https://git.openi.org.cn/OpenI/aiforge/src/branch/develop/LICENSE) file.
+
## Need help?
If you run into problems while using or developing on the platform, you can reach us through the following channels:
- Click [here](https://git.openi.org.cn/OpenI/aiforge/issues) to file an issue online (click the green **Create Issue** button at the top right of the page)
@@ -49,3 +50,8 @@
## OpenI community beginner boot camp:
- Case-based, step-by-step tutorials on using the community platform, helping newcomers without a technical background grow into OpenI community experts (https://git.openi.org.cn/zeizei/OpenI_Learning)
+
+## Citing the platform
+If this platform has been helpful to your research, you are welcome to add the following to the acknowledgments of your paper:
+English version: `Thanks for the support provided by OpenI Community (https://git.openi.org.cn).`
+Chinese version: `感谢启智社区提供的技术支持(https://git.openi.org.cn)。`
\ No newline at end of file
diff --git a/custom/public/css/git.openi.css b/custom/public/css/git.openi.css
index c6ada7b28..a4920eec0 100644
--- a/custom/public/css/git.openi.css
+++ b/custom/public/css/git.openi.css
@@ -44,12 +44,6 @@
-webkit-line-clamp: 2;
-webkit-box-orient: vertical;
}
-.ui.label{
- font-weight: normal;
-}
-.active {
- color: #0366D6 !important;
-}
.opacity5{ opacity:0.5;}
.radius15{ border-radius:1.5rem !important; }
@@ -287,70 +281,6 @@
position: relative;
}
-/**seach**/
-/**搜索导航条适配窄屏**/
-.seachnav{
- overflow-x: auto;
- overflow-y: hidden;
- scrollbar-width: none; /* firefox */
- -ms-overflow-style: none; /* IE 10+ */
-}
-.seachnav::-webkit-scrollbar {
- display: none; /* Chrome Safari */
-}
-.ui.green.button, .ui.green.buttons .button{
- background-color: #5BB973;
-}
-.seach .repos--seach{
- padding-bottom: 0;
- border-bottom: none;
-}
-.seach .ui.secondary.pointing.menu{
- border-bottom: none;
-}
-.seach .ui.secondary.pointing.menu .item > i{
- margin-right: 5px;
-}
-.seach .ui.secondary.pointing.menu .active.item{
- border-bottom-width: 2px;
- margin: 0 0 -1px;
-}
-.seach .ui.menu .active.item>.label {
- background: #1684FC;
- color: #FFF;
-}
-.seach .ui.menu .item>.label:not(.active.item>.label) {
- background: #e8e8e8;
- color: rgba(0,0,0,.6);
-}
-
-.highlight{
- color: red;
-}
-.ui.list .list>.item>img.image+.content, .ui.list>.item>img.image+.content {
- width: calc(100% - 3.0em);
- margin-left: 0;
-}
-
-.seach .ui.list .list>.item .header, .seach .ui.list>.item .header{
- margin-bottom: 0.5em;
- font-size: 1.4rem !important;
- font-weight: normal;
-}
-.seach .time, .seach .time a{
- font-size: 12px;
- color: grey;
-}
-
-.seach .list .item.members .ui.avatar.image {
- width: 3.2em;
- height: 3.2em;
-}
-.ui.list .list>.item.members>img.image+.content, .ui.list>.item.members>img.image+.content {
- width: calc(100% - 4.0em);
- margin-left: 0;
-}
-
@media only screen and (max-width: 767px) {
.am-mt-30{ margin-top: 1.5rem !important;}
.ui.secondary.hometop.segment{
diff --git a/models/action.go b/models/action.go
index 2a9d88399..9b92b4192 100755
--- a/models/action.go
+++ b/models/action.go
@@ -57,6 +57,7 @@ const (
ActionCreateInferenceTask // 28
ActionCreateBenchMarkTask //29
ActionCreateNewModelTask //30
+ ActionCreateGPUTrainTask //31
)
// Action represents user operation type and other information to
diff --git a/models/attachment.go b/models/attachment.go
index c322d391b..a3fc6fa01 100755
--- a/models/attachment.go
+++ b/models/attachment.go
@@ -9,6 +9,7 @@ import (
"fmt"
"io"
"path"
+ "strings"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/obs"
@@ -18,6 +19,7 @@ import (
"code.gitea.io/gitea/modules/timeutil"
gouuid "github.com/satori/go.uuid"
+ "xorm.io/builder"
"xorm.io/xorm"
)
@@ -38,6 +40,7 @@ type Attachment struct {
UploaderID int64 `xorm:"INDEX DEFAULT 0"` // Notice: will be zero before this column added
CommentID int64
Name string
+ Description string `xorm:"TEXT"`
DownloadCount int64 `xorm:"DEFAULT 0"`
Size int64 `xorm:"DEFAULT 0"`
IsPrivate bool `xorm:"DEFAULT false"`
@@ -47,6 +50,7 @@ type Attachment struct {
FileChunk *FileChunk `xorm:"-"`
CanDel bool `xorm:"-"`
+ Uploader *User `xorm:"-"`
}
type AttachmentUsername struct {
@@ -54,6 +58,27 @@ type AttachmentUsername struct {
Name string
}
+type AttachmentInfo struct {
+ Attachment `xorm:"extends"`
+ Repo *Repository `xorm:"extends"`
+ RelAvatarLink string `xorm:"extends"`
+ UserName string `xorm:"extends"`
+}
+
+type AttachmentsOptions struct {
+ ListOptions
+ DatasetIDs []int64
+ DecompressState int
+ Type int
+ UploaderID int64
+ NeedDatasetIDs bool
+ NeedIsPrivate bool
+ IsPrivate bool
+ JustNeedZipFile bool
+ NeedRepoInfo bool
+ Keyword string
+}
+
func (a *Attachment) AfterUpdate() {
if a.DatasetID > 0 {
datasetIsPublicCount, err := x.Where("dataset_id = ? AND is_private = ?", a.DatasetID, false).Count(new(Attachment))
@@ -326,6 +351,18 @@ func DeleteAttachmentsByComment(commentID int64, remove bool) (int, error) {
func UpdateAttachment(atta *Attachment) error {
return updateAttachment(x, atta)
}
+func UpdateAttachmentDescription(atta *Attachment) error {
+ return updateAttachmentDescription(x, atta)
+}
+
+func updateAttachmentDescription(e Engine, atta *Attachment) error {
+	_, err := e.ID(atta.ID).Cols("description").Update(atta)
+	return err
+}
func updateAttachment(e Engine, atta *Attachment) error {
var sess *xorm.Session
@@ -503,3 +540,98 @@ func GetAttachmentSizeByDatasetID(datasetID int64) (int64, error) {
func GetAllAttachmentSize() (int64, error) {
return x.SumInt(&Attachment{}, "size")
}
+
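+// Attachments returns the attachments matching opts, optionally enriched with
+// repository and uploader info, together with the total match count.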
+func Attachments(opts *AttachmentsOptions) ([]*AttachmentInfo, int64, error) {
+ sess := x.NewSession()
+ defer sess.Close()
+
+ var cond = builder.NewCond()
+ if opts.NeedDatasetIDs {
+ cond = cond.And(
+ builder.In("attachment.dataset_id", opts.DatasetIDs),
+ )
+ }
+
+ if opts.UploaderID > 0 {
+ cond = cond.And(
+ builder.Eq{"attachment.uploader_id": opts.UploaderID},
+ )
+ }
+
+	if opts.Type >= 0 {
+ cond = cond.And(
+ builder.Eq{"attachment.type": opts.Type},
+ )
+ }
+
+ if opts.NeedIsPrivate {
+ cond = cond.And(
+ builder.Eq{"attachment.is_private": opts.IsPrivate},
+ )
+ }
+
+	if opts.JustNeedZipFile {
+		decompressStates := []int32{DecompressStateDone, DecompressStateIng, DecompressStateFailed}
+		cond = cond.And(
+			builder.In("attachment.decompress_state", decompressStates),
+		)
+	}
+
+ var count int64
+ var err error
+	if len(opts.Keyword) == 0 {
+		count, err = sess.Where(cond).Count(new(Attachment))
+	} else {
+		lowerKeyWord := strings.ToLower(opts.Keyword)
+		cond = cond.And(builder.Or(builder.Like{"LOWER(attachment.name)", lowerKeyWord}, builder.Like{"LOWER(attachment.description)", lowerKeyWord}))
+		count, err = sess.Table(&Attachment{}).Where(cond).Count(new(AttachmentInfo))
+	}
+
+ if err != nil {
+ return nil, 0, fmt.Errorf("Count: %v", err)
+ }
+
+ if opts.Page >= 0 && opts.PageSize > 0 {
+ var start int
+ if opts.Page == 0 {
+ start = 0
+ } else {
+ start = (opts.Page - 1) * opts.PageSize
+ }
+ sess.Limit(opts.PageSize, start)
+ }
+
+ sess.OrderBy("attachment.created_unix DESC")
+ attachments := make([]*AttachmentInfo, 0, setting.UI.DatasetPagingNum)
+ if err := sess.Table(&Attachment{}).Where(cond).
+ Find(&attachments); err != nil {
+ return nil, 0, fmt.Errorf("Find: %v", err)
+ }
+
+	if opts.NeedRepoInfo {
+		for _, attachment := range attachments {
+			dataset, err := GetDatasetByID(attachment.DatasetID)
+			if err != nil {
+				return nil, 0, fmt.Errorf("GetDatasetByID failed: %v", err)
+			}
+			repo, err := GetRepositoryByID(dataset.RepoID)
+			if err != nil {
+				return nil, 0, fmt.Errorf("GetRepositoryByID failed: %v", err)
+			}
+			attachment.Repo = repo
+			user, err := GetUserByID(attachment.UploaderID)
+			if err != nil {
+				return nil, 0, fmt.Errorf("GetUserByID failed: %v", err)
+			}
+			attachment.RelAvatarLink = user.RelAvatarLink()
+			attachment.UserName = user.Name
+		}
+	}
+
+ return attachments, count, nil
+}
diff --git a/models/base_message.go b/models/base_message.go
new file mode 100644
index 000000000..37f7668ad
--- /dev/null
+++ b/models/base_message.go
@@ -0,0 +1,16 @@
+package models
+
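+// BaseMessage is a minimal response envelope: Code 0 means success and any
+// non-zero Code carries a human-readable Message.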
+type BaseMessage struct {
+ Code int
+ Message string
+}
+
+var BaseOKMessage = BaseMessage{
+ 0, "",
+}
+
+func BaseErrorMessage(message string) BaseMessage {
+ return BaseMessage{
+ 1, message,
+ }
+}
diff --git a/models/cloudbrain.go b/models/cloudbrain.go
index f501d8e91..1662dcd96 100755
--- a/models/cloudbrain.go
+++ b/models/cloudbrain.go
@@ -1,6 +1,7 @@
package models
import (
+ "code.gitea.io/gitea/modules/util"
"encoding/json"
"fmt"
"strconv"
@@ -20,8 +21,16 @@ type JobType string
type ModelArtsJobStatus string
const (
+ TypeCloudBrainOne int = iota
+ TypeCloudBrainTwo
+
+ TypeCloudBrainAll = -1
+)
+
+const (
NPUResource = "NPU"
GPUResource = "CPU/GPU"
+ AllResource = "all"
//notebook storage category
EVSCategory = "EVS"
@@ -86,6 +95,8 @@ const (
ModelArtsTrainJobCheckRunning          ModelArtsJobStatus = "CHECK_RUNNING"           //review job is running
ModelArtsTrainJobCheckRunningCompleted ModelArtsJobStatus = "CHECK_RUNNING_COMPLETED" //review job has completed
ModelArtsTrainJobCheckFailed           ModelArtsJobStatus = "CHECK_FAILED"            //review job failed
+
+ DURATION_STR_ZERO = "00:00:00"
)
type Cloudbrain struct {
@@ -102,15 +113,15 @@ type Cloudbrain struct {
ContainerIp string
CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`
- Duration int64
- TrainJobDuration string
- Image string //镜像名称
- GpuQueue string //GPU类型即GPU队列
- ResourceSpecId int //GPU规格id
- DeletedAt time.Time `xorm:"deleted"`
- CanDebug bool `xorm:"-"`
- CanDel bool `xorm:"-"`
- CanModify bool `xorm:"-"`
+	Duration         int64  `xorm:"DEFAULT 0"` //running duration, in seconds
+	TrainJobDuration string `xorm:"DEFAULT '00:00:00'"`
+	Image            string //image name
+	GpuQueue         string //GPU type, i.e. the GPU queue name
+	ResourceSpecId   int    //GPU spec id
+ DeletedAt time.Time `xorm:"deleted"`
+ CanDebug bool `xorm:"-"`
+ CanDel bool `xorm:"-"`
+ CanModify bool `xorm:"-"`
Type int
BenchmarkTypeID int
BenchmarkChildTypeID int
@@ -150,6 +161,64 @@ type Cloudbrain struct {
Repo *Repository `xorm:"-"`
BenchmarkTypeName string `xorm:"-"`
BenchmarkTypeRankLink string `xorm:"-"`
+ StartTime timeutil.TimeStamp
+ EndTime timeutil.TimeStamp
+}
+
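+// ComputeAndSetDuration recalculates the job runtime from StartTime/EndTime,
+// falling back to the current time while the job is still running, and stores
+// both the numeric Duration (seconds) and its HH:MM:SS representation.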
+func (task *Cloudbrain) ComputeAndSetDuration() {
+ var d int64
+ if task.StartTime == 0 {
+ d = 0
+ } else if task.EndTime == 0 {
+ if !task.IsTerminal() {
+ d = time.Now().Unix() - task.StartTime.AsTime().Unix()
+ }
+ } else {
+ d = task.EndTime.AsTime().Unix() - task.StartTime.AsTime().Unix()
+ }
+
+ if d < 0 {
+ d = 0
+ }
+ task.Duration = d
+ task.TrainJobDuration = ConvertDurationToStr(d)
+}
+
+func (task *Cloudbrain) IsTerminal() bool {
+ status := task.Status
+ return status == string(ModelArtsTrainJobCompleted) || status == string(ModelArtsTrainJobFailed) || status == string(ModelArtsTrainJobKilled) || status == string(ModelArtsStopped) || status == string(JobStopped) || status == string(JobFailed) || status == string(JobSucceeded)
+}
+
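+// ConvertDurationToStr formats a duration given in seconds as HH:MM:SS.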
+func ConvertDurationToStr(duration int64) string {
+ if duration == 0 {
+ return DURATION_STR_ZERO
+ }
+ return util.AddZero(duration/3600) + ":" + util.AddZero(duration%3600/60) + ":" + util.AddZero(duration%60)
+}
+
+func IsTrainJobTerminal(status string) bool {
+ return status == string(ModelArtsTrainJobCompleted) || status == string(ModelArtsTrainJobFailed) || status == string(ModelArtsTrainJobKilled)
+}
+
+func IsModelArtsDebugJobTerminal(status string) bool {
+ return status == string(ModelArtsStopped)
+}
+
+func IsCloudBrainOneDebugJobTerminal(status string) bool {
+ return status == string(JobStopped) || status == string(JobFailed) || status == string(JobSucceeded)
+}
+
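+// ParseAndSetDurationFromCloudBrainOne fills StartTime/EndTime from a
+// CloudBrain One job payload (millisecond timestamps) and recomputes the duration.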
+func ParseAndSetDurationFromCloudBrainOne(result JobResultPayload, task *Cloudbrain) {
+ isActivated := result.JobStatus.CreatedTime > 0
+ if task.StartTime == 0 && isActivated {
+ task.StartTime = timeutil.TimeStamp(result.JobStatus.CreatedTime / 1000)
+ }
+	if task.EndTime == 0 && isActivated && IsCloudBrainOneDebugJobTerminal(task.Status) && result.JobStatus.CompletedTime > 0 {
+		task.EndTime = timeutil.TimeStamp(result.JobStatus.CompletedTime / 1000)
+	}
+ task.ComputeAndSetDuration()
}
type CloudbrainInfo struct {
@@ -319,7 +388,7 @@ type JobResultPayload struct {
AppProgress string `json:"appProgress"`
AppTrackingURL string `json:"appTrackingUrl"`
AppLaunchedTime int64 `json:"appLaunchedTime"`
- AppCompletedTime interface{} `json:"appCompletedTime"`
+ AppCompletedTime int64 `json:"appCompletedTime"`
AppExitCode int `json:"appExitCode"`
AppExitDiagnostics string `json:"appExitDiagnostics"`
AppExitType interface{} `json:"appExitType"`
@@ -1019,6 +1088,7 @@ type GetTrainJobResult struct {
NasShareAddr string `json:"nas_share_addr"`
DatasetName string
ModelMetricList string `json:"model_metric_list"` //may contain f1_score, recall, precision, accuracy, when available
+	StartTime int64 `json:"start_time"` //training job start time
}
type GetTrainJobLogResult struct {
@@ -1283,6 +1353,7 @@ func CloudbrainsVersionList(opts *CloudbrainsOptions) ([]*CloudbrainInfo, int, e
}
func CreateCloudbrain(cloudbrain *Cloudbrain) (err error) {
+ cloudbrain.TrainJobDuration = DURATION_STR_ZERO
if _, err = x.Insert(cloudbrain); err != nil {
return err
}
@@ -1327,13 +1398,13 @@ func GetCloudbrainByJobIDAndIsLatestVersion(jobID string, isLatestVersion string
func GetCloudbrainsNeededStopByUserID(userID int64) ([]*Cloudbrain, error) {
cloudBrains := make([]*Cloudbrain, 0)
- err := x.Cols("job_id", "status", "type", "job_type", "version_id").Where("user_id=? AND status !=?", userID, string(JobStopped)).Find(&cloudBrains)
+ err := x.Cols("job_id", "status", "type", "job_type", "version_id", "start_time").Where("user_id=? AND status !=?", userID, string(JobStopped)).Find(&cloudBrains)
return cloudBrains, err
}
func GetCloudbrainsNeededStopByRepoID(repoID int64) ([]*Cloudbrain, error) {
cloudBrains := make([]*Cloudbrain, 0)
- err := x.Cols("job_id", "status", "type", "job_type", "version_id").Where("repo_id=? AND status !=?", repoID, string(JobStopped)).Find(&cloudBrains)
+ err := x.Cols("job_id", "status", "type", "job_type", "version_id", "start_time").Where("repo_id=? AND status !=?", repoID, string(JobStopped)).Find(&cloudBrains)
return cloudBrains, err
}
@@ -1377,7 +1448,7 @@ func UpdateTrainJobVersion(job *Cloudbrain) error {
func updateJobTrainVersion(e Engine, job *Cloudbrain) error {
var sess *xorm.Session
sess = e.Where("job_id = ? AND version_name=?", job.JobID, job.VersionName)
- _, err := sess.Cols("status", "train_job_duration").Update(job)
+ _, err := sess.Cols("status", "train_job_duration", "duration", "start_time", "end_time").Update(job)
return err
}
@@ -1427,6 +1498,15 @@ func GetCloudBrainUnStoppedJob() ([]*Cloudbrain, error) {
Find(&cloudbrains)
}
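+
+// GetStoppedJobWithNoDurationJob returns up to 100 finished jobs whose
+// train_job_duration was never backfilled, so their duration can be recomputed.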
+func GetStoppedJobWithNoDurationJob() ([]*Cloudbrain, error) {
+ cloudbrains := make([]*Cloudbrain, 0)
+ return cloudbrains, x.
+ In("status", ModelArtsTrainJobCompleted, ModelArtsTrainJobFailed, ModelArtsTrainJobKilled, ModelArtsStopped, JobStopped, JobFailed, JobSucceeded).
+ Where("train_job_duration is null or train_job_duration = '' ").
+ Limit(100).
+ Find(&cloudbrains)
+}
+
func GetCloudbrainCountByUserID(userID int64, jobType string) (int, error) {
count, err := x.In("status", JobWaiting, JobRunning).And("job_type = ? and user_id = ? and type = ?", jobType, userID, TypeCloudBrainOne).Count(new(Cloudbrain))
return int(count), err
@@ -1457,7 +1537,7 @@ func UpdateInferenceJob(job *Cloudbrain) error {
func updateInferenceJob(e Engine, job *Cloudbrain) error {
var sess *xorm.Session
sess = e.Where("job_id = ?", job.JobID)
- _, err := sess.Cols("status", "train_job_duration").Update(job)
+ _, err := sess.Cols("status", "train_job_duration", "duration", "start_time", "end_time").Update(job)
return err
}
func RestartCloudbrain(old *Cloudbrain, new *Cloudbrain) (err error) {
diff --git a/models/dataset.go b/models/dataset.go
index 2b3de752b..c0d82d250 100755
--- a/models/dataset.go
+++ b/models/dataset.go
@@ -22,6 +22,7 @@ type Dataset struct {
Category string
Description string `xorm:"TEXT"`
DownloadTimes int64
+ NumStars int `xorm:"INDEX NOT NULL DEFAULT 0"`
License string
Task string
ReleaseID int64 `xorm:"INDEX"`
@@ -35,6 +36,11 @@ type Dataset struct {
Attachments []*Attachment `xorm:"-"`
}
+type DatasetWithStar struct {
+ Dataset
+ IsStaring bool
+}
+
func (d *Dataset) IsPrivate() bool {
switch d.Status {
case DatasetStatusPrivate:
@@ -91,33 +97,37 @@ type SearchDatasetOptions struct {
OwnerID int64
RepoID int64
IncludePublic bool
+ Category string
+ Task string
+ License string
ListOptions
SearchOrderBy
IsOwner bool
}
func CreateDataset(dataset *Dataset) (err error) {
- if _, err = x.Insert(dataset); err != nil {
+ sess := x.NewSession()
+ defer sess.Close()
+
+ if err := sess.Begin(); err != nil {
return err
}
- return nil
-}
-
-func CreateDefaultDatasetToRepo(repo *Repository) (err error) {
- dataset := &Dataset{RepoID: repo.ID}
- has, err := x.Get(dataset)
+ datasetByRepoId := &Dataset{RepoID: dataset.RepoID}
+ has, err := sess.Get(datasetByRepoId)
if err != nil {
return err
}
- if !has {
- dataset.Status = DatasetStatusPrivate
- dataset.Title = repo.Name
- if err = CreateDataset(dataset); err != nil {
- return err
- }
+ if has {
+		return fmt.Errorf("dataset already exists")
}
- return nil
+
+ if _, err = sess.Insert(dataset); err != nil {
+ return err
+ }
+ return sess.Commit()
}
func SearchDataset(opts *SearchDatasetOptions) (DatasetList, int64, error) {
@@ -130,7 +140,18 @@ func SearchDatasetCondition(opts *SearchDatasetOptions) builder.Cond {
cond = cond.And(builder.Neq{"dataset.status": DatasetStatusDeleted})
if len(opts.Keyword) > 0 {
- cond = cond.And(builder.Like{"dataset.title", opts.Keyword})
+ cond = cond.And(builder.Or(builder.Like{"dataset.title", opts.Keyword}, builder.Like{"dataset.description", opts.Keyword}))
+ }
+
+ if len(opts.Category) > 0 {
+ cond = cond.And(builder.Eq{"dataset.category": opts.Category})
+ }
+
+ if len(opts.Task) > 0 {
+ cond = cond.And(builder.Eq{"dataset.task": opts.Task})
+ }
+ if len(opts.License) > 0 {
+ cond = cond.And(builder.Eq{"dataset.license": opts.License})
}
if opts.RepoID > 0 {
@@ -139,12 +160,13 @@ func SearchDatasetCondition(opts *SearchDatasetOptions) builder.Cond {
if opts.IncludePublic {
cond = cond.And(builder.Eq{"dataset.status": DatasetStatusPublic})
+ cond = cond.And(builder.Eq{"attachment.is_private": false})
if opts.OwnerID > 0 {
if len(opts.Keyword) == 0 {
cond = cond.Or(builder.Eq{"repository.owner_id": opts.OwnerID})
} else {
subCon := builder.NewCond()
- subCon = subCon.And(builder.Eq{"repository.owner_id": opts.OwnerID}, builder.Like{"dataset.title", opts.Keyword})
+ subCon = subCon.And(builder.Eq{"repository.owner_id": opts.OwnerID}, builder.Or(builder.Like{"dataset.title", opts.Keyword}, builder.Like{"dataset.description", opts.Keyword}))
cond = cond.Or(subCon)
}
@@ -153,6 +175,7 @@ func SearchDatasetCondition(opts *SearchDatasetOptions) builder.Cond {
cond = cond.And(builder.Eq{"repository.owner_id": opts.OwnerID})
if !opts.IsOwner {
cond = cond.And(builder.Eq{"dataset.status": DatasetStatusPublic})
+ cond = cond.And(builder.Eq{"attachment.is_private": false})
}
}
@@ -169,14 +192,20 @@ func SearchDatasetByCondition(opts *SearchDatasetOptions, cond builder.Cond) (Da
defer sess.Close()
datasets := make(DatasetList, 0, opts.PageSize)
+ selectColumnsSql := "distinct dataset.id,dataset.title, dataset.status, dataset.category, dataset.description, dataset.download_times, dataset.license, dataset.task, dataset.release_id, dataset.user_id, dataset.repo_id, dataset.created_unix,dataset.updated_unix,dataset.num_stars"
- count, err := sess.Join("INNER", "repository", "repository.id = dataset.repo_id").Where(cond).Count(new(Dataset))
+ count, err := sess.Distinct("dataset.id").Join("INNER", "repository", "repository.id = dataset.repo_id").
+ Join("INNER", "attachment", "attachment.dataset_id=dataset.id").
+ Where(cond).Count(new(Dataset))
if err != nil {
return nil, 0, fmt.Errorf("Count: %v", err)
}
- sess.Select("dataset.*").Join("INNER", "repository", "repository.id = dataset.repo_id").Where(cond).OrderBy(opts.SearchOrderBy.String())
+ sess.Select(selectColumnsSql).Join("INNER", "repository", "repository.id = dataset.repo_id").
+ Join("INNER", "attachment", "attachment.dataset_id=dataset.id").
+ Where(cond).OrderBy(opts.SearchOrderBy.String())
+
if opts.PageSize > 0 {
sess.Limit(opts.PageSize, (opts.Page-1)*opts.PageSize)
}
@@ -231,13 +260,23 @@ func getDatasetAttachments(e Engine, typeCloudBrain int, isSigned bool, user *Us
sort.Sort(sortedRels)
// Select attachments
- err = e.
- Asc("dataset_id").
- In("dataset_id", sortedRels.ID).
- And("type = ?", typeCloudBrain).
- Find(&attachments, Attachment{})
- if err != nil {
- return err
+	query := e.
+		Asc("dataset_id").
+		In("dataset_id", sortedRels.ID)
+	if typeCloudBrain != TypeCloudBrainAll {
+		query = query.And("type = ?", typeCloudBrain)
+	}
+	if err = query.Find(&attachments, Attachment{}); err != nil {
+		return err
}
// merge join
@@ -301,9 +340,6 @@ func GetDatasetByID(id int64) (*Dataset, error) {
}
func GetDatasetByRepo(repo *Repository) (*Dataset, error) {
- if err := CreateDefaultDatasetToRepo(repo); err != nil {
- return nil, err
- }
dataset := &Dataset{RepoID: repo.ID}
has, err := x.Get(dataset)
if err != nil {
@@ -312,10 +348,16 @@ func GetDatasetByRepo(repo *Repository) (*Dataset, error) {
if has {
return dataset, nil
} else {
- return nil, errors.New("Not Found")
+ return nil, ErrNotExist{repo.ID}
}
}
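+
+// GetDatasetStarByUser lists the dataset star records created by the given user.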
+func GetDatasetStarByUser(user *User) ([]*DatasetStar, error) {
+ datasetStars := make([]*DatasetStar, 0)
+ err := x.Cols("id", "uid", "dataset_id", "created_unix").Where("uid=?", user.ID).Find(&datasetStars)
+ return datasetStars, err
+}
+
func DeleteDataset(datasetID int64, uid int64) error {
var err error
sess := x.NewSession()
diff --git a/models/dataset_star.go b/models/dataset_star.go
new file mode 100644
index 000000000..4b22c2855
--- /dev/null
+++ b/models/dataset_star.go
@@ -0,0 +1,70 @@
+package models
+
+import "code.gitea.io/gitea/modules/timeutil"
+
+type DatasetStar struct {
+ ID int64 `xorm:"pk autoincr"`
+ UID int64 `xorm:"UNIQUE(s)"`
+ DatasetID int64 `xorm:"UNIQUE(s)"`
+ CreatedUnix timeutil.TimeStamp `xorm:"created"`
+}
+
+// StarDataset stars or unstars a dataset and keeps the denormalized star
+// counters on dataset and user in sync.
+func StarDataset(userID, datasetID int64, star bool) error {
+ sess := x.NewSession()
+ defer sess.Close()
+
+ if err := sess.Begin(); err != nil {
+ return err
+ }
+
+ if star {
+ if isDatasetStaring(sess, userID, datasetID) {
+ return nil
+ }
+
+ if _, err := sess.Insert(&DatasetStar{UID: userID, DatasetID: datasetID}); err != nil {
+ return err
+ }
+ if _, err := sess.Exec("UPDATE `dataset` SET num_stars = num_stars + 1 WHERE id = ?", datasetID); err != nil {
+ return err
+ }
+ if _, err := sess.Exec("UPDATE `user` SET num_dataset_stars = num_dataset_stars + 1 WHERE id = ?", userID); err != nil {
+ return err
+ }
+ } else {
+ if !isDatasetStaring(sess, userID, datasetID) {
+ return nil
+ }
+
+ if _, err := sess.Delete(&DatasetStar{0, userID, datasetID, 0}); err != nil {
+ return err
+ }
+ if _, err := sess.Exec("UPDATE `dataset` SET num_stars = num_stars - 1 WHERE id = ?", datasetID); err != nil {
+ return err
+ }
+ if _, err := sess.Exec("UPDATE `user` SET num_dataset_stars = num_dataset_stars - 1 WHERE id = ?", userID); err != nil {
+ return err
+ }
+ }
+
+ return sess.Commit()
+}
+
+func IsDatasetStaringByRepoId(userID, repoID int64) bool {
+ dataset, _ := GetDatasetByRepo(&Repository{ID: repoID})
+ if dataset == nil {
+ return false
+ }
+ return isDatasetStaring(x, userID, dataset.ID)
+}
+
+func IsDatasetStaring(userID, datasetID int64) bool {
+	return isDatasetStaring(x, userID, datasetID)
+}
+
+func isDatasetStaring(e Engine, userID, datasetID int64) bool {
+ has, _ := e.Get(&DatasetStar{0, userID, datasetID, 0})
+ return has
+}
diff --git a/models/dbsql/dataset_foreigntable_for_es.sql b/models/dbsql/dataset_foreigntable_for_es.sql
new file mode 100644
index 000000000..815b89d02
--- /dev/null
+++ b/models/dbsql/dataset_foreigntable_for_es.sql
@@ -0,0 +1,186 @@
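+-- dataset_es is a foreign table over the Elasticsearch index dataset-es-index
+-- (via the multicorn FDW); the statements below seed it and keep it in sync
+-- with public datasets and their public attachments.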
+DROP FOREIGN TABLE IF EXISTS public.dataset_es;
+CREATE FOREIGN TABLE public.dataset_es
+(
+ id bigint NOT NULL,
+ title character varying(255),
+ status integer,
+ category character varying(255),
+ description text,
+ download_times bigint,
+ license character varying(255),
+ task character varying(255),
+ release_id bigint,
+ user_id bigint,
+ repo_id bigint,
+ created_unix bigint,
+ updated_unix bigint,
+ file_name text,
+ file_desc text
+)SERVER multicorn_es
+OPTIONS
+ (
+ host '192.168.207.94',
+ port '9200',
+ index 'dataset-es-index',
+ rowid_column 'id',
+ default_sort '_id'
+ )
+;
+DELETE FROM public.dataset_es;
+ INSERT INTO public.dataset_es(
+ id,
+ title,
+ status,
+ category,
+ description,
+ download_times,
+ license, task,
+ release_id,
+ user_id,
+ repo_id,
+ created_unix,
+ updated_unix,
+ file_name,
+ file_desc
+ )
+ SELECT
+ b.id,
+ b.title,
+ b.status,
+ b.category,
+ b.description,
+ b.download_times,
+ b.license,
+ b.task,
+ b.release_id,
+ b.user_id,
+ b.repo_id,
+ b.created_unix,
+ b.updated_unix,
+ (select array_to_string(array_agg(name order by created_unix desc),'-#,#-') from public.attachment a where a.dataset_id=b.id and a.is_private=false),
+ (select array_to_string(array_agg(description order by created_unix desc),'-#,#-') from public.attachment a where a.dataset_id=b.id and a.is_private=false)
+ FROM public.dataset b,public.repository c where b.repo_id=c.id and c.is_private=false;
+
+
+DROP TRIGGER IF EXISTS es_insert_dataset on public.dataset;
+
+CREATE OR REPLACE FUNCTION public.insert_dataset_data() RETURNS trigger AS
+$def$
+ DECLARE
+ privateValue boolean=false;
+ BEGIN
+ select into privateValue is_private from public.repository where id=NEW.repo_id;
+ if not privateValue then
+ INSERT INTO public.dataset_es(
+ id,
+ title,
+ status,
+ category,
+ description,
+ download_times,
+ license,
+ task,
+ release_id,
+ user_id,
+ repo_id,
+ created_unix,
+ updated_unix)
+ VALUES (
+ NEW.id,
+ NEW.title,
+ NEW.status,
+ NEW.category,
+ NEW.description,
+ NEW.download_times,
+ NEW.license,
+ NEW.task,
+ NEW.release_id,
+ NEW.user_id,
+ NEW.repo_id,
+ NEW.created_unix,
+ NEW.updated_unix
+ );
+ end if;
+ RETURN NEW;
+ END;
+$def$
+LANGUAGE plpgsql;
+
+
+
+CREATE TRIGGER es_insert_dataset
+ AFTER INSERT ON public.dataset
+ FOR EACH ROW EXECUTE PROCEDURE insert_dataset_data();
+
+ALTER TABLE public.dataset ENABLE ALWAYS TRIGGER es_insert_dataset;
+
+
+DROP TRIGGER IF EXISTS es_udpate_dataset_file_name on public.attachment;
+
+CREATE OR REPLACE FUNCTION public.udpate_dataset_file_name() RETURNS trigger AS
+$def$
+ BEGIN
+ if (TG_OP = 'UPDATE') then
+ update public.dataset_es SET file_desc=(select array_to_string(array_agg(description order by created_unix desc),'-#,#-') from public.attachment where dataset_id=NEW.dataset_id and is_private=false) where id=NEW.dataset_id;
+ elsif (TG_OP = 'INSERT') then
+ update public.dataset_es SET file_name=(select array_to_string(array_agg(name order by created_unix desc),'-#,#-') from public.attachment where dataset_id=NEW.dataset_id and is_private=false) where id=NEW.dataset_id;
+ elsif (TG_OP = 'DELETE') then
+ update public.dataset_es SET file_name=(select array_to_string(array_agg(name order by created_unix desc),'-#,#-') from public.attachment where dataset_id=OLD.dataset_id and is_private=false) where id=OLD.dataset_id;
+ update public.dataset_es SET file_desc=(select array_to_string(array_agg(description order by created_unix desc),'-#,#-') from public.attachment where dataset_id=OLD.dataset_id and is_private=false) where id=OLD.dataset_id;
+ end if;
+ return NEW;
+ END;
+$def$
+LANGUAGE plpgsql;
+
+
+CREATE TRIGGER es_udpate_dataset_file_name
+ AFTER INSERT OR UPDATE OR DELETE ON public.attachment
+ FOR EACH ROW EXECUTE PROCEDURE udpate_dataset_file_name();
+
+ALTER TABLE public.attachment ENABLE ALWAYS TRIGGER es_udpate_dataset_file_name;
+
+DROP TRIGGER IF EXISTS es_update_dataset on public.dataset;
+
+CREATE OR REPLACE FUNCTION public.update_dataset() RETURNS trigger AS
+$def$
+ BEGIN
+ UPDATE public.dataset_es
+ SET description=NEW.description,
+ title=NEW.title,
+ category=NEW.category,
+ task=NEW.task,
+ download_times=NEW.download_times,
+ updated_unix=NEW.updated_unix,
+ file_name=(select array_to_string(array_agg(name order by created_unix desc),'-#,#-') from public.attachment where dataset_id=NEW.id and is_private=false),
+ file_desc=(select array_to_string(array_agg(description order by created_unix desc),'-#,#-') from public.attachment where dataset_id=NEW.id and is_private=false)
+ where id=NEW.id;
+ return new;
+ END
+$def$
+LANGUAGE plpgsql;
+
+CREATE TRIGGER es_update_dataset
+ AFTER UPDATE ON public.dataset
+ FOR EACH ROW EXECUTE PROCEDURE update_dataset();
+
+ALTER TABLE public.dataset ENABLE ALWAYS TRIGGER es_update_dataset;
+
+DROP TRIGGER IF EXISTS es_delete_dataset on public.dataset;
+
+CREATE OR REPLACE FUNCTION public.delete_dataset() RETURNS trigger AS
+$def$
+ declare
+ BEGIN
+ DELETE FROM public.dataset_es where id=OLD.id;
+ return new;
+ END
+$def$
+LANGUAGE plpgsql;
+
+
+CREATE TRIGGER es_delete_dataset
+ AFTER DELETE ON public.dataset
+ FOR EACH ROW EXECUTE PROCEDURE delete_dataset();
+
+ALTER TABLE public.dataset ENABLE ALWAYS TRIGGER es_delete_dataset;
diff --git a/models/dbsql/issue_foreigntable_for_es.sql b/models/dbsql/issue_foreigntable_for_es.sql
new file mode 100644
index 000000000..bb5c1634e
--- /dev/null
+++ b/models/dbsql/issue_foreigntable_for_es.sql
@@ -0,0 +1,215 @@
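+-- issue_es mirrors issues of public repositories into the Elasticsearch index
+-- issue-es-index; comment text and the linked pull-request id are denormalized
+-- into each row.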
+DROP FOREIGN TABLE IF EXISTS public.issue_es;
+CREATE FOREIGN TABLE public.issue_es
+(
+ id bigint NOT NULL,
+ repo_id bigint,
+ index bigint,
+ poster_id bigint,
+ original_author character varying(255),
+ original_author_id bigint,
+ name character varying(255) ,
+ content text,
+ comment text,
+ milestone_id bigint,
+ priority integer,
+ is_closed boolean,
+ is_pull boolean,
+ pr_id bigint,
+ num_comments integer,
+ ref character varying(255),
+ deadline_unix bigint,
+ created_unix bigint,
+ updated_unix bigint,
+ closed_unix bigint,
+ is_locked boolean NOT NULL,
+ amount bigint,
+ is_transformed boolean NOT NULL
+)SERVER multicorn_es
+OPTIONS
+ (
+ host '192.168.207.94',
+ port '9200',
+ index 'issue-es-index',
+ rowid_column 'id',
+ default_sort '_id'
+ )
+;
+
+delete from public.issue_es;
+INSERT INTO public.issue_es(
+ id,
+ repo_id,
+ index,
+ poster_id,
+ original_author,
+ original_author_id,
+ name,
+ content,
+ milestone_id,
+ priority,
+ is_closed,
+ is_pull,
+ num_comments,
+ ref,
+ deadline_unix,
+ created_unix,
+ updated_unix,
+ closed_unix,
+ is_locked,
+ amount,
+ is_transformed,comment,pr_id)
+ SELECT
+ b.id,
+ b.repo_id,
+ b.index,
+ b.poster_id,
+ b.original_author,
+ b.original_author_id,
+ b.name,
+ b.content,
+ b.milestone_id,
+ b.priority,
+ b.is_closed,
+ b.is_pull,
+ b.num_comments,
+ b.ref,
+ b.deadline_unix,
+ b.created_unix,
+ b.updated_unix,
+ b.closed_unix,
+ b.is_locked,
+ b.amount,
+ b.is_transformed,
+ (select array_to_string(array_agg(content order by created_unix desc),',') from public.comment a where a.issue_id=b.id),
+ (select id from public.pull_request d where b.id=d.issue_id and b.is_pull=true)
+ FROM public.issue b,public.repository c where b.repo_id=c.id and c.is_private=false;
+
+
+CREATE OR REPLACE FUNCTION public.insert_issue_data() RETURNS trigger AS
+$def$
+ DECLARE
+ privateValue boolean=false;
+ BEGIN
+ select into privateValue is_private from public.repository where id=NEW.repo_id;
+ if not privateValue then
+ INSERT INTO public.issue_es(
+ id,
+ repo_id,
+ index,
+ poster_id,
+ original_author,
+ original_author_id,
+ name,
+ content,
+ milestone_id,
+ priority,
+ is_closed,
+ is_pull,
+ num_comments,
+ ref,
+ deadline_unix,
+ created_unix,
+ updated_unix,
+ closed_unix,
+ is_locked,
+ amount,
+ is_transformed)
+ VALUES (
+ NEW.id,
+ NEW.repo_id,
+ NEW.index,
+ NEW.poster_id,
+ NEW.original_author,
+ NEW.original_author_id,
+ NEW.name,
+ NEW.content,
+ NEW.milestone_id,
+ NEW.priority,
+ NEW.is_closed,
+ NEW.is_pull,
+ NEW.num_comments,
+ NEW.ref,
+ NEW.deadline_unix,
+ NEW.created_unix,
+ NEW.updated_unix,
+ NEW.closed_unix,
+ NEW.is_locked,
+ NEW.amount,
+ NEW.is_transformed
+ );
+ end if;
+ RETURN NEW;
+ END;
+$def$
+LANGUAGE plpgsql;
+
+DROP TRIGGER IF EXISTS es_insert_issue on public.issue;
+
+CREATE TRIGGER es_insert_issue
+ AFTER INSERT ON public.issue
+ FOR EACH ROW EXECUTE PROCEDURE insert_issue_data();
+
+ALTER TABLE public.issue ENABLE ALWAYS TRIGGER es_insert_issue;
+
+CREATE OR REPLACE FUNCTION public.udpate_issue_comment() RETURNS trigger AS
+$def$
+ BEGIN
+ if (TG_OP = 'DELETE') then
+ update public.issue_es SET comment=(select array_to_string(array_agg(content order by created_unix desc),',') from public.comment where issue_id=OLD.issue_id) where id=OLD.issue_id;
+ elsif (TG_OP = 'UPDATE') then
+ update public.issue_es SET comment=(select array_to_string(array_agg(content order by created_unix desc),',') from public.comment where issue_id=NEW.issue_id) where id=NEW.issue_id;
+ end if;
+
+ return null;
+ END;
+$def$
+LANGUAGE plpgsql;
+
+DROP TRIGGER IF EXISTS es_udpate_issue_comment on public.comment;
+CREATE TRIGGER es_udpate_issue_comment
+ AFTER DELETE OR UPDATE ON public.comment
+ FOR EACH ROW EXECUTE PROCEDURE udpate_issue_comment();
+
+ALTER TABLE public.comment ENABLE ALWAYS TRIGGER es_udpate_issue_comment;
+
+
+CREATE OR REPLACE FUNCTION public.update_issue() RETURNS trigger AS
+$def$
+ declare
+ BEGIN
+ UPDATE public.issue_es
+ SET content=NEW.content,
+ name=NEW.name,
+ is_closed=NEW.is_closed,
+ num_comments=NEW.num_comments,
+ comment=(select array_to_string(array_agg(content order by created_unix desc),',') from public.comment where issue_id=NEW.id)
+ where id=NEW.id;
+ return new;
+ END
+$def$
+LANGUAGE plpgsql;
+
+DROP TRIGGER IF EXISTS es_update_issue on public.issue;
+
+CREATE TRIGGER es_update_issue
+ AFTER UPDATE ON public.issue
+ FOR EACH ROW EXECUTE PROCEDURE update_issue();
+
+ALTER TABLE public.issue ENABLE ALWAYS TRIGGER es_update_issue;
+
+CREATE OR REPLACE FUNCTION public.delete_issue() RETURNS trigger AS
+$def$
+ declare
+ BEGIN
+ DELETE FROM public.issue_es where id=OLD.id;
+ return new;
+ END
+$def$
+LANGUAGE plpgsql;
+
+DROP TRIGGER IF EXISTS es_delete_issue on public.issue;
+CREATE TRIGGER es_delete_issue
+ AFTER DELETE ON public.issue
+ FOR EACH ROW EXECUTE PROCEDURE delete_issue();
+
+ALTER TABLE public.issue ENABLE ALWAYS TRIGGER es_delete_issue;
\ No newline at end of file
diff --git a/models/dbsql/repo_foreigntable_for_es.sql b/models/dbsql/repo_foreigntable_for_es.sql
new file mode 100644
index 000000000..f51155ccf
--- /dev/null
+++ b/models/dbsql/repo_foreigntable_for_es.sql
@@ -0,0 +1,532 @@
+-- Handle repositories that switch from private to public, and from public to private
+DROP FOREIGN table if exists public.repository_es;
+CREATE FOREIGN TABLE public.repository_es (
+ id bigint NOT NULL,
+ owner_id bigint,
+ owner_name character varying(255),
+ lower_name character varying(255) NOT NULL,
+ name character varying(255) NOT NULL,
+ description text,
+ website character varying(2048),
+ original_service_type integer,
+ original_url character varying(2048),
+ default_branch character varying(255),
+ num_watches integer,
+ num_stars integer,
+ num_forks integer,
+ num_issues integer,
+ num_closed_issues integer,
+ num_pulls integer,
+ num_closed_pulls integer,
+ num_milestones integer DEFAULT 0 NOT NULL,
+ num_closed_milestones integer DEFAULT 0 NOT NULL,
+ is_private boolean,
+ is_empty boolean,
+ is_archived boolean,
+ is_mirror boolean,
+ status integer DEFAULT 0 NOT NULL,
+ is_fork boolean DEFAULT false NOT NULL,
+ fork_id bigint,
+ is_template boolean DEFAULT false NOT NULL,
+ template_id bigint,
+ size bigint DEFAULT 0 NOT NULL,
+ is_fsck_enabled boolean DEFAULT true NOT NULL,
+ close_issues_via_commit_in_any_branch boolean DEFAULT false NOT NULL,
+ topics text,
+ avatar character varying(64),
+ created_unix bigint,
+ updated_unix bigint,
+ contract_address character varying(255),
+ block_chain_status integer DEFAULT 0 NOT NULL,
+ balance character varying(255) DEFAULT '0'::character varying NOT NULL,
+ clone_cnt bigint DEFAULT 0 NOT NULL,
+ license character varying(100),
+ download_cnt bigint DEFAULT 0 NOT NULL,
+ num_commit bigint DEFAULT 0 NOT NULL,
+ git_clone_cnt bigint DEFAULT 0 NOT NULL,
+ creator_id bigint NOT NULL DEFAULT 0,
+ repo_type integer NOT NULL DEFAULT 0,
+ lang character varying(2048),
+ alias character varying(255),
+ lower_alias character varying(255)
+) SERVER multicorn_es
+OPTIONS
+ (
+ host '192.168.207.94',
+ port '9200',
+ index 'repository-es-index',
+ rowid_column 'id',
+ default_sort '_id'
+ )
+;
+delete from public.repository_es;
+ INSERT INTO public.repository_es (id,
+ owner_id,
+ owner_name,
+ lower_name,
+ name,
+ description,
+ website,
+ original_service_type,
+ original_url,
+ default_branch,
+ num_watches,
+ num_stars,
+ num_forks,
+ num_issues,
+ num_closed_issues,
+ num_pulls,
+ num_closed_pulls,
+ num_milestones,
+ num_closed_milestones,
+ is_private,
+ is_empty,
+ is_archived,
+ is_mirror,
+ status,
+ is_fork,
+ fork_id,
+ is_template,
+ template_id,
+ size,
+ is_fsck_enabled,
+ close_issues_via_commit_in_any_branch,
+ topics,
+ avatar,
+ created_unix,
+ updated_unix,
+ contract_address,
+ block_chain_status,
+ balance,
+ clone_cnt,
+ num_commit,
+ git_clone_cnt,
+ creator_id,
+ repo_type,
+ lang,
+ alias,
+ lower_alias
+ )
+ SELECT
+ id,
+ owner_id,
+ owner_name,
+ lower_name,
+ name,
+ description,
+ website,
+ original_service_type,
+ original_url,
+ default_branch,
+ num_watches,
+ num_stars,
+ num_forks,
+ num_issues,
+ num_closed_issues,
+ num_pulls,
+ num_closed_pulls,
+ num_milestones,
+ num_closed_milestones,
+ is_private,
+ is_empty,
+ is_archived,
+ is_mirror,
+ status,
+ is_fork,
+ fork_id,
+ is_template,
+ template_id,
+ size,
+ is_fsck_enabled,
+ close_issues_via_commit_in_any_branch,
+ topics,
+ avatar,
+ created_unix,
+ updated_unix,
+ contract_address,
+ block_chain_status,
+ balance,
+ clone_cnt,
+ num_commit,
+ git_clone_cnt,
+ creator_id,
+ repo_type,
+ (select array_to_string(array_agg(language order by percentage desc),',') from public.language_stat a where a.repo_id=b.id),
+ alias,
+ lower_alias
+ FROM public.repository b where b.is_private=false;
+
+DROP TRIGGER IF EXISTS es_insert_repository on public.repository;
+
+CREATE OR REPLACE FUNCTION public.insert_repository_data() RETURNS trigger AS
+$def$
+ BEGIN
+ if not NEW.is_private then
+ INSERT INTO public.repository_es (id,
+ owner_id,
+ owner_name,
+ lower_name,
+ name,
+ description,
+ website,
+ original_service_type,
+ original_url,
+ default_branch,
+ num_watches,
+ num_stars,
+ num_forks,
+ num_issues,
+ num_closed_issues,
+ num_pulls,
+ num_closed_pulls,
+ num_milestones,
+ num_closed_milestones,
+ is_private,
+ is_empty,
+ is_archived,
+ is_mirror,
+ status,
+ is_fork,
+ fork_id,
+ is_template,
+ template_id,
+ size,
+ is_fsck_enabled,
+ close_issues_via_commit_in_any_branch,
+ topics,
+ avatar,
+ created_unix,
+ updated_unix,
+ contract_address,
+ block_chain_status,
+ balance,
+ clone_cnt,
+ num_commit,
+ git_clone_cnt,
+ creator_id,
+ repo_type,
+ alias,
+ lower_alias) VALUES
+ (NEW.id,
+ NEW.owner_id,
+ NEW.owner_name,
+ NEW.lower_name,
+ NEW.name,
+ NEW.description,
+ NEW.website,
+ NEW.original_service_type,
+ NEW.original_url,
+ NEW.default_branch,
+ NEW.num_watches,
+ NEW.num_stars,
+ NEW.num_forks,
+ NEW.num_issues,
+ NEW.num_closed_issues,
+ NEW.num_pulls,
+ NEW.num_closed_pulls,
+ NEW.num_milestones,
+ NEW.num_closed_milestones,
+ NEW.is_private,
+ NEW.is_empty,
+ NEW.is_archived,
+ NEW.is_mirror,
+ NEW.status,
+ NEW.is_fork,
+ NEW.fork_id,
+ NEW.is_template,
+ NEW.template_id,
+ NEW.size,
+ NEW.is_fsck_enabled,
+ NEW.close_issues_via_commit_in_any_branch,
+ NEW.topics,
+ NEW.avatar,
+ NEW.created_unix,
+ NEW.updated_unix,
+ NEW.contract_address,
+ NEW.block_chain_status,
+ NEW.balance,
+ NEW.clone_cnt,
+ NEW.num_commit,
+ NEW.git_clone_cnt,
+ NEW.creator_id,
+ NEW.repo_type,
+ NEW.alias,
+ NEW.lower_alias);
+ end if;
+ RETURN NEW;
+ END;
+$def$
+LANGUAGE plpgsql;
+
+
+CREATE TRIGGER es_insert_repository
+ AFTER INSERT ON public.repository
+ FOR EACH ROW EXECUTE PROCEDURE insert_repository_data();
+
+ALTER TABLE public.repository ENABLE ALWAYS TRIGGER es_insert_repository;
+
+DROP TRIGGER IF EXISTS es_update_repository on public.repository;
+
+CREATE OR REPLACE FUNCTION public.update_repository() RETURNS trigger AS
+$def$
+ BEGIN
+ if OLD.is_private != NEW.is_private then
+ if OLD.is_private and not NEW.is_private then
+ --insert
+ INSERT INTO public.repository_es (id,
+ owner_id,
+ owner_name,
+ lower_name,
+ name,
+ description,
+ website,
+ original_service_type,
+ original_url,
+ default_branch,
+ num_watches,
+ num_stars,
+ num_forks,
+ num_issues,
+ num_closed_issues,
+ num_pulls,
+ num_closed_pulls,
+ num_milestones,
+ num_closed_milestones,
+ is_private,
+ is_empty,
+ is_archived,
+ is_mirror,
+ status,
+ is_fork,
+ fork_id,
+ is_template,
+ template_id,
+ size,
+ is_fsck_enabled,
+ close_issues_via_commit_in_any_branch,
+ topics,
+ avatar,
+ created_unix,
+ updated_unix,
+ contract_address,
+ block_chain_status,
+ balance,
+ clone_cnt,
+ num_commit,
+ git_clone_cnt,
+ creator_id,
+ repo_type,
+ lang,
+ alias,
+ lower_alias)
+ SELECT
+ id,
+ owner_id,
+ owner_name,
+ lower_name,
+ name,
+ description,
+ website,
+ original_service_type,
+ original_url,
+ default_branch,
+ num_watches,
+ num_stars,
+ num_forks,
+ num_issues,
+ num_closed_issues,
+ num_pulls,
+ num_closed_pulls,
+ num_milestones,
+ num_closed_milestones,
+ is_private,
+ is_empty,
+ is_archived,
+ is_mirror,
+ status,
+ is_fork,
+ fork_id,
+ is_template,
+ template_id,
+ size,
+ is_fsck_enabled,
+ close_issues_via_commit_in_any_branch,
+ topics,
+ avatar,
+ created_unix,
+ updated_unix,
+ contract_address,
+ block_chain_status,
+ balance,
+ clone_cnt,
+ num_commit,
+ git_clone_cnt,
+ creator_id,
+ repo_type,
+ (select array_to_string(array_agg(language order by percentage desc),',') from public.language_stat a where a.repo_id=b.id),
+ alias,
+ lower_alias
+ FROM public.repository b where b.id=NEW.id;
+ INSERT INTO public.dataset_es(
+ id,
+ title,
+ status,
+ category,
+ description,
+ download_times,
+ license, task,
+ release_id,
+ user_id,
+ repo_id,
+ created_unix,
+ updated_unix,file_name)
+ SELECT
+ b.id,
+ b.title,
+ b.status,
+ b.category,
+ b.description,
+ b.download_times,
+ b.license,
+ b.task,
+ b.release_id,
+ b.user_id,
+ b.repo_id,
+ b.created_unix,
+ b.updated_unix,(select array_to_string(array_agg(name order by created_unix desc),',') from public.attachment a where a.dataset_id=b.id and a.is_private=false)
+ FROM public.dataset b where b.repo_id=NEW.id;
+
+ INSERT INTO public.issue_es(
+ id,
+ repo_id,
+ index,
+ poster_id,
+ original_author,
+ original_author_id,
+ name,
+ content,
+ milestone_id,
+ priority,
+ is_closed,
+ is_pull,
+ num_comments,
+ ref,
+ deadline_unix,
+ created_unix,
+ updated_unix,
+ closed_unix,
+ is_locked,
+ amount,
+ is_transformed,comment,pr_id)
+ SELECT
+ b.id,
+ b.repo_id,
+ b.index,
+ b.poster_id,
+ b.original_author,
+ b.original_author_id,
+ b.name,
+ b.content,
+ b.milestone_id,
+ b.priority,
+ b.is_closed,
+ b.is_pull,
+ b.num_comments,
+ b.ref,
+ b.deadline_unix,
+ b.created_unix,
+ b.updated_unix,
+ b.closed_unix,
+ b.is_locked,
+ b.amount,
+ b.is_transformed,
+ (select array_to_string(array_agg(content order by created_unix desc),',') from public.comment a where a.issue_id=b.id),
+ (select id from public.pull_request d where d.issue_id=b.id)
+ FROM public.issue b where b.repo_id=NEW.id;
+
+ end if;
+
+ if not OLD.is_private and NEW.is_private then
+ delete from public.issue_es where repo_id=NEW.id;
+ delete from public.dataset_es where repo_id=NEW.id;
+ delete from public.repository_es where id=NEW.id;
+ end if;
+
+ end if;
+
+ if not NEW.is_private then
+ raise notice 'update repo,the updated_unix is %',NEW.updated_unix;
+ update public.repository_es SET description=NEW.description,
+ name=NEW.name,
+ lower_name=NEW.lower_name,
+ owner_name=NEW.owner_name,
+ website=NEW.website,
+ updated_unix=NEW.updated_unix,
+ num_watches=NEW.num_watches,
+ num_stars=NEW.num_stars,
+ num_forks=NEW.num_forks,
+ topics=NEW.topics,
+ alias = NEW.alias,
+ lower_alias = NEW.lower_alias,
+ avatar=NEW.avatar
+ where id=NEW.id;
+ end if;
+ return new;
+ END
+$def$
+LANGUAGE plpgsql;
+
+CREATE TRIGGER es_update_repository
+ AFTER UPDATE ON public.repository
+ FOR EACH ROW EXECUTE PROCEDURE update_repository();
+
+ALTER TABLE public.repository ENABLE ALWAYS TRIGGER es_update_repository;
+
+
+DROP TRIGGER IF EXISTS es_delete_repository on public.repository;
+
+CREATE OR REPLACE FUNCTION public.delete_repository() RETURNS trigger AS
+$def$
+ declare
+ BEGIN
+ delete from public.issue_es where repo_id=OLD.id;
+ delete from public.dataset_es where repo_id=OLD.id;
+ DELETE FROM public.repository_es where id=OLD.id;
+ return new;
+ END
+$def$
+LANGUAGE plpgsql;
+
+
+CREATE TRIGGER es_delete_repository
+ AFTER DELETE ON public.repository
+ FOR EACH ROW EXECUTE PROCEDURE delete_repository();
+
+ALTER TABLE public.repository ENABLE ALWAYS TRIGGER es_delete_repository;
+
+
+
+DROP TRIGGER IF EXISTS es_udpate_repository_lang on public.language_stat;
+
+CREATE OR REPLACE FUNCTION public.udpate_repository_lang() RETURNS trigger AS
+$def$
+ BEGIN
+ if (TG_OP = 'UPDATE') then
+ update public.repository_es SET lang=(select array_to_string(array_agg(language order by percentage desc),',') from public.language_stat where repo_id=NEW.repo_id) where id=NEW.repo_id;
+ elsif (TG_OP = 'INSERT') then
+ update public.repository_es SET lang=(select array_to_string(array_agg(language order by percentage desc),',') from public.language_stat where repo_id=NEW.repo_id) where id=NEW.repo_id;
+ elsif (TG_OP = 'DELETE') then
+ if exists(select 1 from public.repository where id=OLD.repo_id) then
+ update public.repository_es SET lang=(select array_to_string(array_agg(language order by percentage desc),',') from public.language_stat where repo_id=OLD.repo_id) where id=OLD.repo_id;
+ end if;
+ end if;
+ return null;
+ END;
+$def$
+LANGUAGE plpgsql;
+
+CREATE TRIGGER es_udpate_repository_lang
+ AFTER INSERT OR UPDATE OR DELETE ON public.language_stat
+ FOR EACH ROW EXECUTE PROCEDURE udpate_repository_lang();
+
+ALTER TABLE public.language_stat ENABLE ALWAYS TRIGGER es_udpate_repository_lang;
\ No newline at end of file
diff --git a/models/dbsql/user_foreigntable_for_es.sql b/models/dbsql/user_foreigntable_for_es.sql
new file mode 100644
index 000000000..c3d21b92a
--- /dev/null
+++ b/models/dbsql/user_foreigntable_for_es.sql
@@ -0,0 +1,308 @@
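+-- user_es mirrors the user table into the Elasticsearch index user-es-index;
+-- the triggers below keep inserts, updates and deletes in sync.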
+DROP FOREIGN table if exists public.user_es;
+CREATE FOREIGN TABLE public.user_es
+(
+ id bigint NOT NULL ,
+ lower_name character varying(255) NULL,
+ name character varying(255) NULL,
+ full_name character varying(255),
+ email character varying(255),
+ keep_email_private boolean,
+ email_notifications_preference character varying(20) ,
+ passwd character varying(255) ,
+ passwd_hash_algo character varying(255) ,
+ must_change_password boolean NOT NULL DEFAULT false,
+ login_type integer,
+ login_source bigint NOT NULL DEFAULT 0,
+ login_name character varying(255) ,
+ type integer,
+ location character varying(255),
+ website character varying(255),
+ rands character varying(10),
+ salt character varying(10),
+ language character varying(5),
+ description character varying(255),
+ created_unix bigint,
+ updated_unix bigint,
+ last_login_unix bigint,
+ last_repo_visibility boolean,
+ max_repo_creation integer,
+ is_active boolean,
+ is_admin boolean,
+ is_restricted boolean NOT NULL DEFAULT false,
+ allow_git_hook boolean,
+ allow_import_local boolean,
+ allow_create_organization boolean DEFAULT true,
+ prohibit_login boolean NOT NULL DEFAULT false,
+ avatar character varying(2048) ,
+ avatar_email character varying(255),
+ use_custom_avatar boolean,
+ num_followers integer,
+ num_following integer NOT NULL DEFAULT 0,
+ num_stars integer,
+ num_repos integer,
+ num_teams integer,
+ num_members integer,
+ visibility integer NOT NULL DEFAULT 0,
+ repo_admin_change_team_access boolean NOT NULL DEFAULT false,
+ diff_view_style character varying(255),
+ theme character varying(255),
+ token character varying(1024) ,
+ public_key character varying(255),
+ private_key character varying(255),
+ is_operator boolean NOT NULL DEFAULT false,
+ num_dataset_stars integer NOT NULL DEFAULT 0
+) SERVER multicorn_es
+OPTIONS
+ (
+ host '192.168.207.94',
+ port '9200',
+ index 'user-es-index',
+ rowid_column 'id',
+ default_sort '_id'
+ )
+;
+delete from public.user_es;
+ INSERT INTO public.user_es(
+ id,
+ lower_name,
+ name,
+ full_name,
+ email,
+ keep_email_private,
+ email_notifications_preference,
+ must_change_password,
+ login_type,
+ login_source,
+ login_name,
+ type,
+ location,
+ website,
+ rands,
+ language,
+ description,
+ created_unix,
+ updated_unix,
+ last_login_unix,
+ last_repo_visibility,
+ max_repo_creation,
+ is_active,
+ is_restricted,
+ allow_git_hook,
+ allow_import_local,
+ allow_create_organization,
+ prohibit_login,
+ avatar,
+ avatar_email,
+ use_custom_avatar,
+ num_followers,
+ num_following,
+ num_stars,
+ num_repos,
+ num_teams,
+ num_members,
+ visibility,
+ repo_admin_change_team_access,
+ diff_view_style,
+ theme,
+ is_operator,
+ num_dataset_stars)
+ SELECT
+ id,
+ lower_name,
+ name,
+ full_name,
+ email,
+ keep_email_private,
+ email_notifications_preference,
+ must_change_password,
+ login_type,
+ login_source,
+ login_name,
+ type,
+ location,
+ website,
+ rands,
+ language,
+ description,
+ created_unix,
+ updated_unix,
+ last_login_unix,
+ last_repo_visibility,
+ max_repo_creation,
+ is_active,
+ is_restricted,
+ allow_git_hook,
+ allow_import_local,
+ allow_create_organization,
+ prohibit_login,
+ avatar,
+ avatar_email,
+ use_custom_avatar,
+ num_followers,
+ num_following,
+ num_stars,
+ num_repos,
+ num_teams,
+ num_members,
+ visibility,
+ repo_admin_change_team_access,
+ diff_view_style,
+ theme,
+ is_operator,
+ num_dataset_stars
+ FROM public.user;
+
+DROP TRIGGER IF EXISTS es_insert_user on public.user;
+
+CREATE OR REPLACE FUNCTION public.insert_user_data() RETURNS trigger AS
+$def$
+ BEGIN
+ INSERT INTO public."user_es"(
+ id,
+ lower_name,
+ name,
+ full_name,
+ email,
+ keep_email_private,
+ email_notifications_preference,
+ must_change_password,
+ login_type,
+ login_source,
+ login_name,
+ type,
+ location,
+ website,
+ rands,
+ language,
+ description,
+ created_unix,
+ updated_unix,
+ last_login_unix,
+ last_repo_visibility,
+ max_repo_creation,
+ is_active,
+ is_restricted,
+ allow_git_hook,
+ allow_import_local,
+ allow_create_organization,
+ prohibit_login,
+ avatar,
+ avatar_email,
+ use_custom_avatar,
+ num_followers,
+ num_following,
+ num_stars,
+ num_repos,
+ num_teams,
+ num_members,
+ visibility,
+ repo_admin_change_team_access,
+ diff_view_style,
+ theme,
+ is_operator,
+ num_dataset_stars)
+ VALUES (
+ NEW.id,
+ NEW.lower_name,
+ NEW.name,
+ NEW.full_name,
+ NEW.email,
+ NEW.keep_email_private,
+ NEW.email_notifications_preference,
+ NEW.must_change_password,
+ NEW.login_type,
+ NEW.login_source,
+ NEW.login_name,
+ NEW.type,
+ NEW.location,
+ NEW.website,
+ NEW.rands,
+ NEW.language,
+ NEW.description,
+ NEW.created_unix,
+ NEW.updated_unix,
+ NEW.last_login_unix,
+ NEW.last_repo_visibility,
+ NEW.max_repo_creation,
+ NEW.is_active,
+ NEW.is_restricted,
+ NEW.allow_git_hook,
+ NEW.allow_import_local,
+ NEW.allow_create_organization,
+ NEW.prohibit_login,
+ NEW.avatar,
+ NEW.avatar_email,
+ NEW.use_custom_avatar,
+ NEW.num_followers,
+ NEW.num_following,
+ NEW.num_stars,
+ NEW.num_repos,
+ NEW.num_teams,
+ NEW.num_members,
+ NEW.visibility,
+ NEW.repo_admin_change_team_access,
+ NEW.diff_view_style,
+ NEW.theme,
+ NEW.is_operator,
+ NEW.num_dataset_stars
+ );
+
+ RETURN NEW;
+ END;
+$def$
+LANGUAGE plpgsql;
+
+
+
+CREATE TRIGGER es_insert_user
+ AFTER INSERT ON public.user
+ FOR EACH ROW EXECUTE PROCEDURE insert_user_data();
+
+ALTER TABLE public.user ENABLE ALWAYS TRIGGER es_insert_user;
+
+DROP TRIGGER IF EXISTS es_update_user on public.user;
+
+CREATE OR REPLACE FUNCTION public.update_user() RETURNS trigger AS
+$def$
+ BEGIN
+ UPDATE public.user_es
+ SET description=NEW.description,
+ name=NEW.name,
+ full_name=NEW.full_name,
+ location=NEW.location,
+ website=NEW.website,
+ email=NEW.email,
+ num_dataset_stars=NEW.num_dataset_stars,
+ updated_unix=NEW.updated_unix
+ where id=NEW.id;
+ return new;
+ END
+$def$
+LANGUAGE plpgsql;
+
+
+
+CREATE TRIGGER es_update_user
+ AFTER UPDATE ON public.user
+ FOR EACH ROW EXECUTE PROCEDURE update_user();
+
+ALTER TABLE public.user ENABLE ALWAYS TRIGGER es_update_user;
+
+DROP TRIGGER IF EXISTS es_delete_user on public.user;
+
+CREATE OR REPLACE FUNCTION public.delete_user() RETURNS trigger AS
+$def$
+ declare
+ BEGIN
+ DELETE FROM public.user_es where id=OLD.id;
+ return new;
+ END
+$def$
+LANGUAGE plpgsql;
+
+
+CREATE TRIGGER es_delete_user
+ AFTER DELETE ON public.user
+ FOR EACH ROW EXECUTE PROCEDURE delete_user();
+
+ALTER TABLE public.user ENABLE ALWAYS TRIGGER es_delete_user;
\ No newline at end of file
diff --git a/models/file_chunk.go b/models/file_chunk.go
index 76c926dc5..0fc3a8879 100755
--- a/models/file_chunk.go
+++ b/models/file_chunk.go
@@ -13,11 +13,6 @@ const (
FileUploaded
)
-const (
- TypeCloudBrainOne int = iota
- TypeCloudBrainTwo
-)
-
type FileChunk struct {
ID int64 `xorm:"pk autoincr"`
UUID string `xorm:"uuid UNIQUE"`
diff --git a/models/models.go b/models/models.go
index 02cfb3144..c9aca6114 100755
--- a/models/models.go
+++ b/models/models.go
@@ -129,6 +129,7 @@ func init() {
new(LanguageStat),
new(EmailHash),
new(Dataset),
+ new(DatasetStar),
new(Cloudbrain),
new(FileChunk),
new(BlockChain),
@@ -137,6 +138,7 @@ func init() {
new(OfficialTag),
new(OfficialTagRepos),
new(WechatBindLog),
+ new(SearchRecord),
)
tablesStatistic = append(tablesStatistic,
diff --git a/models/repo.go b/models/repo.go
index 2d1fdacfb..25bfb4a74 100755
--- a/models/repo.go
+++ b/models/repo.go
@@ -6,13 +6,14 @@
package models
import (
- "code.gitea.io/gitea/modules/git"
"context"
"crypto/md5"
"errors"
"fmt"
"html/template"
"math/rand"
+
+ "code.gitea.io/gitea/modules/git"
"xorm.io/xorm"
"code.gitea.io/gitea/modules/blockchain"
@@ -1280,10 +1281,6 @@ func CreateRepository(ctx DBContext, doer, u *User, repo *Repository, opts ...Cr
return fmt.Errorf("copyDefaultWebhooksToRepo: %v", err)
}
- if err = CreateDefaultDatasetToRepo(repo); err != nil {
- return fmt.Errorf("models.CreateDefaultDatasetToRepo: %v", err)
- }
-
return nil
}
@@ -1601,6 +1598,36 @@ func updateRepository(e Engine, repo *Repository, visibilityChanged bool) (err e
if err != nil {
return err
}
+	// If the repo has become private, we also need to set its dataset and dataset files to private
+	_, err = e.Where("repo_id = ? and status <> ?", repo.ID, DatasetStatusDeleted).Cols("status").Update(&Dataset{
+		Status: DatasetStatusPrivate,
+	})
+ if err != nil {
+ return err
+ }
+
+ dataset, err := GetDatasetByRepo(repo)
+ if err != nil && !IsErrNotExist(err) {
+ return err
+ }
+ if dataset != nil {
+ _, err = e.Where("dataset_id = ?", dataset.ID).Cols("is_private").Update(&Attachment{
+ IsPrivate: true,
+ })
+ if err != nil {
+ return err
+ }
+ }
+
+ } else {
+		// If the repo has become public, we also need to set its dataset to public
+		_, err = e.Where("repo_id = ? and status <> ?", repo.ID, DatasetStatusDeleted).Cols("status").Update(&Dataset{
+			Status: DatasetStatusPublic,
+		})
+ if err != nil {
+ return err
+ }
+
}
// Create/Remove git-daemon-export-ok for git-daemon...
@@ -2691,7 +2718,7 @@ func ReadLatestFileInRepo(userName, repoName, refName, treePath string) (*RepoFi
log.Error("ReadLatestFileInRepo error when OpenRepository,error=%v", err)
return nil, err
}
- commitID, err := gitRepo.GetBranchCommitID(refName)
+ _, err = gitRepo.GetBranchCommitID(refName)
if err != nil {
log.Error("ReadLatestFileInRepo error when GetBranchCommitID,error=%v", err)
return nil, err
@@ -2723,5 +2750,9 @@ func ReadLatestFileInRepo(userName, repoName, refName, treePath string) (*RepoFi
if n >= 0 {
buf = buf[:n]
}
- return &RepoFile{CommitId: commitID, Content: buf}, nil
+ commitId := ""
+ if blob != nil {
+ commitId = fmt.Sprint(blob.ID)
+ }
+ return &RepoFile{CommitId: commitId, Content: buf}, nil
}
diff --git a/models/repo_list.go b/models/repo_list.go
index 6fb9380de..5bf0ecf03 100755
--- a/models/repo_list.go
+++ b/models/repo_list.go
@@ -190,7 +190,8 @@ type SearchRepoOptions struct {
// None -> include all repos
// True -> include just courses
// False -> include just no courses
- Course util.OptionalBool
+ Course util.OptionalBool
+ OnlySearchPrivate bool
}
//SearchOrderBy is used to sort the result
@@ -219,12 +220,15 @@ const (
SearchOrderByDownloadTimes SearchOrderBy = "download_times DESC"
SearchOrderByHot SearchOrderBy = "(num_watches + num_stars + num_forks + clone_cnt) DESC"
SearchOrderByActive SearchOrderBy = "(num_issues + num_pulls + num_commit) DESC"
+ SearchOrderByWatches SearchOrderBy = "num_watches DESC"
)
// SearchRepositoryCondition creates a query condition according search repository options
func SearchRepositoryCondition(opts *SearchRepoOptions) builder.Cond {
var cond = builder.NewCond()
-
+ if opts.OnlySearchPrivate {
+ cond = cond.And(builder.Eq{"is_private": true})
+ }
if opts.Private {
if opts.Actor != nil && !opts.Actor.IsAdmin && opts.Actor.ID != opts.OwnerID {
// OK we're in the context of a User
@@ -337,7 +341,7 @@ func SearchRepositoryCondition(opts *SearchRepoOptions) builder.Cond {
if !opts.TopicOnly {
var likes = builder.NewCond()
for _, v := range strings.Split(opts.Keyword, ",") {
- likes = likes.Or(builder.Like{"lower_name", strings.ToLower(v)})
+ likes = likes.Or(builder.Like{"lower_alias", strings.ToLower(v)})
likes = likes.Or(builder.Like{"alias", v})
if opts.IncludeDescription {
likes = likes.Or(builder.Like{"LOWER(description)", strings.ToLower(v)})
diff --git a/models/search_record.go b/models/search_record.go
new file mode 100644
index 000000000..d9d85a591
--- /dev/null
+++ b/models/search_record.go
@@ -0,0 +1,83 @@
+package models
+
+import (
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/timeutil"
+ "xorm.io/xorm"
+)
+
+type SearchRecord struct {
+ ID int64 `xorm:"pk autoincr"`
+ //the keyword the user searched for
+ Keyword string `xorm:"NOT NULL"`
+ CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
+}
+
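+// SaveSearchKeywordToDb stores one search keyword per call so that popular
+// queries can be analyzed later; failures are logged and returned to the caller.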
+func SaveSearchKeywordToDb(keyword string) error {
+ record := &SearchRecord{
+ Keyword: keyword,
+ }
+ sess := x.NewSession()
+ defer sess.Close()
+ _, err := sess.Insert(record)
+ if err != nil {
+ log.Error("insert search record failed: %v", err)
+ return err
+ }
+ return nil
+}
+
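+// setIssueQueryCondition narrows the session to issues (or pull requests, when
+// isPull is true) authored by userId in private repositories whose title or
+// content matches Keyword; the LIKE patterns are passed as bound parameters
+// rather than concatenated into the SQL string.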
+func setIssueQueryCondition(sess *xorm.Session, Keyword string, isPull bool, userId int64) {
+ sess.And("issue.poster_id=?", userId)
+ sess.And("issue.is_pull=?", isPull)
+ sess.And("(issue.name like '%" + Keyword + "%' or issue.content like '%" + Keyword + "%')")
+ sess.Join("INNER", "repository", "issue.repo_id = repository.id").And("repository.is_private = ?", true)
+}
+
+func SearchPrivateIssueOrPr(Page int, PageSize int, Keyword string, isPull bool, userId int64) ([]*Issue, int64, error) {
+ sess := x.NewSession()
+ defer sess.Close()
+ setIssueQueryCondition(sess, Keyword, isPull, userId)
+ count, err := sess.Count(new(Issue))
+ if err != nil {
+ return nil, 0, err
+ }
+
+ setIssueQueryCondition(sess, Keyword, isPull, userId)
+ sess.Desc("issue.created_unix")
+ sess.Limit(PageSize, (Page-1)*PageSize)
+ issues := make([]*Issue, 0)
+ if err := sess.Find(&issues); err != nil {
+ return nil, 0, err
+ } else {
+ return issues, count, nil
+ }
+}
+
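+// setDataSetQueryCondition mirrors the issue filter above for datasets owned by
+// userId in private repositories, matching Keyword against title and description.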
+func setDataSetQueryCondition(sess *xorm.Session, Keyword string, userId int64) {
+ sess.And("dataset.user_id=?", userId)
+ sess.And("(dataset.title like '%" + Keyword + "%' or dataset.description like '%" + Keyword + "%')")
+ sess.Join("INNER", "repository", "dataset.repo_id = repository.id").And("repository.is_private = ?", true)
+}
+
+func SearchDatasetBySQL(Page int, PageSize int, Keyword string, userId int64) ([]*Dataset, int64, error) {
+ sess := x.NewSession()
+ defer sess.Close()
+ setDataSetQueryCondition(sess, Keyword, userId)
+ count, err := sess.Count(new(Dataset))
+ if err != nil {
+ return nil, 0, err
+ }
+
+ setDataSetQueryCondition(sess, Keyword, userId)
+ sess.Desc("dataset.created_unix")
+ sess.Limit(PageSize, (Page-1)*PageSize)
+ datasets := make([]*Dataset, 0)
+ if err := sess.Find(&datasets); err != nil {
+ return nil, 0, err
+ } else {
+ return datasets, count, nil
+ }
+
+}
diff --git a/models/user.go b/models/user.go
index f7857248b..f72462051 100755
--- a/models/user.go
+++ b/models/user.go
@@ -153,10 +153,11 @@ type User struct {
UseCustomAvatar bool
// Counters
- NumFollowers int
- NumFollowing int `xorm:"NOT NULL DEFAULT 0"`
- NumStars int
- NumRepos int
+ NumFollowers int
+ NumFollowing int `xorm:"NOT NULL DEFAULT 0"`
+ NumStars int
+ NumDatasetStars int `xorm:"NOT NULL DEFAULT 0"`
+ NumRepos int
// For organization
NumTeams int
diff --git a/modules/auth/cloudbrain.go b/modules/auth/cloudbrain.go
index 9949feddc..9d3d6290f 100755
--- a/modules/auth/cloudbrain.go
+++ b/modules/auth/cloudbrain.go
@@ -20,6 +20,9 @@ type CreateCloudBrainForm struct {
ResourceSpecId int `form:"resource_spec_id" binding:"Required"`
BenchmarkTypeID int `form:"benchmark_types_id"`
BenchmarkChildTypeID int `form:"benchmark_child_types_id"`
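+ // Fields used by GPU train jobs: BootFile is the entry script inside the
+ // repository, Params carries the serialized run-parameter list from the
+ // form, and BranchName selects the code branch to run against.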
+ BootFile string `form:"boot_file"`
+ Params string `form:"run_para_list"`
+ BranchName string `form:"branch_name"`
}
type CommitImageCloudBrainForm struct {
diff --git a/modules/auth/dataset.go b/modules/auth/dataset.go
index 577637273..71b5ac938 100755
--- a/modules/auth/dataset.go
+++ b/modules/auth/dataset.go
@@ -9,11 +9,10 @@ import (
type CreateDatasetForm struct {
Title string `binding:"Required"`
Category string `binding:"Required"`
- Description string `binding:"Required;MaxSize(254)"`
+ Description string `binding:"Required"`
License string `binding:"Required;MaxSize(64)"`
Task string `binding:"Required;MaxSize(64)"`
ReleaseID int64 `xorm:"INDEX"`
- Private bool
Files []string
}
@@ -25,11 +24,23 @@ type EditDatasetForm struct {
ID int64 `binding:"Required"`
Title string `binding:"Required"`
Category string `binding:"Required"`
- Description string `binding:"Required;MaxSize(254)"`
+ Description string `binding:"Required"`
License string `binding:"Required;MaxSize(64)"`
Task string `binding:"Required;MaxSize(64)"`
- Private bool
- ReleaseID int64 `xorm:"INDEX"`
+ ReleaseID int64 `xorm:"INDEX"`
Files []string
- Type string `binding:"Required"`
+ Type string `binding:"Required"`
+}
+
+func (f *EditDatasetForm) Validate(ctx *macaron.Context, errs binding.Errors) binding.Errors {
+ return validate(errs, ctx.Data, f, ctx.Locale)
+}
+
+type EditAttachmentForm struct {
+ ID int64 `binding:"Required"`
+ Description string
+}
+
+func (f *EditAttachmentForm) Validate(ctx *macaron.Context, errs binding.Errors) binding.Errors {
+ return validate(errs, ctx.Data, f, ctx.Locale)
}
diff --git a/modules/cloudbrain/cloudbrain.go b/modules/cloudbrain/cloudbrain.go
index 54ac0c7ac..4a89f9393 100755
--- a/modules/cloudbrain/cloudbrain.go
+++ b/modules/cloudbrain/cloudbrain.go
@@ -15,14 +15,13 @@ import (
)
const (
- Command = `pip3 install jupyterlab==2.2.5 -i https://pypi.tuna.tsinghua.edu.cn/simple;
- service ssh stop;
- jupyter lab --no-browser --ip=0.0.0.0 --allow-root --notebook-dir="/code" --port=80 --LabApp.token="" --LabApp.allow_origin="self https://cloudbrain.pcl.ac.cn"`
+ Command = `pip3 install jupyterlab==2.2.5 -i https://pypi.tuna.tsinghua.edu.cn/simple;service ssh stop;jupyter lab --no-browser --ip=0.0.0.0 --allow-root --notebook-dir="/code" --port=80 --LabApp.token="" --LabApp.allow_origin="self https://cloudbrain.pcl.ac.cn"`
//CommandBenchmark = `echo "start benchmark";python /code/test.py;echo "end benchmark"`
CommandBenchmark = `echo "start benchmark";cd /benchmark && bash run_bk.sh;echo "end benchmark"`
CodeMountPath = "/code"
DataSetMountPath = "/dataset"
ModelMountPath = "/model"
+ LogFile = "log.txt"
BenchMarkMountPath = "/benchmark"
BenchMarkResourceID = 1
Snn4imagenetMountPath = "/snn4imagenet"
@@ -32,10 +31,13 @@ const (
SubTaskName = "task1"
Success = "S000"
+
+ DefaultBranchName = "master"
)
var (
- ResourceSpecs *models.ResourceSpecs
+ ResourceSpecs *models.ResourceSpecs
+ TrainResourceSpecs *models.ResourceSpecs
)
func isAdminOrOwnerOrJobCreater(ctx *context.Context, job *models.Cloudbrain, err error) bool {
@@ -147,7 +149,7 @@ func AdminOrJobCreaterRightForTrain(ctx *context.Context) {
}
-func GenerateTask(ctx *context.Context, displayJobName, jobName, image, command, uuid, codePath, modelPath, benchmarkPath, snn4imagenetPath, brainScorePath, jobType, gpuQueue, description string, benchmarkTypeID, benchmarkChildTypeID, resourceSpecId int) error {
+func GenerateTask(ctx *context.Context, displayJobName, jobName, image, command, uuid, codePath, modelPath, benchmarkPath, snn4imagenetPath, brainScorePath, jobType, gpuQueue, description, branchName, bootFile, params string, benchmarkTypeID, benchmarkChildTypeID, resourceSpecId int) error {
dataActualPath := setting.Attachment.Minio.RealPath +
setting.Attachment.Minio.Bucket + "/" +
setting.Attachment.Minio.BasePath +
@@ -155,13 +157,27 @@ func GenerateTask(ctx *context.Context, displayJobName, jobName, image, command,
uuid
var resourceSpec *models.ResourceSpec
- if ResourceSpecs == nil {
- json.Unmarshal([]byte(setting.ResourceSpecs), &ResourceSpecs)
- }
- for _, spec := range ResourceSpecs.ResourceSpec {
- if resourceSpecId == spec.Id {
- resourceSpec = spec
+ var versionCount int
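+ // Train jobs draw their hardware spec from TrainResourceSpecs (the
+ // TRAIN_RESOURCE_SPECS setting) and always start at version 1; every other
+ // job type keeps using the original ResourceSpecs pool. Both JSON blobs are
+ // parsed lazily on first use, and unmarshal errors are ignored here.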
+ if jobType == string(models.JobTypeTrain) {
+ versionCount = 1
+ if TrainResourceSpecs == nil {
+ json.Unmarshal([]byte(setting.TrainResourceSpecs), &TrainResourceSpecs)
+ }
+ for _, spec := range TrainResourceSpecs.ResourceSpec {
+ if resourceSpecId == spec.Id {
+ resourceSpec = spec
+ }
+ }
+ } else {
+ if ResourceSpecs == nil {
+ json.Unmarshal([]byte(setting.ResourceSpecs), &ResourceSpecs)
+ }
+ for _, spec := range ResourceSpecs.ResourceSpec {
+ if resourceSpecId == spec.Id {
+ resourceSpec = spec
+ }
}
+
}
if resourceSpec == nil {
@@ -169,6 +185,15 @@ func GenerateTask(ctx *context.Context, displayJobName, jobName, image, command,
return errors.New("no such resourceSpec")
}
+ var datasetName string
+ attach, err := models.GetAttachmentByUUID(uuid)
+ if err != nil {
+ //for benchmark, do not return error
+ log.Error("GetAttachmentByUUID failed:%v", err)
+ } else {
+ datasetName = attach.Name
+ }
+
jobResult, err := CreateJob(jobName, models.CreateJobParams{
JobName: jobName,
RetryCount: 1,
@@ -263,13 +288,19 @@ func GenerateTask(ctx *context.Context, displayJobName, jobName, image, command,
BenchmarkTypeID: benchmarkTypeID,
BenchmarkChildTypeID: benchmarkChildTypeID,
Description: description,
+ IsLatestVersion: "1",
+ VersionCount: versionCount,
+ BranchName: branchName,
+ BootFile: bootFile,
+ DatasetName: datasetName,
+ Parameters: params,
})
if err != nil {
return err
}
- task, err := models.GetCloudbrainByName(jobName)
+ task, err := models.GetCloudbrainByJobID(jobID)
if err != nil {
log.Error("GetCloudbrainByName failed: %v", err.Error())
return err
@@ -278,6 +309,8 @@ func GenerateTask(ctx *context.Context, displayJobName, jobName, image, command,
if string(models.JobTypeBenchmark) == jobType {
notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, stringId, displayJobName, models.ActionCreateBenchMarkTask)
+ } else if string(models.JobTypeTrain) == jobType {
+ notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, jobID, displayJobName, models.ActionCreateGPUTrainTask)
} else {
notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, stringId, displayJobName, models.ActionCreateDebugGPUTask)
}
@@ -407,8 +440,10 @@ func RestartTask(ctx *context.Context, task *models.Cloudbrain, newID *string) e
return err
}
- idString := strconv.FormatInt(newTask.ID, 10)
- *newID = idString
+ stringId := strconv.FormatInt(newTask.ID, 10)
+ *newID = stringId
+
+ notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, stringId, task.DisplayJobName, models.ActionCreateDebugGPUTask)
return nil
}
diff --git a/modules/context/repo.go b/modules/context/repo.go
index 64f02c921..7c425c8c0 100755
--- a/modules/context/repo.go
+++ b/modules/context/repo.go
@@ -475,6 +475,8 @@ func RepoAssignment() macaron.Handler {
if ctx.IsSigned {
ctx.Data["IsWatchingRepo"] = models.IsWatching(ctx.User.ID, repo.ID)
ctx.Data["IsStaringRepo"] = models.IsStaring(ctx.User.ID, repo.ID)
+
+ ctx.Data["IsStaringDataset"] = models.IsDatasetStaringByRepoId(ctx.User.ID, repo.ID)
}
if repo.IsFork {
diff --git a/modules/dataset/dataset.go b/modules/dataset/dataset.go
new file mode 100644
index 000000000..a180af184
--- /dev/null
+++ b/modules/dataset/dataset.go
@@ -0,0 +1,17 @@
+package dataset
+
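+// GetResourceType maps an attachment's cloudbrain type to the cluster label
+// shown in dataset lists: type 0 is Cloudbrain I (CPU/GPU), anything else is
+// treated as Cloudbrain II (NPU).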
+func GetResourceType(cloudbrainType int) string {
+ if cloudbrainType == 0 {
+ return "CPU/GPU"
+ } else {
+ return "NPU"
+ }
+}
+
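+// GetStatusText returns the locale key ("dataset.private" or "dataset.public")
+// for an attachment's visibility, intended to be resolved by the template's
+// i18n layer.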
+func GetStatusText(isPrivate bool) string {
+ if isPrivate {
+ return "dataset.private"
+ } else {
+ return "dataset.public"
+ }
+}
diff --git a/modules/modelarts/modelarts.go b/modules/modelarts/modelarts.go
index b740b1167..538fcfbd9 100755
--- a/modules/modelarts/modelarts.go
+++ b/modules/modelarts/modelarts.go
@@ -51,10 +51,11 @@ const (
DataUrl = "data_url"
ResultUrl = "result_url"
CkptUrl = "ckpt_url"
+ DeviceTarget = "device_target"
+ Ascend = "Ascend"
PerPage = 10
IsLatestVersion = "1"
NotLatestVersion = "0"
- DebugType = -1
VersionCount = 1
SortByCreateTime = "create_time"
diff --git a/modules/setting/setting.go b/modules/setting/setting.go
index 2a29dd700..26f068193 100755
--- a/modules/setting/setting.go
+++ b/modules/setting/setting.go
@@ -165,6 +165,7 @@ var (
ExplorePagingNum int
ContributorPagingNum int
IssuePagingNum int
+ DatasetPagingNum int
RepoSearchPagingNum int
MembersPagingNum int
FeedMaxCommitNum int
@@ -207,6 +208,7 @@ var (
ExplorePagingNum: 20,
ContributorPagingNum: 50,
IssuePagingNum: 10,
+ DatasetPagingNum: 5,
RepoSearchPagingNum: 10,
MembersPagingNum: 20,
FeedMaxCommitNum: 5,
@@ -435,7 +437,7 @@ var (
//home page
RecommentRepoAddr string
-
+ ESSearchURL string
//notice config
UserNameOfNoticeRepo string
RepoNameOfNoticeRepo string
@@ -450,16 +452,18 @@ var (
DecompressOBSTaskName string
//cloudbrain config
- CBAuthUser string
- CBAuthPassword string
- RestServerHost string
- JobPath string
- CBCodePathPrefix string
- JobType string
- GpuTypes string
- DebugServerHost string
- ResourceSpecs string
- MaxDuration int64
+ CBAuthUser string
+ CBAuthPassword string
+ RestServerHost string
+ JobPath string
+ CBCodePathPrefix string
+ JobType string
+ GpuTypes string
+ DebugServerHost string
+ ResourceSpecs string
+ MaxDuration int64
+ TrainGpuTypes string
+ TrainResourceSpecs string
//benchmark config
IsBenchmarkEnabled bool
@@ -512,9 +516,9 @@ var (
ProfileID string
PoolInfos string
Flavor string
- DebugHost string
- ImageInfos string
- Capacity int
+ DebugHost string
+ ImageInfos string
+ Capacity int
//train-job
ResourcePools string
Engines string
@@ -1263,6 +1267,7 @@ func NewContext() {
sec = Cfg.Section("homepage")
RecommentRepoAddr = sec.Key("Address").MustString("https://git.openi.org.cn/OpenIOSSG/promote/raw/branch/master/")
+ ESSearchURL = sec.Key("ESSearchURL").MustString("http://192.168.207.94:9200")
sec = Cfg.Section("notice")
UserNameOfNoticeRepo = sec.Key("USER_NAME").MustString("OpenIOSSG")
@@ -1283,6 +1288,8 @@ func NewContext() {
GpuTypes = sec.Key("GPU_TYPES").MustString("")
ResourceSpecs = sec.Key("RESOURCE_SPECS").MustString("")
MaxDuration = sec.Key("MAX_DURATION").MustInt64(14400)
+ TrainGpuTypes = sec.Key("TRAIN_GPU_TYPES").MustString("")
+ TrainResourceSpecs = sec.Key("TRAIN_RESOURCE_SPECS").MustString("")
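+ // TRAIN_GPU_TYPES and TRAIN_RESOURCE_SPECS mirror GPU_TYPES/RESOURCE_SPECS
+ // but apply only to train jobs; TRAIN_RESOURCE_SPECS is JSON that unmarshals
+ // into models.ResourceSpecs (see GenerateTask in modules/cloudbrain).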
sec = Cfg.Section("benchmark")
IsBenchmarkEnabled = sec.Key("ENABLED").MustBool(false)
diff --git a/modules/templates/helper.go b/modules/templates/helper.go
index 3d31b611c..77c6fca8d 100755
--- a/modules/templates/helper.go
+++ b/modules/templates/helper.go
@@ -23,6 +23,8 @@ import (
"time"
"unicode"
+ "code.gitea.io/gitea/modules/dataset"
+
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/base"
"code.gitea.io/gitea/modules/emoji"
@@ -86,20 +88,22 @@ func NewFuncMap() []template.FuncMap {
"AllowedReactions": func() []string {
return setting.UI.Reactions
},
- "AvatarLink": models.AvatarLink,
- "Safe": Safe,
- "SafeJS": SafeJS,
- "Str2html": Str2html,
- "TimeSince": timeutil.TimeSince,
- "TimeSinceUnix": timeutil.TimeSinceUnix,
- "TimeSinceUnix1": timeutil.TimeSinceUnix1,
- "TimeSinceUnixShort": timeutil.TimeSinceUnixShort,
- "RawTimeSince": timeutil.RawTimeSince,
- "FileSize": base.FileSize,
- "PrettyNumber": base.PrettyNumber,
- "Subtract": base.Subtract,
- "EntryIcon": base.EntryIcon,
- "MigrationIcon": MigrationIcon,
+ "AvatarLink": models.AvatarLink,
+ "Safe": Safe,
+ "SafeJS": SafeJS,
+ "Str2html": Str2html,
+ "TimeSince": timeutil.TimeSince,
+ "TimeSinceUnix": timeutil.TimeSinceUnix,
+ "TimeSinceUnix1": timeutil.TimeSinceUnix1,
+ "AttachmentResourceType": dataset.GetResourceType,
+ "AttachmentStatus": dataset.GetStatusText,
+ "TimeSinceUnixShort": timeutil.TimeSinceUnixShort,
+ "RawTimeSince": timeutil.RawTimeSince,
+ "FileSize": base.FileSize,
+ "PrettyNumber": base.PrettyNumber,
+ "Subtract": base.Subtract,
+ "EntryIcon": base.EntryIcon,
+ "MigrationIcon": MigrationIcon,
"Add": func(a, b int) int {
return a + b
},
@@ -340,11 +344,13 @@ func NewTextFuncMap() []texttmpl.FuncMap {
"AppDomain": func() string {
return setting.Domain
},
- "TimeSince": timeutil.TimeSince,
- "TimeSinceUnix": timeutil.TimeSinceUnix,
- "TimeSinceUnix1": timeutil.TimeSinceUnix1,
- "TimeSinceUnixShort": timeutil.TimeSinceUnixShort,
- "RawTimeSince": timeutil.RawTimeSince,
+ "TimeSince": timeutil.TimeSince,
+ "TimeSinceUnix": timeutil.TimeSinceUnix,
+ "TimeSinceUnix1": timeutil.TimeSinceUnix1,
+ "TimeSinceUnixShort": timeutil.TimeSinceUnixShort,
+ "RawTimeSince": timeutil.RawTimeSince,
+ "AttachmentResourceType": dataset.GetResourceType,
+ "AttachmentStatus": dataset.GetStatusText,
"DateFmtLong": func(t time.Time) string {
return t.Format(time.RFC1123Z)
},
@@ -746,5 +752,5 @@ func licenses() []string {
// Dataset tasks
func tasks() []string {
- return []string{"machine_translation", "question_answering_system", "information_retrieval", "knowledge_graph", "text_annotation", "text_categorization", "emotion_analysis", "language_modeling", "speech_recognition", "automatic_digest", "information_extraction", "description_generation", "image_classification", "face_recognition", "image_search", "target_detection", "image_description_generation", "vehicle_license_plate_recognition", "medical_image_analysis", "unmanned", "unmanned_security", "drone", "vr_ar", "2_d_vision", "2.5_d_vision", "3_d_reconstruction", "image_processing", "video_processing", "visual_input_system", "speech_coding", "speech_enhancement", "speech_recognition", "speech_synthesis"}
+ return []string{"machine_translation", "question_answering_system", "information_retrieval", "knowledge_graph", "text_annotation", "text_categorization", "emotion_analysis", "language_modeling", "speech_recognition", "automatic_digest", "information_extraction", "description_generation", "image_classification", "face_recognition", "image_search", "target_detection", "image_description_generation", "vehicle_license_plate_recognition", "medical_image_analysis", "unmanned", "unmanned_security", "drone", "vr_ar", "2_d_vision", "2.5_d_vision", "3_d_reconstruction", "image_processing", "video_processing", "visual_input_system", "speech_coding", "speech_enhancement", "speech_synthesis"}
}
diff --git a/options/locale/locale_en-US.ini b/options/locale/locale_en-US.ini
index 3ebe5a9b5..09bb5015f 100755
--- a/options/locale/locale_en-US.ini
+++ b/options/locale/locale_en-US.ini
@@ -254,6 +254,18 @@ page_dev_yunlao_desc3=Developers can freely choose the corresponding computing r
page_dev_yunlao_desc4=If your model requires more computing resources, you can also apply for it separately.
page_dev_yunlao_apply=Apply Separately
+search=Search
+search_repo=Repository
+search_dataset=Dataset
+search_issue=Issue
+search_pr=Pull Request
+search_user=User
+search_org=Organization
+search_finded=Found
+search_related=related
+search_maybe=about
+search_ge=
+
[explore]
repos = Repositories
select_repos = Select the project
@@ -723,8 +735,13 @@ alert = To initiate a cloud brain task, please upload the dataset in zip format.
dataset = Dataset
dataset_setting= Dataset Setting
title = Name
+title_format_err=Name can only contain numbers, letters, '-', '_' or '.', and can be at most 100 characters long.
description = Description
+description_format_err=The description can contain at most 1024 characters.
create_dataset = Create Dataset
+create_dataset_fail=Failed to create dataset.
+query_dataset_fail=Failed to query dataset.
+edit_attachment_fail=Failed to update description.
show_dataset= Dataset
edit_dataset= Edit Dataset
update_dataset= Update Dataset
@@ -743,7 +760,8 @@ private = private
public = public
dir = directory
back = back
-copy_url=copy download url
+copy_url=Copy Download URL
+copy_md5 = Copy MD5
directory=preview of the datasets
create_label_task=create label task
visibility = visibility
@@ -794,12 +812,49 @@ category.computer_vision= computer vision
category.natural_language_processing= natural language processing
category.speech_processing= speech processing
category.computer_vision_natural_language_processing= computer vision and natural language processing
-attachment.delete= delete this version of dataset
+attachment.delete= Delete this version of dataset
attachment.delete_desc= Are you sure you will delete this version of dataset, once deleted can not be recovery
public= public
private= private
-delete= delete
-
+delete= Delete
+select_dataset=Select Dataset
+current_project=Current Project
+owner_dataset=Owner Dataset
+public_dataset=Public Dataset
+I_liked = I Liked
+use = Use
+create_new_dataset = Create New Dataset
+dataset_name = Dataset Name
+dataset_description = Dataset Description
+select_category = Select Category
+select_task = Select Research Direction/Application Area
+dataset_name_tooltips = Please enter letters, numbers, _ and - up to 100 characters.
+dataset_no_create = No dataset has been created yet
+dataset_explain = Dataset: Cloudbrain I provides CPU/GPU resources, Cloudbrain II provides Ascend NPU resources; a dataset used for debugging must also be uploaded to the corresponding environment;
+dataset_instructions_for_use = Instructions for use: You can refer to Qizhi AI Collaboration Platform
+dataset_camp_course = Newcomer Training Camp Course;
+dataset_upload = Upload
+dataset_file_name = File Name
+dataset_available_clusters = Available Clusters
+dataset_upload_time = Upload Time
+download = Download
+modify_description = Modify Description
+set_public = Set Public
+set_private = Set Private
+annotation = Annotation
+upload_dataset_file = Upload Dataset File
+file_description = File Description
+data_upload = Dataset Upload
+illustrate = Note
+illustrate.only = Only Datasets In
+illustrate.zip = zip/tar.gz Format
+illustrate.fisrt_end = Can Initiate Cloudbrain Tasks
+modify_dataset = Modify Dataset
+modify_dataset_description = Modify Dataset Description
+search_dataset = Search Dataset Files
+unzip_tooltips = If decompression takes a long time, please check whether the archive contains encrypted or corrupted files
+zip_failed = Decompression failed; please check whether the archive is encrypted, or contact technical support
+dataset_desc = The description should not exceed 1024 characters
[repo]
owner = Owner
repo_name = Repository Name
@@ -829,7 +884,7 @@ repo_label_helpe = Press Enter to complete
issue_labels = Issue Labels
issue_labels_helper = Select an issue label set.
license = License
-license_helper = Select a license file.
+license_helper = Select a license file
readme = README
readme_helper = Select a README file template.
auto_init = Initialize Repository (Adds .gitignore, License and README)
@@ -870,7 +925,7 @@ gpu_type_all=All
model_download=Model Download
submit_image=Submit Image
download=Download
-score=score
+score=Score
cloudbrain=Cloudbrain
cloudbrain.new=New cloudbrain
@@ -885,7 +940,7 @@ cloudbrain1 = cloudbrain1
cloudbrain2 = cloudbrain2
cloudbrain_selection = select cloudbrain
cloudbrain_platform_selection = Select the cloudbrain platform you want to use:
-confirm_choice = confirm
+confirm_choice = Confirm
cloudbran1_tips = Only data in zip format can create cloudbrain tasks
cloudbrain_creator=Creator
cloudbrain_task = Task Name
@@ -967,7 +1022,8 @@ modelarts.train_job.parameter_value=Parameter Value
modelarts.train_job.resource_setting=resource_setting
modelarts.train_job.resource_setting_info=resource_setting_info
modelarts.train_job.resource_pool=resource_pool
-modelarts.train_job.resource_type=resource_type
+modelarts.train_job.resource_type=Resource Type
+modelarts.train_job.train_dataset=Train Dataset
modelarts.train_job.standard=Standard
modelarts.train_job.NAS_address=NAS Address
modelarts.train_job.NAS_mount_path=NAS Mount Path
@@ -1095,6 +1151,7 @@ unstar = Unstar
star = Star
fork = Fork
download_archive = Download Repository
+star_fail=Failed to %s the dataset.
no_desc = No Description
no_label = No labels
@@ -2746,10 +2803,11 @@ reject_pull_request = `suggested changes for %s#%[2]s`
upload_dataset=`upload dataset %s`
task_gpudebugjob=`created CPU/GPU type debugging task%s`
task_npudebugjob=`created NPU type debugging task %s`
-task_trainjob=`created training task%s`
+task_nputrainjob=`created NPU training task %s`
task_inferencejob=`created reasoning task %s`
task_benchmark=`created profiling task %s`
task_createmodel=`created new model %s`
+task_gputrainjob=`created CPU/GPU training task %s`
[tool]
ago = %s ago
diff --git a/options/locale/locale_zh-CN.ini b/options/locale/locale_zh-CN.ini
index 7078bdb52..d26065363 100755
--- a/options/locale/locale_zh-CN.ini
+++ b/options/locale/locale_zh-CN.ini
@@ -256,6 +256,18 @@ page_dev_yunlao_desc3=开发者可以根据使用需求,自由选择相应计
page_dev_yunlao_desc4=如果您的模型需要更多的计算资源,也可以单独申请
page_dev_yunlao_apply=单独申请
+search=搜索
+search_repo=项目
+search_dataset=数据集
+search_issue=任务
+search_pr=合并请求
+search_user=用户
+search_org=组织
+search_finded=找到
+search_related=相关
+search_maybe=约为
+search_ge=个
+
[explore]
repos=项目
select_repos=精选项目
@@ -726,8 +738,14 @@ alert=如果要发起云脑任务,请上传zip格式的数据集
dataset=数据集
dataset_setting=数据集设置
title=名称
+title_format_err=名称最多允许输入100个字符,只允许字母,数字,中划线 (‘-’),下划线 (‘_’) 和点 (‘.’) 。
description=描述
+description_format_err=描述最多允许输入1024个字符。
create_dataset=创建数据集
+create_dataset_fail=创建数据集失败。
+query_dataset_fail=查询数据集失败。
+edit_attachment_fail=修改描述失败。
+
show_dataset=数据集
edit_dataset=编辑数据集
update_dataset=更新数据集
@@ -803,6 +821,44 @@ attachment.delete_desc= 你确定要删除该版本的数据集么?一旦删
public=公有
private=私有
delete=删除
+select_dataset=选择数据集
+current_project=当前项目
+owner_dataset=我的数据集
+public_dataset=公开数据集
+I_liked=我收藏的
+use=使用
+create_new_dataset = 新建数据集
+dataset_name=数据集名称
+dataset_description = 数据集描述
+select_category = 选择分类
+select_task = 选择研究方向/应用领域
+dataset_name_tooltips = 请输入字母、数字、_和-,最长100个字符。
+dataset_no_create = 还未创建过数据集
+dataset_explain = 数据集:云脑1提供 CPU / GPU 资源,云脑2提供 Ascend NPU 资源,调试使用的数据集也需要上传到对应的环境;
+dataset_instructions_for_use = 使用说明:可以参考启智AI协作平台
+dataset_camp_course = 小白训练营课程
+dataset_upload = 上传
+dataset_file_name = 文件名称
+dataset_available_clusters = 可用集群
+dataset_upload_time = 上传时间
+download = 下载
+modify_description = 修改描述
+set_public = 设为公开
+set_private = 设为私有
+annotation = 标注
+upload_dataset_file = 上传数据集文件
+file_description = 文件描述
+data_upload = 数据上传
+illustrate = 说明
+illustrate.only = 只有
+illustrate.zip = zip/tar.gz格式
+illustrate.fisrt_end = 的数据集才能发起云脑任务
+modify_dataset = 修改数据集
+modify_dataset_description = 修改数据集文件描述
+search_dataset = 搜索数据集文件
+unzip_tooltips = 如果长时间未解压,请检查压缩包是否有加密文件或者文件错误
+zip_failed = 解压失败,请检查压缩包是否有加密或者联系技术支持人员。
+dataset_desc = 描述字数不超过1024个字符
[repo]
owner=拥有者
@@ -833,7 +889,7 @@ repo_label_helpe=输入完成后回车键完成标签确定。
issue_labels=任务标签
issue_labels_helper=选择一个任务标签集
license=授权许可
-license_helper=选择授权许可文件。
+license_helper=选择授权许可文件
readme=自述
readme_helper=选择自述文件模板。
auto_init=初始化存储库 (添加. gitignore、许可证和自述文件)
@@ -1101,6 +1157,8 @@ unstar=取消点赞
star=点赞
fork=派生
download_archive=下载此项目
+star_fail=%s失败。
+
no_desc=暂无描述
no_label = 暂无标签
@@ -2751,10 +2809,11 @@ reject_pull_request=`建议变更 %s#%[2]s`
upload_dataset=`上传了数据集文件 %s`
task_gpudebugjob=`创建了CPU/GPU类型调试任务 %s`
task_npudebugjob=`创建了NPU类型调试任务 %s`
-task_trainjob=`创建了训练任务 %s`
+task_nputrainjob=`创建了NPU类型训练任务 %s`
task_inferencejob=`创建了推理任务 %s`
task_benchmark=`创建了评测任务 %s`
task_createmodel=`导入了新模型 %s`
+task_gputrainjob=`创建了CPU/GPU类型训练任务 %s`
[tool]
ago=%s前
diff --git a/public/home/home.js b/public/home/home.js
old mode 100644
new mode 100755
index 7512a4423..478c70f21
--- a/public/home/home.js
+++ b/public/home/home.js
@@ -135,7 +135,7 @@ socket.onmessage = function (e) {
html += recordPrefix + actionName;
html += " " + getRepotext(record) + ""
}
- else if(record.OpType == "24" || record.OpType == "26" || record.OpType == "27" || record.OpType == "28" || record.OpType == "30"){
+ else if(record.OpType == "24" || record.OpType == "26" || record.OpType == "27" || record.OpType == "28" || record.OpType == "30" || record.OpType == "31"){
html += recordPrefix + actionName;
html += " " + record.RefName + ""
}
@@ -175,6 +175,8 @@ function getTaskLink(record){
re = re + "/cloudbrain/benchmark/" + record.Content;
}else if(record.OpType == 30){
re = re + "/modelmanage/show_model_info?name=" + record.RefName;
+ }else if(record.OpType == 31){
+ re = re + "/cloudbrain/train-job/" + record.Content;
}
re = encodeURI(re);
return re;
@@ -321,10 +323,11 @@ var actionNameZH={
"24":"上传了数据集文件",
"25":"创建了CPU/GPU类型调试任务",
"26":"创建了NPU类型调试任务",
- "27":"创建了训练任务",
+ "27":"创建了NPU类型训练任务",
"28":"创建了推理任务",
"29":"创建了评测任务",
- "30":"导入了新模型"
+ "30":"导入了新模型",
+ "31":"创建了CPU/GPU类型训练任务"
};
var actionNameEN={
@@ -346,10 +349,11 @@ var actionNameEN={
"24":" upload dataset ",
"25":" created CPU/GPU type debugging task ",
"26":" created NPU type debugging task ",
- "27":" created training task",
+ "27":" created NPU type training task",
"28":" created reasoning task",
"29":" created profiling task",
- "30":" created new model"
+ "30":" created new model",
+ "31":" created CPU/GPU type training task",
};
var repoAndOrgZH={
diff --git a/public/home/search.js b/public/home/search.js
new file mode 100644
index 000000000..70b5d4ef9
--- /dev/null
+++ b/public/home/search.js
@@ -0,0 +1,1281 @@
+var token;
+if(isEmpty(token)){
+ var meta = $("meta[name=_uid]");
+ if(!isEmpty(meta)){
+ token = meta.attr("content");
+ console.log("token is uid:" + token);
+ }
+}
+
+var html =document.documentElement;
+var lang = html.attributes["lang"]
+var isZh = true;
+if(lang != null && lang.nodeValue =="en-US" ){
+ console.log("the language is " + lang.nodeValue);
+ isZh=false;
+}else{
+ console.log("default lang=zh");
+}
+function isEmpty(str){
+ if(typeof str == "undefined" || str == null || str == ""){
+ return true;
+ }
+ return false;
+}
+
+var itemType={
+ "1":"repository",
+ "2":"issue",
+ "3":"user",
+ "4":"org",
+ "5":"dataset",
+ "6":"pr"
+};
+
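+// Keys of sortBy/sortAscending are two digits: the first selects the tab from
+// itemType above (1=repository ... 6=pr), the second one sort option within
+// that tab; "default" is passed through to the backend unchanged.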
+var sortBy={
+ "10":"default",
+ "11":"updated_unix.keyword",
+ "12":"num_watches",
+ "13":"num_stars",
+ "14":"num_forks",
+ "20":"default",
+ "21":"updated_unix.keyword",
+ "30":"default",
+ "31":"name.keyword",
+ "32":"name.keyword",
+ "33":"created_unix.keyword",
+ "34":"created_unix.keyword",
+ "40":"default",
+ "41":"name.keyword",
+ "42":"name.keyword",
+ "43":"created_unix.keyword",
+ "44":"created_unix.keyword",
+ "50":"default",
+ "51":"download_times",
+ "60":"default",
+ "61":"updated_unix.keyword"
+};
+
+var sortAscending={
+ "10":"false",
+ "11":"false",
+ "12":"false",
+ "13":"false",
+ "14":"false",
+ "20":"false",
+ "21":"false",
+ "30":"false",
+ "31":"true",
+ "32":"false",
+ "33":"false",
+ "34":"true",
+ "40":"false",
+ "41":"true",
+ "42":"false",
+ "43":"false",
+ "44":"true",
+ "50":"false",
+ "51":"false",
+ "60":"false",
+ "61":"false"
+};
+
+var currentPage = 1;
+var pageSize = 15;
+var currentSearchTableName ="repository";
+var currentSearchKeyword="";
+var currentSearchSortBy="";
+var currentSearchAscending="false";
+var OnlySearchLabel=false;
+var startIndex =1;
+var endIndex = 5;
+var totalPage = 1;
+var totalNum = 0;
+var privateTotal = 0;
+
+function initPageInfo(){
+ currentPage = 1;
+ startIndex =1;
+ endIndex = 5;
+}
+
+function searchItem(type,sortType){
+ console.log("enter item 2.");
+ currentSearchKeyword = document.getElementById("keyword_input").value;
+ if(!isEmpty(currentSearchKeyword)){
+ initPageInfo();
+ currentSearchTableName = itemType[type];
+ currentSearchSortBy = sortBy[sortType];
+ currentSearchAscending = sortAscending[sortType];
+ OnlySearchLabel =false;
+
+ page(currentPage);
+ }
+}
+
+
+
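+// search() runs the full query for the currently selected tab and fires
+// count-only queries (onlyReturnNum=true) for all other tabs so that every
+// tab badge shows its own hit count.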
+function search(){
+ console.log("enter here 1.");
+ currentSearchKeyword = document.getElementById("keyword_input").value;
+ if(!isEmpty(currentSearchKeyword)){
+ currentSearchKeyword = currentSearchKeyword.trim();
+ }
+ $('#searchForm').addClass("hiddenSearch");
+ initPageInfo();
+ if(!isEmpty(currentSearchKeyword)){
+ document.getElementById("find_id").innerHTML=getLabel(isZh,"search_finded");
+ currentSearchSortBy = sortBy[10];
+ currentSearchAscending = "false";
+ OnlySearchLabel =false;
+ page(currentPage);
+ if(currentSearchTableName != "repository"){
+ doSearch("repository",currentSearchKeyword,1,pageSize,true,"",false);
+ }
+ if(currentSearchTableName != "issue"){
+ doSearch("issue",currentSearchKeyword,1,pageSize,true,"",false);
+ }
+ if(currentSearchTableName != "user"){
+ doSearch("user",currentSearchKeyword,1,pageSize,true,"",false);
+ }
+ if(currentSearchTableName != "org"){
+ doSearch("org",currentSearchKeyword,1,pageSize,true,"",false);
+ }
+ if(currentSearchTableName != "dataset"){
+ doSearch("dataset",currentSearchKeyword,1,pageSize,true,"",false);
+ }
+ if(currentSearchTableName != "pr"){
+ doSearch("pr",currentSearchKeyword,1,pageSize,true,"",false);
+ }
+ }else{
+ initDiv(false);
+ document.getElementById("find_id").innerHTML=getLabel(isZh,"search_empty");
+ $('#find_title').html("");
+ document.getElementById("sort_type").innerHTML="";
+ document.getElementById("child_search_item").innerHTML="";
+ document.getElementById("page_menu").innerHTML="";
+ $('#repo_total').text("");
+ $('#pr_total').text("");
+ $('#issue_total').text("");
+ $('#dataset_total').text("");
+ $('#user_total').text("");
+ $('#org_total').text("");
+ setActivate(null);
+ }
+}
+
+function initDiv(isSearchLabel=false){
+ if(isSearchLabel){
+ document.getElementById("search_div").style.display="none";
+ document.getElementById("search_label_div").style.display="block";
+ document.getElementById("dataset_item").style.display="none";
+ document.getElementById("issue_item").style.display="none";
+ document.getElementById("pr_item").style.display="none";
+ document.getElementById("user_item").style.display="none";
+ document.getElementById("org_item").style.display="none";
+ document.getElementById("find_id").innerHTML="";
+
+ }else{
+ document.getElementById("search_div").style.display="block";
+ document.getElementById("search_label_div").style.display="none";
+ document.getElementById("dataset_item").style.display="block";
+ document.getElementById("issue_item").style.display="block";
+ document.getElementById("pr_item").style.display="block";
+ document.getElementById("user_item").style.display="block";
+ document.getElementById("org_item").style.display="block";
+ document.getElementById("find_id").innerHTML=getLabel(isZh,"search_finded");
+ }
+}
+
+function doSearchLabel(tableName,keyword,sortBy="",ascending="false"){
+ initDiv(true);
+ //document.getElementById("search_div").style.display="none";
+ //document.getElementById("search_label_div").style.display="block";
+ document.getElementById("search_label_div").innerHTML="
#" + keyword + "
";
+
+ currentSearchKeyword = keyword;
+ initPageInfo();
+ currentSearchTableName = tableName;
+ currentSearchSortBy = sortBy;
+ currentSearchAscending = ascending;
+ OnlySearchLabel =true;
+
+ page(currentPage);
+}
+
+function searchLabel(tableName,keyword,sortBy="",ascending="false"){
+
+ sessionStorage.setItem("keyword",keyword);
+ sessionStorage.setItem("tableName",tableName);
+ sessionStorage.setItem("searchLabel",true);
+ sessionStorage.setItem("sortBy",sortBy);
+ sessionStorage.setItem("ascending",ascending);
+ console.log("enter label search.");
+ window.open("/all/search/");
+}
+
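+// doSearch calls the backend /all/dosearch/ endpoint; with onlyReturnNum=true
+// only the per-tab counter is refreshed, otherwise the result list, sort bar
+// and pager for tableName are re-rendered from the JSON response.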
+function doSearch(tableName,keyword,page,pageSize=15,onlyReturnNum=true,sortBy="",OnlySearchLabel=false){
+ var language = "zh-CN";
+ if(!isZh){
+ language="en-US";
+ }
+ $.ajax({
+ type:"GET",
+ url:"/all/dosearch/",
+ headers: {
+ authorization:token,
+ },
+ dataType:"json",
+ data:{
+ 'TableName': tableName,
+ 'Key': keyword,
+ 'Page': page,
+ 'PageSize': pageSize,
+ 'OnlyReturnNum':onlyReturnNum,
+ 'SortBy':sortBy,
+ 'OnlySearchLabel':OnlySearchLabel,
+ 'Ascending':currentSearchAscending,
+ 'WebTotal':totalNum,
+ 'PrivateTotal':privateTotal,
+ 'language':language
+ },
+ async:true,
+ success:function(json){
+ console.log("tableName=" + tableName);
+ console.log(json);
+ displayResult(tableName,page,json,onlyReturnNum,keyword);
+ },
+ error:function(response) {
+ console.log(response);
+ }
+ });
+}
+
+function displayResult(tableName,page,jsonResult,onlyReturnNum,keyword){
+ if(tableName == "repository") {
+ displayRepoResult(page,jsonResult,onlyReturnNum,keyword);
+ } else if (tableName == "issue") {
+ displayIssueResult(page,jsonResult,onlyReturnNum,keyword);
+ } else if (tableName == "user") {
+ displayUserResult(page,jsonResult,onlyReturnNum,keyword);
+ } else if (tableName == "org") {
+ displayOrgResult(page,jsonResult,onlyReturnNum,keyword);
+ } else if (tableName == "dataset") {
+ displayDataSetResult(page,jsonResult,onlyReturnNum,keyword);
+ } else if (tableName == "pr") {
+ displayPrResult(page,jsonResult,onlyReturnNum,keyword);
+ }
+ if(!onlyReturnNum){
+ console.log("set total num." + tableName);
+ totalPage =Math.ceil(jsonResult.Total/pageSize);
+ totalNum = jsonResult.Total;
+ privateTotal = jsonResult.PrivateTotal;
+ setPage(page);
+ }
+
+}
+
+function displayPrResult(page,jsonResult,onlyReturnNum,keyword){
+ var data = jsonResult.Result;
+ var total = jsonResult.Total;
+ $('#pr_total').text(total);
+ if(!onlyReturnNum){
+ setActivate("pr_item");
+ //$('#keyword_desc').text(keyword);
+ //$('#obj_desc').text(getLabel(isZh,"search_pr"));
+ //$('#child_total').text(total);
+ $('#find_title').html(getLabel(isZh,"find_title").replace('{keyword}',keyword).replace('{tablename}',getLabel(isZh,"search_pr")).replace('{total}',total));
+
+ setIssueOrPrInnerHtml(data,"pulls");
+ }
+}
+
+var categoryDesc={
+ "computer_vision":"计算机视觉",
+ "natural_language_processing":"自然语言处理",
+ "speech_processing":"语音处理",
+ "computer_vision_natural_language_processing":"计算机视觉、自然语言处理"
+};
+
+var categoryENDesc={
+ "computer_vision":"computer vision",
+ "natural_language_processing":"natural language processing",
+ "speech_processing":"speech processing",
+ "computer_vision_natural_language_processing":"computer vision and natural language processing"
+};
+
+var taskDesc={
+ "machine_translation":"机器翻译",
+ "question_answering_system":"问答系统",
+ "information_retrieval":"信息检索",
+ "knowledge_graph":"知识图谱",
+ "text_annotation":"文本标注",
+ "text_categorization":"文本分类",
+ "emotion_analysis":"情感分析",
+ "language_modeling":"语言建模",
+ "speech_recognition":"语音识别",
+ "automatic_digest":"自动文摘",
+ "information_extraction":"信息抽取",
+ "description_generation":"说明生成",
+ "image_classification":"图像分类",
+ "face_recognition":"人脸识别",
+ "image_search":"图像搜索",
+ "target_detection":"目标检测",
+ "image_description_generation":"图像描述生成",
+ "vehicle_license_plate_recognition":"车辆车牌识别",
+ "medical_image_analysis":"医学图像分析",
+ "unmanned":"无人驾驶",
+ "unmanned_security":"无人安防",
+ "drone":"无人机",
+ "vr_ar":"VR/AR",
+ "2_d_vision":"2-D视觉",
+ "2_5_d_vision":"2.5-D视觉",
+ "3_d_reconstruction":"3D重构",
+ "image_processing":"图像处理",
+ "video_processing":"视频处理",
+ "visual_input_system":"视觉输入系统",
+ "speech_coding":"语音编码",
+ "speech_enhancement":"语音增强",
+ "speech_recognition":"语音识别",
+ "speech_synthesis":"语音合成"
+};
+
+var taskENDesc={
+ "machine_translation":"machine translation",
+ "question_answering_system":"question answering system",
+ "information_retrieval":"information retrieval",
+ "knowledge_graph":"knowledge graph",
+ "text_annotation":"text annotation",
+ "text_categorization":"text categorization",
+ "emotion_analysis":"emotion analysis",
+ "language_modeling":"language modeling",
+ "speech_recognition":"speech recognition",
+ "automatic_digest":"automatic digest",
+ "information_extraction":"information extraction",
+ "description_generation":"description generation",
+ "image_classification":"image classification",
+ "face_recognition":"face recognition",
+ "image_search":"image search",
+ "target_detection":"target detection",
+ "image_description_generation":"image description generation",
+ "vehicle_license_plate_recognition":"vehicle license plate recognition",
+ "medical_image_analysis":"medical image analysis",
+ "unmanned":"unmanned",
+ "unmanned_security":"unmanned security",
+ "drone":"drone",
+ "vr_ar":"VR/AR",
+ "2_d_vision":"2.D vision",
+ "2.5_d_vision":"2.5D vision",
+ "3_d_reconstruction":"3Dreconstruction",
+ "image_processing":"image processing",
+ "video_processing":"video processing",
+ "visual_input_system":"visual input system",
+ "speech_coding":"speech coding",
+ "speech_enhancement":"speech enhancement",
+ "speech_recognition":"speech recognition",
+ "speech_synthesis":"speech synthesis"
+};
+
+function getCategoryDesc(isZh,key){
+ var re = key;
+ if(isZh){
+ re = categoryDesc[key];
+ }else{
+ re = categoryENDesc[key];
+ }
+ if(isEmpty(re)){
+ return key;
+ }
+ return re;
+}
+
+function getTaskDesc(isZh,key){
+ var re = key;
+ if(isZh){
+ re = taskDesc[key];
+ }else{
+ re = taskENDesc[key];
+ }
+ if(isEmpty(re)){
+ return key;
+ }
+ return re;
+}
+
+function getActiveItem(sort_type){
+ console.log("currentSearchSortBy=" + currentSearchSortBy + " sort_type=" + sortBy[sort_type]);
+ if(currentSearchSortBy == sortBy[sort_type] && currentSearchAscending == sortAscending[sort_type]){
+ return "active ";
+ }else{
+ return "";
+ }
+}
+
+function displayDataSetResult(page,jsonResult,onlyReturnNum,keyword){
+ var data = jsonResult.Result;
+ var total = jsonResult.Total;
+ $('#dataset_total').text(total);
+ if(!onlyReturnNum){
+ setActivate("dataset_item");
+ //$('#keyword_desc').text(keyword);
+ //$('#obj_desc').text(getLabel(isZh,"search_dataset"));
+ //$('#child_total').text(total);
+ $('#find_title').html(getLabel(isZh,"find_title").replace('{keyword}',keyword).replace('{tablename}',getLabel(isZh,"search_dataset")).replace('{total}',total));
+
+ var sortHtml = "";
+ sortHtml +="";
+ sortHtml +="";
+ document.getElementById("sort_type").innerHTML=sortHtml;
+
+ var html = "";
+ var currentTime = new Date().getTime();
+ for(var i = 0; i < data.length;i++){
+ var recordMap = data[i];
+ html += "";
+ html += "
";
+ html += "
" ;
+ if(!isEmpty(recordMap["category"])){
+ html += " " + getCategoryDesc(isZh,recordMap["category"]) + "";
+ }
+ if(!isEmpty(recordMap["task"])){
+ html += " " + getTaskDesc(isZh,recordMap["task"]) + "";
+ }
+ html += " " +recordMap["download_times"] + " ";
+ html +="
";
+ html += "
";
+ html += "
";
+ html += "
" + recordMap["description"] + "
";
+ if(!isEmpty(recordMap["file_name"])){
+ html += "
" + recordMap["file_name"] + "
";
+ }
+ html +="
";
+ html +=" "+ getLabel(isZh,"search_lasted_update") + " " + recordMap["updated_html"];
+ html +="
";
+ html +="
";
+ html +="
";
+ html +="
";
+ }
+ document.getElementById("child_search_item").innerHTML=html;
+ }
+}
+
+function displayOrgResult(page,jsonResult,onlyReturnNum,keyword){
+ var data = jsonResult.Result;
+ var total = jsonResult.Total;
+ $('#org_total').text(total);
+ if(!onlyReturnNum){
+ setActivate("org_item");
+ //$('#keyword_desc').text(keyword);
+ //$('#obj_desc').text(getLabel(isZh,"search_org"));
+ //$('#child_total').text(total);
+ $('#find_title').html(getLabel(isZh,"find_title").replace('{keyword}',keyword).replace('{tablename}',getLabel(isZh,"search_org")).replace('{total}',total));
+
+ var sortHtml = "";
+ sortHtml +="";
+ sortHtml +="";
+ sortHtml +="";
+ sortHtml +="";
+ sortHtml +="";
+ document.getElementById("sort_type").innerHTML=sortHtml;
+
+ var html = "";
+ var currentTime = new Date().getTime();
+ for(var i = 0; i < data.length;i++){
+ var recordMap = data[i];
+ html += "";
+ html += "

";
+ html += "
";
+ html += "
";
+ html += "
";
+ html += "
" + recordMap["description"] + "
";
+ html +="
";
+ if(!isEmpty(recordMap["location"]) && recordMap["location"] != "null"){
+ html +=" " + recordMap["location"];
+ }
+ html +=" ";
+ if(!isEmpty(recordMap["website"]) && recordMap["website"] != "null"){
+ html +=" " + "" + recordMap["website"] + "";
+ }
+ html +=" "+ getLabel(isZh,"search_add_by") + " ";
+ html += recordMap["add_time"]
+ html +="
";
+ html +="
";
+ html +="
";
+ html +="
";
+ }
+ document.getElementById("child_search_item").innerHTML=html;
+ }
+}
+var monthDisplay=new Array("Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec");
+function displayUserResult(page,jsonResult,onlyReturnNum,keyword){
+ var data = jsonResult.Result;
+ var total = jsonResult.Total;
+ $('#user_total').text(total);
+ if(!onlyReturnNum){
+ setActivate("user_item");
+ //$('#keyword_desc').text(keyword);
+ //$('#obj_desc').text(getLabel(isZh,"search_user"));
+ //$('#child_total').text(total);
+
+ $('#find_title').html(getLabel(isZh,"find_title").replace('{keyword}',keyword).replace('{tablename}',getLabel(isZh,"search_user")).replace('{total}',total));
+
+ var sortHtml = "";//equal user sort by
+ sortHtml +="";
+ sortHtml +="";
+ sortHtml +="";
+ sortHtml +="";
+ sortHtml +="";
+
+ document.getElementById("sort_type").innerHTML=sortHtml;
+
+ var html = "";
+ var currentTime = new Date().getTime();
+ for(var i = 0; i < data.length;i++){
+ var recordMap = data[i];
+ html += "";
+ html += "

";
+ html += "
";
+ html += "
";
+ html += "
";
+ html += "
" + recordMap["description"] + "
";
+ html +="
";
+ if(!isEmpty(recordMap["email"]) && recordMap["email"] != "null"){
+ html +=" " + recordMap["email"] + "";
+ }
+ html +=" "+ getLabel(isZh,"search_add_by") + " ";
+ html += recordMap["add_time"]
+ html +="
";
+ html +="
";
+ html +="
";
+ html +="
";
+ }
+ document.getElementById("child_search_item").innerHTML=html;
+ }
+}
+
+function setIssueOrPrInnerHtml(data,path){
+ var sortHtml = "";
+ if(path =="issues"){
+ sortHtml +="";
+ sortHtml +="";
+ }else{
+ sortHtml +="";
+ sortHtml +="";
+ }
+
+ document.getElementById("sort_type").innerHTML=sortHtml;
+
+ var html = "";
+ var currentTime = new Date().getTime();
+ for(var i = 0; i < data.length;i++){
+ var recordMap = data[i];
+ html += "";
+ html += "
";
+ html += "
";
+ html += "
";
+ html += "
" + recordMap["content"] + "
";
+ html +="
";
+ html +=" ";
+ html +=" " + addBlank(recordMap["repoUrl"]) +" #" + recordMap["index"] + " ";
+ html +=" ";
+ if(recordMap["is_closed"] != null && (!(recordMap["is_closed"]) || recordMap["is_closed"]=="f")){
+ html += getLabel(isZh,"search_open");
+ }else{
+ html += getLabel(isZh,"search_closed");
+ }
+ html +=" " + recordMap["num_comments"];
+
+ html +=" "+ getLabel(isZh,"search_lasted_update") + " "+ recordMap["updated_html"];
+
+ html +="
";
+ html +="
";
+ html +="
";
+ html +="
";
+ }
+ document.getElementById("child_search_item").innerHTML=html;
+}
+
+function addBlank(url){
+ if(url == null){
+ return url;
+ }
+ var tmps = url.split("/");
+ if(tmps.length == 2){
+ return tmps[0] + " / " + tmps[1];
+ }
+ return url;
+}
+
+function displayIssueResult(page,jsonResult,onlyReturnNum,keyword){
+ var data = jsonResult.Result;
+ var total = jsonResult.Total;
+ $('#issue_total').text(total);
+ if(!onlyReturnNum){
+ setActivate("issue_item");
+ //$('#keyword_desc').text(keyword);
+ //$('#obj_desc').text(getLabel(isZh,"search_issue"));
+ //$('#child_total').text(total);
+ $('#find_title').html(getLabel(isZh,"find_title").replace('{keyword}',keyword).replace('{tablename}',getLabel(isZh,"search_issue")).replace('{total}',total));
+
+ setIssueOrPrInnerHtml(data,"issues");
+ }
+}
+
+function setActivate(name){
+ $('#repo_item').removeClass("active");
+ $('#user_item').removeClass("active");
+ $('#issue_item').removeClass("active");
+ $('#dataset_item').removeClass("active");
+ $('#org_item').removeClass("active");
+ $('#pr_item').removeClass("active");
+ if(name==null){
+ return;
+ }
+ var tmp = "#" + name;
+ $(tmp).addClass("active");
+}
+
+function displayRepoResult(page,jsonResult,onlyReturnNum,keyword){
+ var data = jsonResult.Result;
+ var total = jsonResult.Total;
+ $('#repo_total').text(total);
+
+ if(!onlyReturnNum){
+ setActivate("repo_item");
+ // $('#keyword_desc').text(keyword);
+ //$('#obj_desc').text(getLabel(isZh,"search_repo"));
+ //$('#child_total').text(total);
+ $('#find_title').html(getLabel(isZh,"find_title").replace('{keyword}',keyword).replace('{tablename}',getLabel(isZh,"search_repo")).replace('{total}',total));
+
+ var sortHtml = "";
+ sortHtml +="";
+ sortHtml +="";
+ sortHtml +="";
+ sortHtml +="";
+ sortHtml +="";
+
+ document.getElementById("sort_type").innerHTML=sortHtml;
+
+ var html = "";
+ var currentTime = new Date().getTime();
+ for(var i = 0; i < data.length;i++){
+ var recordMap = data[i];
+ html += "";
+ if(!isEmpty(recordMap['avatar'])){
+ html += "

";
+ }
+ html += "
";
+ html += "
";
+ html += "
";
+ html += "
" + recordMap["description"] + "
";
+ html += "
";
+ if(!isEmpty(recordMap["topics"]) && recordMap["topics"] !="null"){
+ for(var j = 0; j < recordMap["topics"].length;j++){
+ //function searchLabel(tableName,keyword,sortBy="",ascending=false)
+ html +="
"+ recordMap["hightTopics"][j] + "
";
+ }
+ }
+ html +="
";
+ html +="
";
+ html +=" " + recordMap["num_watches"] + " " + recordMap["num_stars"] + " " + recordMap["num_forks"] +" ";
+ html +=" "+ getLabel(isZh,"search_lasted_update") + " " + recordMap["updated_html"];
+ if(!isEmpty(recordMap["lang"])){
+ var lang = recordMap["lang"]
+ var tmpLang = recordMap["lang"].split(",");
+ if(tmpLang.length>0){
+ lang = tmpLang[0]
+ }
+ var backColor = "#3572A5";
+ if(LanguagesColor[lang] != null){
+ backColor = LanguagesColor[lang];
+ }
+ html +=" " + lang + "";
+ }
+ html +="
";
+ html +="
";
+ html +="
";
+ html +="
";
+ }
+
+ document.getElementById("child_search_item").innerHTML=html;
+ }
+}
+
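+// getTime renders the gap between UpdatedUnix (seconds) and currentTime
+// (milliseconds) as a localized "N units ago" string, picking the largest
+// fitting unit from seconds up to years.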
+function getTime(UpdatedUnix,currentTime){
+ currentTime = currentTime / 1000;
+ var timeEscSecond = currentTime - UpdatedUnix;
+ if( timeEscSecond < 0){
+ timeEscSecond = 1;
+ }
+ console.log("currentTime=" + currentTime + " updateUnix=" + UpdatedUnix);
+
+ var hours= Math.floor(timeEscSecond / 3600);
+ //compute the minute difference
+ var leave2 = Math.floor(timeEscSecond % (3600)); //seconds left after removing whole hours
+ var minutes= Math.floor(leave2 / 60);//minutes in the remainder
+
+ var leave3=Math.floor(leave2 % 60); //seconds left after removing whole minutes
+ var seconds= leave3;
+
+ if(hours == 0 && minutes == 0){
+ return seconds + getRepoOrOrg(6,isZh);
+ }else{
+ if(hours > 0){
+ if(hours >= 24){
+ var days = Math.ceil(hours/24)
+ if (days >= 30 && days <365){
+ return Math.ceil(days/30) + getRepoOrOrg(8,isZh);
+ }else if(days >= 365){
+ return Math.ceil(days/365) + getRepoOrOrg(9,isZh);
+ }
+ return Math.ceil(hours/24) + getRepoOrOrg(7,isZh);
+ }else{
+ return hours + getRepoOrOrg(4,isZh);
+ }
+ }else{
+ return minutes + getRepoOrOrg(5,isZh);
+ }
+ }
+}
+
+function getRepoOrOrg(key,isZhLang){
+ if(isZhLang){
+ return repoAndOrgZH[key];
+ }else{
+ return repoAndOrgEN[key];
+ }
+}
+
+var repoAndOrgZH={
+ "1":"项目",
+ "2":"成员",
+ "3":"团队",
+ "4":"小时前",
+ "5":"分钟前",
+ "6":"秒前",
+ "7":"天前",
+ "8":"个月前",
+ "9":"年前"
+};
+
+var repoAndOrgEN={
+ "1":"repository",
+ "2":"Members ",
+ "3":"Teams",
+ "4":" hours ago",
+ "5":" minutes ago",
+ "6":" seconds ago",
+ "7":" day ago",
+ "8":" month ago",
+ "9":" year ago"
+};
+
+
+
+
+function page(current){
+
+ currentPage=current;
+ doSearch(currentSearchTableName,currentSearchKeyword,current,pageSize,false,currentSearchSortBy,OnlySearchLabel);
+
+ }
+
+ function nextPage(){
+ currentPage = currentPage+1;
+ console.log("currentPage=" + currentPage);
+ if(currentPage >= endIndex){
+ startIndex=startIndex+1;
+ endIndex = endIndex +1;
+ }
+ page(currentPage);
+ }
+
+ function prePage(){
+ console.log("currentPage=" + currentPage);
+ if(currentPage > 1){
+ currentPage = currentPage-1;
+ if(currentPage <= startIndex && startIndex > 1){
+ startIndex = startIndex -1;
+ endIndex = endIndex - 1;
+ }
+ console.log("currentPage=" + (currentPage));
+ page(currentPage);
+ }
+ }
+
+function getXPosition(e){
+ var x=e.offsetLeft;
+ while(e=e.offsetParent)
+ {
+ x+=e.offsetLeft;
+ }
+ return x+20;//-260 keeps it from running off the screen
+}
+//get the y coordinate
+function getYPosition(e){
+ var y=e.offsetTop;
+ while(e=e.offsetParent)
+ {
+ y+=e.offsetTop;
+ }
+ return y+20;//80 is the input height
+}
+
+
+ function goPage(event){
+
+ var inputpage = document.getElementById("inputpage_div")
+ var left = getXPosition(event.target);
+ var top = getYPosition(event.target);
+ var goNum = $('#inputpage').val();
+ if (goNum<=0){
+ showTip(getLabel(isZh,"search_input_large_0"),"warning",left+5,top);
+ }
+ else if(goNum<=totalPage){
+ page(goNum);
+ }
+ else{
+ showTip(getLabel(isZh,"search_input_maxed"),"warning",left+5,top);
+ }
+ }
+
+ function showTip(tip, type,left,top) {
+ var $tip = $('#tipmsg');
+ var tipmsg = document.getElementById("tipmsg")
+ var style="z-index:10024;top:" + top + "px;left:" + left + "px;position:absolute;width:200px;height:60px;vertical-align:middle;";
+ console.log(style);
+ tipmsg.style = style;
+ var html ="" + tip + "";
+ $tip.stop(true).prop('class', 'alert alert-' + type).html(html).fadeIn(500).delay(2000).fadeOut(500);
+ }
+
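+ // setPage redraws the pager: a sliding five-page window (startIndex..endIndex)
+ // plus first/last links and a "go to page" input wired to the Enter key.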
+ function setPage(currentPage){
+ console.log("totalPage=" + totalPage);
+ var html ="";
+ console.log("currentPage=" + currentPage);
+ console.log("privateTotal=" + privateTotal);
+ // if(totalPage==0){
+ // return;
+ // }
+ html += "" + getLabel(isZh,"search_input_total") + " " + totalNum + " " + getLabel(isZh,"search_srtip") + ""
+ if(currentPage > 1){
+ html += "" + getLabel(isZh,"search_home_page") + "";
+ html += "";
+ }else{
+ html += "" + getLabel(isZh,"search_home_page") + "";
+ html += "";
+ }
+
+ for(var i=startIndex; i <= endIndex; i++){
+ var page_i = i;
+ if(page_i > totalPage){
+ break;
+ }
+ if( page_i == currentPage){
+ html += "" + page_i + "";
+ }else{
+ html += "" + page_i + "";
+ }
+ }
+
+ if(currentPage >=totalPage){
+ html += "";
+ html += "" + getLabel(isZh,"search_last_page") + "";
+ }else{
+ html += "";
+ html += "" + getLabel(isZh,"search_last_page") + "";
+ }
+
+ html +=" " + getLabel(isZh,"search_go_to") + "
" + getLabel(isZh,"search_go_page") + "
";
+ console.log("html=" + html)
+ document.getElementById("page_menu").innerHTML=html;
+ $('#inputpage').on('keypress',function(event){
+ if(event.keyCode == 13){
+ goPage(event);
+ }
+ });
+ }
+
+$('#keyword_input').on('keypress',function(event){
+ if(event.keyCode == 13){
+ search();
+ }
+});
+
+
+
+
+
+var LanguagesColor = {
+ "1C Enterprise": "#814CCC",
+ "ABAP": "#E8274B",
+ "AGS Script": "#B9D9FF",
+ "AMPL": "#E6EFBB",
+ "ANTLR": "#9DC3FF",
+ "API Blueprint": "#2ACCA8",
+ "APL": "#5A8164",
+ "ASP": "#6a40fd",
+ "ATS": "#1ac620",
+ "ActionScript": "#882B0F",
+ "Ada": "#02f88c",
+ "Agda": "#315665",
+ "Alloy": "#64C800",
+ "AngelScript": "#C7D7DC",
+ "AppleScript": "#101F1F",
+ "Arc": "#aa2afe",
+ "AspectJ": "#a957b0",
+ "Assembly": "#6E4C13",
+ "Asymptote": "#4a0c0c",
+ "AutoHotkey": "#6594b9",
+ "AutoIt": "#1C3552",
+ "Ballerina": "#FF5000",
+ "Batchfile": "#C1F12E",
+ "BlitzMax": "#cd6400",
+ "Boo": "#d4bec1",
+ "Brainfuck": "#2F2530",
+ "C": "#555555",
+ "C#": "#178600",
+ "C++": "#f34b7d",
+ "CSS": "#563d7c",
+ "Ceylon": "#dfa535",
+ "Chapel": "#8dc63f",
+ "Cirru": "#ccccff",
+ "Clarion": "#db901e",
+ "Clean": "#3F85AF",
+ "Click": "#E4E6F3",
+ "Clojure": "#db5855",
+ "CoffeeScript": "#244776",
+ "ColdFusion": "#ed2cd6",
+ "Common Lisp": "#3fb68b",
+ "Common Workflow Language": "#B5314C",
+ "Component Pascal": "#B0CE4E",
+ "Crystal": "#000100",
+ "Cuda": "#3A4E3A",
+ "D": "#ba595e",
+ "DM": "#447265",
+ "Dart": "#00B4AB",
+ "DataWeave": "#003a52",
+ "Dhall": "#dfafff",
+ "Dockerfile": "#384d54",
+ "Dogescript": "#cca760",
+ "Dylan": "#6c616e",
+ "E": "#ccce35",
+ "ECL": "#8a1267",
+ "EQ": "#a78649",
+ "Eiffel": "#946d57",
+ "Elixir": "#6e4a7e",
+ "Elm": "#60B5CC",
+ "Emacs Lisp": "#c065db",
+ "EmberScript": "#FFF4F3",
+ "Erlang": "#B83998",
+ "F#": "#b845fc",
+ "F*": "#572e30",
+ "FLUX": "#88ccff",
+ "Factor": "#636746",
+ "Fancy": "#7b9db4",
+ "Fantom": "#14253c",
+ "Faust": "#c37240",
+ "Forth": "#341708",
+ "Fortran": "#4d41b1",
+ "FreeMarker": "#0050b2",
+ "Frege": "#00cafe",
+ "G-code": "#D08CF2",
+ "GAML": "#FFC766",
+ "GDScript": "#355570",
+ "Game Maker Language": "#71b417",
+ "Genie": "#fb855d",
+ "Gherkin": "#5B2063",
+ "Glyph": "#c1ac7f",
+ "Gnuplot": "#f0a9f0",
+ "Go": "#00ADD8",
+ "Golo": "#88562A",
+ "Gosu": "#82937f",
+ "Grammatical Framework": "#79aa7a",
+ "Groovy": "#e69f56",
+ "HTML": "#e34c26",
+ "Hack": "#878787",
+ "Harbour": "#0e60e3",
+ "Haskell": "#5e5086",
+ "Haxe": "#df7900",
+ "HiveQL": "#dce200",
+ "HolyC": "#ffefaf",
+ "Hy": "#7790B2",
+ "IDL": "#a3522f",
+ "IGOR Pro": "#0000cc",
+ "Idris": "#b30000",
+ "Io": "#a9188d",
+ "Ioke": "#078193",
+ "Isabelle": "#FEFE00",
+ "J": "#9EEDFF",
+ "JSONiq": "#40d47e",
+ "Java": "#b07219",
+ "JavaScript": "#f1e05a",
+ "Jolie": "#843179",
+ "Jsonnet": "#0064bd",
+ "Julia": "#a270ba",
+ "Jupyter Notebook": "#DA5B0B",
+ "KRL": "#28430A",
+ "Kotlin": "#F18E33",
+ "LFE": "#4C3023",
+ "LLVM": "#185619",
+ "LOLCODE": "#cc9900",
+ "LSL": "#3d9970",
+ "Lasso": "#999999",
+ "Lex": "#DBCA00",
+ "LiveScript": "#499886",
+ "LookML": "#652B81",
+ "Lua": "#000080",
+ "MATLAB": "#e16737",
+ "MAXScript": "#00a6a6",
+ "MLIR": "#5EC8DB",
+ "MQL4": "#62A8D6",
+ "MQL5": "#4A76B8",
+ "MTML": "#b7e1f4",
+ "Makefile": "#427819",
+ "Mask": "#f97732",
+ "Max": "#c4a79c",
+ "Mercury": "#ff2b2b",
+ "Meson": "#007800",
+ "Metal": "#8f14e9",
+ "Mirah": "#c7a938",
+ "Modula-3": "#223388",
+ "NCL": "#28431f",
+ "Nearley": "#990000",
+ "Nemerle": "#3d3c6e",
+ "NetLinx": "#0aa0ff",
+ "NetLinx+ERB": "#747faa",
+ "NetLogo": "#ff6375",
+ "NewLisp": "#87AED7",
+ "Nextflow": "#3ac486",
+ "Nim": "#37775b",
+ "Nit": "#009917",
+ "Nix": "#7e7eff",
+ "Nu": "#c9df40",
+ "OCaml": "#3be133",
+ "ObjectScript": "#424893",
+ "Objective-C": "#438eff",
+ "Objective-C++": "#6866fb",
+ "Objective-J": "#ff0c5a",
+ "Odin": "#60AFFE",
+ "Omgrofl": "#cabbff",
+ "Opal": "#f7ede0",
+ "OpenQASM": "#AA70FF",
+ "Oxygene": "#cdd0e3",
+ "Oz": "#fab738",
+ "P4": "#7055b5",
+ "PHP": "#4F5D95",
+ "PLSQL": "#dad8d8",
+ "Pan": "#cc0000",
+ "Papyrus": "#6600cc",
+ "Parrot": "#f3ca0a",
+ "Pascal": "#E3F171",
+ "Pawn": "#dbb284",
+ "Pep8": "#C76F5B",
+ "Perl": "#0298c3",
+ "PigLatin": "#fcd7de",
+ "Pike": "#005390",
+ "PogoScript": "#d80074",
+ "PostScript": "#da291c",
+ "PowerBuilder": "#8f0f8d",
+ "PowerShell": "#012456",
+ "Processing": "#0096D8",
+ "Prolog": "#74283c",
+ "Propeller Spin": "#7fa2a7",
+ "Puppet": "#302B6D",
+ "PureBasic": "#5a6986",
+ "PureScript": "#1D222D",
+ "Python": "#3572A5",
+ "QML": "#44a51c",
+ "Quake": "#882233",
+ "R": "#198CE7",
+ "RAML": "#77d9fb",
+ "RUNOFF": "#665a4e",
+ "Racket": "#3c5caa",
+ "Ragel": "#9d5200",
+ "Raku": "#0000fb",
+ "Rascal": "#fffaa0",
+ "Reason": "#ff5847",
+ "Rebol": "#358a5b",
+ "Red": "#f50000",
+ "Ren'Py": "#ff7f7f",
+ "Ring": "#2D54CB",
+ "Riot": "#A71E49",
+ "Roff": "#ecdebe",
+ "Rouge": "#cc0088",
+ "Ruby": "#701516",
+ "Rust": "#dea584",
+ "SAS": "#B34936",
+ "SQF": "#3F3F3F",
+ "SRecode Template": "#348a34",
+ "SaltStack": "#646464",
+ "Scala": "#c22d40",
+ "Scheme": "#1e4aec",
+ "Self": "#0579aa",
+ "Shell": "#89e051",
+ "Shen": "#120F14",
+ "Slash": "#007eff",
+ "Slice": "#003fa2",
+ "SmPL": "#c94949",
+ "Smalltalk": "#596706",
+ "Solidity": "#AA6746",
+ "SourcePawn": "#5c7611",
+ "Squirrel": "#800000",
+ "Stan": "#b2011d",
+ "Standard ML": "#dc566d",
+ "Starlark": "#76d275",
+ "SuperCollider": "#46390b",
+ "Swift": "#ffac45",
+ "SystemVerilog": "#DAE1C2",
+ "TI Program": "#A0AA87",
+ "Tcl": "#e4cc98",
+ "TeX": "#3D6117",
+ "Terra": "#00004c",
+ "Turing": "#cf142b",
+ "TypeScript": "#2b7489",
+ "UnrealScript": "#a54c4d",
+ "V": "#5d87bd",
+ "VBA": "#867db1",
+ "VBScript": "#15dcdc",
+ "VCL": "#148AA8",
+ "VHDL": "#adb2cb",
+ "Vala": "#fbe5cd",
+ "Verilog": "#b2b7f8",
+ "Vim script": "#199f4b",
+ "Visual Basic .NET": "#945db7",
+ "Volt": "#1F1F1F",
+ "Vue": "#2c3e50",
+ "WebAssembly": "#04133b",
+ "Wollok": "#a23738",
+ "X10": "#4B6BEF",
+ "XC": "#99DA07",
+ "XQuery": "#5232e7",
+ "XSLT": "#EB8CEB",
+ "YARA": "#220000",
+ "YASnippet": "#32AB90",
+ "Yacc": "#4B6C4B",
+ "ZAP": "#0d665e",
+ "ZIL": "#dc75e5",
+ "ZenScript": "#00BCD1",
+ "Zephir": "#118f9e",
+ "Zig": "#ec915c",
+ "eC": "#913960",
+ "mIRC Script": "#926059",
+ "mcfunction": "#E22837",
+ "nesC": "#94B0C7",
+ "ooc": "#b0b77e",
+ "q": "#0040cd",
+ "sed": "#64b970",
+ "wdl": "#42f1f4",
+ "wisp": "#7582D1",
+ "xBase": "#403a40",
+}
+
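+// getLabel returns the localized UI string for key: the zh-CN table below when
+// isZh is true, otherwise the English table (esUN).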
+function getLabel(isZh,key){
+ if(isZh){
+ return zhCN[key]
+ }else{
+ return esUN[key]
+ }
+}
+
+var zhCN={
+ "search":"搜索",
+ "search_repo":"项目",
+ "search_dataset":"数据集",
+ "search_issue":"任务",
+ "search_pr":"合并请求",
+ "search_user":"用户",
+ "search_org":"组织",
+ "search_finded":"找到",
+ "search_matched":"最佳匹配",
+ "search_matched_download":"下载次数",
+ "search_lasted_update":"最后更新于",
+ "search_letter_asc":"字母顺序排序",
+ "search_letter_desc":"字母逆序排序",
+ "search_lasted_create":"最近创建",
+ "search_early_create":"最早创建",
+ "search_add_by":"加入于",
+ "search_lasted":"最近更新",
+ "search_open":"开启中",
+ "search_closed":"已关闭",
+ "search_watched":"关注数",
+ "search_star":"点赞数",
+ "search_fork":"Fork数",
+ "search_input_large_0":"请输入大于0的数值。",
+ "search_input_maxed":"不能超出总页数。",
+ "search_input_total":"共",
+ "search_srtip":"条",
+ "search_home_page":"首页",
+ "search_last_page":"末页",
+ "search_go_to":"前往",
+ "search_go_page":"页",
+ "find_title":"“{keyword}”相关{tablename}约为{total}个",
+ "search_empty":"请输入任意关键字开始搜索。"
+ }
+
+ var esUN={
+ "search":"Search",
+ "search_repo":"Repository",
+ "search_dataset":"DataSet",
+ "search_issue":"Issue",
+ "search_pr":"Pull Request",
+ "search_user":"User",
+ "search_org":"Organization",
+ "search_finded":"Find",
+ "search_matched":"Best Match",
+ "search_matched_download":"Most downloads",
+ "search_lasted_update":"Updated ",
+ "search_letter_asc":"Alphabetically",
+ "search_letter_desc":"Reverse alphabetically",
+ "search_lasted_create":"Recently created",
+ "search_early_create":"First created",
+ "search_add_by":"Joined on",
+ "search_lasted":"Recently updated",
+ "search_open":"Open",
+ "search_closed":"Closed",
+ "search_watched":"Watches",
+ "search_star":"Stars",
+ "search_fork":"Forks",
+ "search_input_large_0":"Please enter a value greater than 0.",
+ "search_input_maxed":"Cannot exceed total pages.",
+ "search_input_total":"Total",
+ "search_srtip":"",
+ "search_home_page":"First",
+ "search_last_page":"Last",
+ "search_go_to":"Go",
+ "search_go_page":"Page",
+ "find_title":" {total} \"{keyword}\" related {tablename}",
+ "search_empty":"Please enter any keyword to start the search."
+ }
+ initDiv(false);
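+ // After the page finishes loading, replay a pending label search stashed in
+ // sessionStorage (set when a dataset label was clicked elsewhere); otherwise
+ // run a normal keyword search.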
+ document.onreadystatechange = function() {
+ if (document.readyState === "complete") {
+ var tmpSearchLabel = sessionStorage.getItem("searchLabel");
+ console.log("tmpSearchLabel=" + tmpSearchLabel);
+ if(tmpSearchLabel){
+ console.log("search label....");
+ sessionStorage.removeItem("searchLabel");
+ doSearchLabel(sessionStorage.getItem("tableName"),sessionStorage.getItem("keyword"),sessionStorage.getItem("sortBy"),sessionStorage.getItem("ascending"));
+ }else{
+ console.log("normal search....");
+ search();
+ }
+ }
+ }
+
+
diff --git a/public/self/dataset_preview.js b/public/self/dataset_preview.js
index 3c9ded0aa..e6b79dd7d 100644
--- a/public/self/dataset_preview.js
+++ b/public/self/dataset_preview.js
@@ -620,10 +620,10 @@ function showfilelist(){
        for (var i=0;i < labeltastresult.length;i++){
            if(labeltastresult[i].pic_image_field.length > 70){
var tmpIndex = labeltastresult[i].pic_image_field.indexOf("/",70);
- console.log(tmpIndex)
+ //console.log(tmpIndex)
if(tmpIndex != -1){
fname = labeltastresult[i].pic_image_field.substring(tmpIndex + 1);
fname = fname.substring(fname.indexOf('/')+1);
@@ -679,7 +679,7 @@ function breadFiles(){
fname_full_path = tableData[fileindex].pic_image_field.substring(tmp_index + 1);
}
var fname_path = fname_full_path.split('/')
- console.log(fname_path)
+ //console.log(fname_path)
// var filename_text = tableData[fileindex].pic_image_field.substring(tableData[fileindex].pic_image_field.lastIndexOf('/')+1)
var html_breadFile = ''
// var source_name = filename_title+'.zip'
diff --git a/routers/admin/cloudbrains.go b/routers/admin/cloudbrains.go
old mode 100644
new mode 100755
index 6bbd534b9..884ed6b9b
--- a/routers/admin/cloudbrains.go
+++ b/routers/admin/cloudbrains.go
@@ -41,7 +41,7 @@ func CloudBrains(ctx *context.Context) {
if page <= 0 {
page = 1
}
- debugType := modelarts.DebugType
+ debugType := models.TypeCloudBrainAll
if listType == models.GPUResource {
debugType = models.TypeCloudBrainOne
} else if listType == models.NPUResource {
@@ -121,7 +121,7 @@ func DownloadCloudBrains(ctx *context.Context) {
Page: page,
PageSize: 1,
},
- Type: modelarts.DebugType,
+ Type: models.TypeCloudBrainAll,
NeedRepoInfo: false,
IsLatestVersion: modelarts.IsLatestVersion,
})
@@ -151,7 +151,7 @@ func DownloadCloudBrains(ctx *context.Context) {
Page: page,
PageSize: pageSize,
},
- Type: modelarts.DebugType,
+ Type: models.TypeCloudBrainAll,
NeedRepoInfo: true,
IsLatestVersion: modelarts.IsLatestVersion,
})
diff --git a/routers/api/v1/api.go b/routers/api/v1/api.go
index 87af1d19b..fded809f4 100755
--- a/routers/api/v1/api.go
+++ b/routers/api/v1/api.go
@@ -62,10 +62,10 @@ import (
"net/http"
"strings"
- "code.gitea.io/gitea/routers/authentication"
-
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/auth"
+
+ "code.gitea.io/gitea/modules/cloudbrain"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
@@ -77,6 +77,7 @@ import (
"code.gitea.io/gitea/routers/api/v1/repo"
_ "code.gitea.io/gitea/routers/api/v1/swagger" // for swagger generation
"code.gitea.io/gitea/routers/api/v1/user"
+ "code.gitea.io/gitea/routers/authentication"
repo_ext "code.gitea.io/gitea/routers/repo"
"gitea.com/macaron/binding"
@@ -890,6 +891,13 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Group("/cloudbrain", func() {
m.Get("/:id", repo.GetCloudbrainTask)
m.Get("/:id/log", repo.CloudbrainGetLog)
+ m.Group("/train-job", func() {
+ m.Group("/:jobid", func() {
+ m.Get("", repo.GetModelArtsTrainJobVersion)
+ m.Get("/model_list", repo.CloudBrainModelList)
+ m.Post("/stop_version", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo_ext.CloudBrainStop)
+ })
+ })
}, reqRepoReader(models.UnitTypeCloudBrain))
m.Group("/modelarts", func() {
m.Group("/notebook", func() {
diff --git a/routers/api/v1/repo/cloudbrain.go b/routers/api/v1/repo/cloudbrain.go
index f92259c3d..a81263823 100755
--- a/routers/api/v1/repo/cloudbrain.go
+++ b/routers/api/v1/repo/cloudbrain.go
@@ -6,15 +6,18 @@
package repo
import (
+ "encoding/json"
"net/http"
"sort"
+ "strings"
"time"
- "code.gitea.io/gitea/modules/log"
-
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/cloudbrain"
"code.gitea.io/gitea/modules/context"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/storage"
+ routerRepo "code.gitea.io/gitea/routers/repo"
)
// cloudbrain get job task by jobid
@@ -62,7 +65,7 @@ func GetCloudbrainTask(ctx *context.APIContext) {
log.Error("GetJob failed:", err)
return
}
 	result, err := models.ConvertToJobResultPayload(jobResult.Payload)
if err != nil {
ctx.NotFound(err)
log.Error("ConvertToJobResultPayload failed:", err)
@@ -70,16 +73,16 @@ func GetCloudbrainTask(ctx *context.APIContext) {
}
job.Status = result.JobStatus.State
+ taskRoles := result.TaskRoles
+ taskRes, _ := models.ConvertToTaskPod(taskRoles[cloudbrain.SubTaskName].(map[string]interface{}))
if result.JobStatus.State != string(models.JobWaiting) && result.JobStatus.State != string(models.JobFailed) {
- taskRoles := result.TaskRoles
- taskRes, _ := models.ConvertToTaskPod(taskRoles[cloudbrain.SubTaskName].(map[string]interface{}))
-
job.ContainerIp = taskRes.TaskStatuses[0].ContainerIP
job.ContainerID = taskRes.TaskStatuses[0].ContainerID
job.Status = taskRes.TaskStatuses[0].State
}
if result.JobStatus.State != string(models.JobWaiting) {
+ models.ParseAndSetDurationFromCloudBrainOne(result, job)
err = models.UpdateJob(job)
if err != nil {
log.Error("UpdateJob failed:", err)
@@ -152,3 +155,55 @@ func CloudbrainGetLog(ctx *context.Context) {
return
}
+
+func CloudBrainModelList(ctx *context.APIContext) {
+ var (
+ err error
+ )
+
+ var jobID = ctx.Params(":jobid")
+ var versionName = ctx.Query("version_name")
+ parentDir := ctx.Query("parentDir")
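+	// parentDir is a "/"-separated path inside the job's model output directory;
+	// its segments are returned as "Path" so the front end can render a breadcrumb.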
+ dirArray := strings.Split(parentDir, "/")
+
+ task, err := models.GetCloudbrainByJobIDAndVersionName(jobID, versionName)
+ if err != nil {
+ log.Error("GetCloudbrainByJobID(%s) failed:%v", task.JobName, err.Error())
+ return
+ }
+
+ //get dirs
+ dirs, err := routerRepo.GetModelDirs(task.JobName, parentDir)
+ if err != nil {
+ log.Error("GetModelDirs failed:%v", err.Error(), ctx.Data["msgID"])
+ ctx.ServerError("GetModelDirs failed:", err)
+ return
+ }
+
+ var fileInfos []storage.FileInfo
+ err = json.Unmarshal([]byte(dirs), &fileInfos)
+ if err != nil {
+ log.Error("json.Unmarshal failed:%v", err.Error(), ctx.Data["msgID"])
+ ctx.ServerError("json.Unmarshal failed:", err)
+ return
+ }
+
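+	// Normalize each file's stored mod-time to local time, then sort newest-first.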
+ for i, fileInfo := range fileInfos {
+ temp, _ := time.Parse("2006-01-02 15:04:05", fileInfo.ModTime)
+ fileInfos[i].ModTime = temp.Local().Format("2006-01-02 15:04:05")
+ }
+
+ sort.Slice(fileInfos, func(i, j int) bool {
+ return fileInfos[i].ModTime > fileInfos[j].ModTime
+ })
+
+ ctx.JSON(http.StatusOK, map[string]interface{}{
+ "JobID": jobID,
+ "VersionName": versionName,
+ "StatusOK": 0,
+ "Path": dirArray,
+ "Dirs": fileInfos,
+ "task": task,
+ "PageIsCloudBrain": true,
+ })
+}
diff --git a/routers/api/v1/repo/modelarts.go b/routers/api/v1/repo/modelarts.go
index 893f2a32c..e24ac95fb 100755
--- a/routers/api/v1/repo/modelarts.go
+++ b/routers/api/v1/repo/modelarts.go
@@ -10,13 +10,13 @@ import (
"strconv"
"strings"
- "code.gitea.io/gitea/modules/util"
-
"code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/modules/cloudbrain"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/modelarts"
"code.gitea.io/gitea/modules/storage"
+ "code.gitea.io/gitea/modules/timeutil"
routerRepo "code.gitea.io/gitea/routers/repo"
)
@@ -67,8 +67,14 @@ func GetModelArtsNotebook2(ctx *context.APIContext) {
ctx.NotFound(err)
return
}
-
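+	// ModelArts reports lease times in milliseconds; backfill StartTime once, stamp
+	// EndTime when the notebook reaches a terminal status, then recompute the duration.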
+ if job.StartTime == 0 && result.Lease.UpdateTime > 0 {
+ job.StartTime = timeutil.TimeStamp(result.Lease.UpdateTime / 1000)
+ }
job.Status = result.Status
+ if job.EndTime == 0 && models.IsModelArtsDebugJobTerminal(job.Status) {
+ job.EndTime = timeutil.TimeStampNow()
+ }
+ job.ComputeAndSetDuration()
err = models.UpdateJob(job)
if err != nil {
log.Error("UpdateJob failed:", err)
@@ -128,26 +134,61 @@ func GetModelArtsTrainJobVersion(ctx *context.APIContext) {
ctx.NotFound(err)
return
}
- result, err := modelarts.GetTrainJob(jobID, strconv.FormatInt(job.VersionID, 10))
- if err != nil {
- ctx.NotFound(err)
- return
- }
- job.Status = modelarts.TransTrainJobStatus(result.IntStatus)
- job.Duration = result.Duration
- job.TrainJobDuration = result.TrainJobDuration
+ if job.Type == models.TypeCloudBrainOne {
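+		// GPU (cloudbrain one) train jobs are polled through the cloudbrain REST API,
+		// so status and container info come from the job result payload.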
+ jobResult, err := cloudbrain.GetJob(job.JobID)
+ if err != nil {
+ ctx.NotFound(err)
+ log.Error("GetJob failed:", err)
+ return
+ }
+ result, err := models.ConvertToJobResultPayload(jobResult.Payload)
+ if err != nil {
+ ctx.NotFound(err)
+ log.Error("ConvertToJobResultPayload failed:", err)
+ return
+ }
- if result.Duration != 0 {
- job.TrainJobDuration = util.AddZero(result.Duration/3600000) + ":" + util.AddZero(result.Duration%3600000/60000) + ":" + util.AddZero(result.Duration%60000/1000)
+ job.Status = result.JobStatus.State
+ if result.JobStatus.State != string(models.JobWaiting) && result.JobStatus.State != string(models.JobFailed) {
+ taskRoles := result.TaskRoles
+ taskRes, _ := models.ConvertToTaskPod(taskRoles[cloudbrain.SubTaskName].(map[string]interface{}))
+ job.ContainerIp = taskRes.TaskStatuses[0].ContainerIP
+ job.ContainerID = taskRes.TaskStatuses[0].ContainerID
+ job.Status = taskRes.TaskStatuses[0].State
+ }
+
+ if result.JobStatus.State != string(models.JobWaiting) {
+ err = models.UpdateJob(job)
+ if err != nil {
+ log.Error("UpdateJob failed:", err)
+ }
+ }
} else {
- job.TrainJobDuration = "00:00:00"
- }
+ result, err := modelarts.GetTrainJob(jobID, strconv.FormatInt(job.VersionID, 10))
+ if err != nil {
+ ctx.NotFound(err)
+ return
+ }
- err = models.UpdateTrainJobVersion(job)
- if err != nil {
- log.Error("UpdateJob failed:", err)
+ if job.StartTime == 0 && result.StartTime > 0 {
+ job.StartTime = timeutil.TimeStamp(result.StartTime / 1000)
+ }
+ job.Status = modelarts.TransTrainJobStatus(result.IntStatus)
+ job.Duration = result.Duration / 1000
+		job.TrainJobDuration = models.ConvertDurationToStr(job.Duration)
+
+ if job.EndTime == 0 && models.IsTrainJobTerminal(job.Status) && job.StartTime > 0 {
+ job.EndTime = job.StartTime.Add(job.Duration)
+ }
+
+ err = models.UpdateTrainJobVersion(job)
+ if err != nil {
+ log.Error("UpdateJob failed:", err)
+ }
}
ctx.JSON(http.StatusOK, map[string]interface{}{
@@ -366,16 +407,15 @@ func GetModelArtsInferenceJob(ctx *context.APIContext) {
ctx.NotFound(err)
return
}
-
+ if job.StartTime == 0 && result.StartTime > 0 {
+ job.StartTime = timeutil.TimeStamp(result.StartTime / 1000)
+ }
job.Status = modelarts.TransTrainJobStatus(result.IntStatus)
- job.Duration = result.Duration
- job.TrainJobDuration = result.TrainJobDuration
+ job.Duration = result.Duration / 1000
+ job.TrainJobDuration = models.ConvertDurationToStr(job.Duration)
- if result.Duration != 0 {
- job.TrainJobDuration = util.AddZero(result.Duration/3600000) + ":" + util.AddZero(result.Duration%3600000/60000) + ":" + util.AddZero(result.Duration%60000/1000)
-
- } else {
- job.TrainJobDuration = "00:00:00"
+ if job.EndTime == 0 && models.IsTrainJobTerminal(job.Status) && job.StartTime > 0 {
+ job.EndTime = job.StartTime.Add(job.Duration)
}
err = models.UpdateInferenceJob(job)
diff --git a/routers/home.go b/routers/home.go
index 2db8d2112..c33d7a049 100755
--- a/routers/home.go
+++ b/routers/home.go
@@ -274,10 +274,11 @@ func ExploreDatasets(ctx *context.Context) {
// ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled
var (
- datasets []*models.Dataset
- count int64
- err error
- orderBy models.SearchOrderBy
+ datasets []*models.Dataset
+ datasetsWithStar []*models.DatasetWithStar
+ count int64
+ err error
+ orderBy models.SearchOrderBy
)
page := ctx.QueryInt("page")
if page <= 0 {
@@ -301,6 +302,10 @@ func ExploreDatasets(ctx *context.Context) {
orderBy = models.SearchOrderBySizeReverse
case "downloadtimes":
orderBy = models.SearchOrderByDownloadTimes
+ case "moststars":
+ orderBy = models.SearchOrderByStarsReverse
+ case "feweststars":
+ orderBy = models.SearchOrderByStars
default:
ctx.Data["SortType"] = "recentupdate"
orderBy = models.SearchOrderByRecentUpdated
@@ -308,6 +313,9 @@ func ExploreDatasets(ctx *context.Context) {
keyword := strings.Trim(ctx.Query("q"), " ")
+ category := ctx.Query("category")
+ task := ctx.Query("task")
+ license := ctx.Query("license")
var ownerID int64
if ctx.User != nil && !ctx.User.IsAdmin {
ownerID = ctx.User.ID
@@ -316,25 +324,40 @@ func ExploreDatasets(ctx *context.Context) {
Keyword: keyword,
IncludePublic: true,
SearchOrderBy: orderBy,
+ Category: category,
+ Task: task,
+ License: license,
OwnerID: ownerID,
ListOptions: models.ListOptions{
Page: page,
- PageSize: setting.UI.ExplorePagingNum,
+ PageSize: 30,
},
}
datasets, count, err = models.SearchDataset(opts)
+
if err != nil {
ctx.ServerError("SearchDatasets", err)
return
}
+ for _, dataset := range datasets {
+ if !ctx.IsSigned {
+ datasetsWithStar = append(datasetsWithStar, &models.DatasetWithStar{Dataset: *dataset, IsStaring: false})
+ } else {
+ datasetsWithStar = append(datasetsWithStar, &models.DatasetWithStar{Dataset: *dataset, IsStaring: models.IsDatasetStaring(ctx.User.ID, dataset.ID)})
+ }
+
+ }
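+	// Wrapping each dataset with the viewer's star state lets the explore page
+	// render the star/unstar toggle without an extra query per row.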
pager := context.NewPagination(int(count), opts.PageSize, page, 5)
ctx.Data["Keyword"] = opts.Keyword
+ ctx.Data["Category"] = category
+ ctx.Data["Task"] = task
+ ctx.Data["License"] = license
pager.SetDefaultParams(ctx)
ctx.Data["Page"] = pager
- ctx.Data["Datasets"] = datasets
+ ctx.Data["Datasets"] = datasetsWithStar
ctx.Data["Total"] = count
ctx.Data["PageIsDatasets"] = true
ctx.HTML(200, tplExploreDataset)
diff --git a/routers/init.go b/routers/init.go
index 8b93b64d8..eab513c78 100755
--- a/routers/init.go
+++ b/routers/init.go
@@ -71,6 +71,8 @@ func NewServices() {
log.Info("decompression.NewContext() succeed.")
labelmsg.Init()
log.Info("labelmsg.Init() succeed.")
+ InitESClient()
+ log.Info("ES Client succeed.")
}
// In case of problems connecting to DB, retry connection. Eg, PGSQL in Docker Container on Synology
diff --git a/routers/private/internal.go b/routers/private/internal.go
index 0dd725ca3..d80a706cc 100755
--- a/routers/private/internal.go
+++ b/routers/private/internal.go
@@ -6,6 +6,7 @@
package private
import (
+ "code.gitea.io/gitea/routers/repo"
"strings"
"code.gitea.io/gitea/modules/log"
@@ -45,6 +46,7 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Post("/tool/update_all_repo_commit_cnt", UpdateAllRepoCommitCnt)
m.Post("/tool/repo_stat/:date", RepoStatisticManually)
m.Post("/tool/update_repo_visit/:date", UpdateRepoVisit)
+ m.Post("/task/history_handle/duration", repo.HandleTaskWithNoDuration)
}, CheckInternalToken)
}
diff --git a/routers/repo/attachment.go b/routers/repo/attachment.go
index 668169110..96f17b74b 100755
--- a/routers/repo/attachment.go
+++ b/routers/repo/attachment.go
@@ -15,6 +15,10 @@ import (
"strconv"
"strings"
+ "code.gitea.io/gitea/modules/auth"
+
+ "code.gitea.io/gitea/modules/base"
+
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/labelmsg"
@@ -30,8 +34,10 @@ import (
const (
//result of decompress
- DecompressSuccess = "0"
- DecompressFailed = "1"
+ DecompressSuccess = "0"
+ DecompressFailed = "1"
+ tplAttachmentUpload base.TplName = "repo/attachment/upload"
+ tplAttachmentEdit base.TplName = "repo/attachment/edit"
)
type CloudBrainDataset struct {
@@ -63,6 +69,40 @@ func renderAttachmentSettings(ctx *context.Context) {
ctx.Data["AttachmentMaxFiles"] = setting.Attachment.MaxFiles
}
+func UploadAttachmentUI(ctx *context.Context) {
+ ctx.Data["datasetId"] = ctx.Query("datasetId")
+ ctx.Data["PageIsDataset"] = true
+
+ ctx.HTML(200, tplAttachmentUpload)
+
+}
+
+func EditAttachmentUI(ctx *context.Context) {
+
+ id, _ := strconv.ParseInt(ctx.Params(":id"), 10, 64)
+ ctx.Data["PageIsDataset"] = true
+ attachment, _ := models.GetAttachmentByID(id)
+	if attachment == nil {
+		ctx.Error(404, "The attachment does not exist.")
+		return
+	}
+ ctx.Data["Attachment"] = attachment
+ ctx.HTML(200, tplAttachmentEdit)
+
+}
+
+func EditAttachment(ctx *context.Context, form auth.EditAttachmentForm) {
+
+ err := models.UpdateAttachmentDescription(&models.Attachment{
+ ID: form.ID,
+ Description: form.Description,
+ })
+	if err != nil {
+		ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.edit_attachment_fail")))
+		return
+	}
+	ctx.JSON(http.StatusOK, models.BaseOKMessage)
+
+}
+
// UploadAttachment response for uploading issue's attachment
func UploadAttachment(ctx *context.Context) {
if !setting.Attachment.Enabled {
@@ -241,14 +281,20 @@ func GetAttachment(ctx *context.Context) {
}
if dataSet != nil {
- isPermit, err := models.GetUserDataSetPermission(dataSet, ctx.User)
- if err != nil {
- ctx.Error(http.StatusInternalServerError, "GetUserDataSetPermission", err.Error())
- return
- }
- if !isPermit {
- ctx.Error(http.StatusNotFound)
+ if !ctx.IsSigned {
+ ctx.SetCookie("redirect_to", setting.AppSubURL+ctx.Req.URL.RequestURI(), 0, setting.AppSubURL)
+ ctx.Redirect(setting.AppSubURL + "/user/login")
return
+ } else {
+ isPermit, err := models.GetUserDataSetPermission(dataSet, ctx.User)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, "GetUserDataSetPermission", err.Error())
+ return
+ }
+ if !isPermit {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
}
}
@@ -387,11 +433,17 @@ func AddAttachment(ctx *context.Context) {
ctx.Error(404, "attachment has not been uploaded")
return
}
+ datasetId := ctx.QueryInt64("dataset_id")
+ dataset, err := models.GetDatasetByID(datasetId)
+ if err != nil {
+ ctx.Error(404, "dataset does not exist.")
+ return
+ }
attachment, err := models.InsertAttachment(&models.Attachment{
UUID: uuid,
UploaderID: ctx.User.ID,
- IsPrivate: true,
+ IsPrivate: dataset.IsPrivate(),
Name: fileName,
Size: ctx.QueryInt64("size"),
DatasetID: ctx.QueryInt64("dataset_id"),
@@ -798,6 +850,9 @@ func CompleteMultipart(ctx *context.Context) {
typeCloudBrain := ctx.QueryInt("type")
fileName := ctx.Query("file_name")
+ log.Warn("uuid:" + uuid)
+ log.Warn("typeCloudBrain:" + strconv.Itoa(typeCloudBrain))
+
err := checkTypeCloudBrain(typeCloudBrain)
if err != nil {
ctx.ServerError("checkTypeCloudBrain failed", err)
@@ -835,22 +890,24 @@ func CompleteMultipart(ctx *context.Context) {
ctx.Error(500, fmt.Sprintf("UpdateFileChunk: %v", err))
return
}
-
+	dataset, err := models.GetDatasetByID(ctx.QueryInt64("dataset_id"))
+	if err != nil {
+		ctx.Error(404, "dataset does not exist.")
+		return
+	}
+	log.Warn("insert attachment to datasetId:" + strconv.FormatInt(dataset.ID, 10))
attachment, err := models.InsertAttachment(&models.Attachment{
- UUID: uuid,
- UploaderID: ctx.User.ID,
- IsPrivate: true,
- Name: fileName,
- Size: ctx.QueryInt64("size"),
- DatasetID: ctx.QueryInt64("dataset_id"),
- Type: typeCloudBrain,
+ UUID: uuid,
+ UploaderID: ctx.User.ID,
+ IsPrivate: dataset.IsPrivate(),
+ Name: fileName,
+ Size: ctx.QueryInt64("size"),
+ DatasetID: ctx.QueryInt64("dataset_id"),
+ Description: ctx.Query("description"),
+ Type: typeCloudBrain,
})
if err != nil {
ctx.Error(500, fmt.Sprintf("InsertAttachment: %v", err))
return
}
- dataset, _ := models.GetDatasetByID(attachment.DatasetID)
+
repository, _ := models.GetRepositoryByID(dataset.RepoID)
notification.NotifyOtherTask(ctx.User, repository, fmt.Sprint(attachment.Type), attachment.Name, models.ActionUploadAttachment)
diff --git a/routers/repo/cloudbrain.go b/routers/repo/cloudbrain.go
index 65db818a0..898f3844f 100755
--- a/routers/repo/cloudbrain.go
+++ b/routers/repo/cloudbrain.go
@@ -2,6 +2,7 @@ package repo
import (
"bufio"
+ "code.gitea.io/gitea/modules/timeutil"
"encoding/json"
"errors"
"fmt"
@@ -36,6 +37,9 @@ const (
tplCloudBrainBenchmarkIndex base.TplName = "repo/cloudbrain/benchmark/index"
tplCloudBrainBenchmarkNew base.TplName = "repo/cloudbrain/benchmark/new"
tplCloudBrainBenchmarkShow base.TplName = "repo/cloudbrain/benchmark/show"
+
+ tplCloudBrainTrainJobNew base.TplName = "repo/cloudbrain/trainjob/new"
+ tplCloudBrainTrainJobShow base.TplName = "repo/cloudbrain/trainjob/show"
)
var (
@@ -44,6 +48,7 @@ var (
benchmarkTypes *models.BenchmarkTypes
benchmarkGpuInfos *models.GpuInfos
benchmarkResourceSpecs *models.ResourceSpecs
+ trainGpuInfos *models.GpuInfos
)
const BENCHMARK_TYPE_CODE = "repo.cloudbrain.benchmark.types"
@@ -142,6 +147,11 @@ func cloudBrainNewDataPrepare(ctx *context.Context) error {
}
ctx.Data["gpu_types"] = gpuInfos.GpuInfo
+ if trainGpuInfos == nil {
+ json.Unmarshal([]byte(setting.TrainGpuTypes), &trainGpuInfos)
+ }
+ ctx.Data["train_gpu_types"] = trainGpuInfos.GpuInfo
+
if benchmarkGpuInfos == nil {
json.Unmarshal([]byte(setting.BenchmarkGpuTypes), &benchmarkGpuInfos)
}
@@ -156,12 +166,22 @@ func cloudBrainNewDataPrepare(ctx *context.Context) error {
json.Unmarshal([]byte(setting.ResourceSpecs), &cloudbrain.ResourceSpecs)
}
ctx.Data["resource_specs"] = cloudbrain.ResourceSpecs.ResourceSpec
+
+ if cloudbrain.TrainResourceSpecs == nil {
+ json.Unmarshal([]byte(setting.TrainResourceSpecs), &cloudbrain.TrainResourceSpecs)
+ }
+ ctx.Data["train_resource_specs"] = cloudbrain.TrainResourceSpecs.ResourceSpec
+ ctx.Data["params"] = ""
+ ctx.Data["branchName"] = ctx.Repo.BranchName
+
ctx.Data["snn4imagenet_path"] = cloudbrain.Snn4imagenetMountPath
ctx.Data["is_snn4imagenet_enabled"] = setting.IsSnn4imagenetEnabled
ctx.Data["brainscore_path"] = cloudbrain.BrainScoreMountPath
ctx.Data["is_brainscore_enabled"] = setting.IsBrainScoreEnabled
+ ctx.Data["cloudbraintype"] = models.TypeCloudBrainOne
+
return nil
}
@@ -181,38 +201,52 @@ func CloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) {
image := form.Image
uuid := form.Attachment
jobType := form.JobType
- command := cloudbrain.Command
gpuQueue := form.GpuType
codePath := setting.JobPath + jobName + cloudbrain.CodeMountPath
resourceSpecId := form.ResourceSpecId
+ branchName := form.BranchName
repo := ctx.Repo.Repository
- tasks, err := models.GetCloudbrainsByDisplayJobName(repo.ID, string(models.JobTypeDebug), displayJobName)
+ tpl := tplCloudBrainNew
+ command := cloudbrain.Command
+ if jobType == string(models.JobTypeTrain) {
+ tpl = tplCloudBrainTrainJobNew
+ commandTrain, err := getTrainJobCommand(form)
+ if err != nil {
+ log.Error("getTrainJobCommand failed: %v", err)
+ ctx.RenderWithErr(err.Error(), tpl, &form)
+ return
+ }
+
+ command = commandTrain
+ }
+
+ tasks, err := models.GetCloudbrainsByDisplayJobName(repo.ID, jobType, displayJobName)
if err == nil {
if len(tasks) != 0 {
log.Error("the job name did already exist", ctx.Data["MsgID"])
cloudBrainNewDataPrepare(ctx)
- ctx.RenderWithErr("the job name did already exist", tplCloudBrainNew, &form)
+ ctx.RenderWithErr("the job name did already exist", tpl, &form)
return
}
} else {
if !models.IsErrJobNotExist(err) {
log.Error("system error, %v", err, ctx.Data["MsgID"])
cloudBrainNewDataPrepare(ctx)
- ctx.RenderWithErr("system error", tplCloudBrainNew, &form)
+ ctx.RenderWithErr("system error", tpl, &form)
return
}
}
if !jobNamePattern.MatchString(displayJobName) {
- ctx.RenderWithErr(ctx.Tr("repo.cloudbrain_jobname_err"), tplCloudBrainNew, &form)
+ ctx.RenderWithErr(ctx.Tr("repo.cloudbrain_jobname_err"), tpl, &form)
return
}
- if jobType != string(models.JobTypeBenchmark) && jobType != string(models.JobTypeDebug) && jobType != string(models.JobTypeSnn4imagenet) && jobType != string(models.JobTypeBrainScore) {
+ if jobType != string(models.JobTypeBenchmark) && jobType != string(models.JobTypeDebug) && jobType != string(models.JobTypeSnn4imagenet) && jobType != string(models.JobTypeBrainScore) && jobType != string(models.JobTypeTrain) {
log.Error("jobtype error:", jobType, ctx.Data["MsgID"])
cloudBrainNewDataPrepare(ctx)
- ctx.RenderWithErr("jobtype error", tplCloudBrainNew, &form)
+ ctx.RenderWithErr("jobtype error", tpl, &form)
return
}
@@ -220,18 +254,21 @@ func CloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) {
if err != nil {
log.Error("GetCloudbrainCountByUserID failed:%v", err, ctx.Data["MsgID"])
cloudBrainNewDataPrepare(ctx)
- ctx.RenderWithErr("system error", tplCloudBrainNew, &form)
+ ctx.RenderWithErr("system error", tpl, &form)
return
} else {
if count >= 1 {
log.Error("the user already has running or waiting task", ctx.Data["MsgID"])
cloudBrainNewDataPrepare(ctx)
- ctx.RenderWithErr("you have already a running or waiting task, can not create more", tplCloudBrainNew, &form)
+ ctx.RenderWithErr("you have already a running or waiting task, can not create more", tpl, &form)
return
}
}
- downloadCode(repo, codePath)
+ if branchName == "" {
+ branchName = cloudbrain.DefaultBranchName
+ }
+ downloadCode(repo, codePath, branchName)
uploadCodeToMinio(codePath+"/", jobName, cloudbrain.CodeMountPath+"/")
modelPath := setting.JobPath + jobName + cloudbrain.ModelMountPath + "/"
@@ -265,15 +302,19 @@ func CloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) {
err = cloudbrain.GenerateTask(ctx, displayJobName, jobName, image, command, uuid, storage.GetMinioPath(jobName, cloudbrain.CodeMountPath+"/"),
storage.GetMinioPath(jobName, cloudbrain.ModelMountPath+"/"),
storage.GetMinioPath(jobName, cloudbrain.BenchMarkMountPath+"/"), storage.GetMinioPath(jobName, cloudbrain.Snn4imagenetMountPath+"/"),
- storage.GetMinioPath(jobName, cloudbrain.BrainScoreMountPath+"/"), jobType, gpuQueue, form.Description,
+ storage.GetMinioPath(jobName, cloudbrain.BrainScoreMountPath+"/"), jobType, gpuQueue, form.Description, branchName, form.BootFile, form.Params,
0, 0, resourceSpecId)
if err != nil {
cloudBrainNewDataPrepare(ctx)
- ctx.RenderWithErr(err.Error(), tplCloudBrainNew, &form)
+ ctx.RenderWithErr(err.Error(), tpl, &form)
return
}
- ctx.Redirect(setting.AppSubURL + ctx.Repo.RepoLink + "/debugjob?debugListType=all")
+ if jobType == string(models.JobTypeTrain) {
+ ctx.Redirect(setting.AppSubURL + ctx.Repo.RepoLink + "/modelarts/train-job?listType=all")
+ } else {
+ ctx.Redirect(setting.AppSubURL + ctx.Repo.RepoLink + "/debugjob?debugListType=all")
+ }
}
func CloudBrainRestart(ctx *context.Context) {
@@ -339,18 +380,29 @@ func CloudBrainRestart(ctx *context.Context) {
}
func CloudBrainBenchMarkShow(ctx *context.Context) {
- cloudBrainShow(ctx, tplCloudBrainBenchmarkShow)
+ cloudBrainShow(ctx, tplCloudBrainBenchmarkShow, models.JobTypeBenchmark)
}
func CloudBrainShow(ctx *context.Context) {
- cloudBrainShow(ctx, tplCloudBrainShow)
+ cloudBrainShow(ctx, tplCloudBrainShow, models.JobTypeDebug)
+}
+
+func CloudBrainTrainJobShow(ctx *context.Context) {
+ cloudBrainShow(ctx, tplCloudBrainTrainJobShow, models.JobTypeTrain)
}
-func cloudBrainShow(ctx *context.Context, tpName base.TplName) {
+func cloudBrainShow(ctx *context.Context, tpName base.TplName, jobType models.JobType) {
ctx.Data["PageIsCloudBrain"] = true
- var ID = ctx.Params(":id")
debugListType := ctx.Query("debugListType")
- task, err := models.GetCloudbrainByID(ID)
+
+ var task *models.Cloudbrain
+ var err error
+ if jobType == models.JobTypeTrain {
+ task, err = models.GetCloudbrainByJobID(ctx.Params(":jobid"))
+ } else {
+ task, err = models.GetCloudbrainByID(ctx.Params(":id"))
+ }
+
if err != nil {
log.Info("error:" + err.Error())
ctx.Data["error"] = err.Error()
@@ -365,6 +417,16 @@ func cloudBrainShow(ctx *context.Context, tpName base.TplName) {
jobRes.Resource.Memory = strings.ReplaceAll(jobRes.Resource.Memory, "Mi", "MB")
spec := "GPU数:" + strconv.Itoa(jobRes.Resource.NvidiaComGpu) + ",CPU数:" + strconv.Itoa(jobRes.Resource.CPU) + ",内存(MB):" + jobRes.Resource.Memory
ctx.Data["resource_spec"] = spec
+ if task.JobType == string(models.JobTypeTrain) {
+ if trainGpuInfos == nil {
+ json.Unmarshal([]byte(setting.TrainGpuTypes), &trainGpuInfos)
+ }
+ for _, resourceType := range trainGpuInfos.GpuInfo {
+ if resourceType.Queue == jobRes.Config.GpuType {
+ ctx.Data["resource_type"] = resourceType.Value
+ }
+ }
+ }
taskRoles := jobRes.TaskRoles
if jobRes.JobStatus.State != string(models.JobFailed) {
@@ -373,6 +435,7 @@ func cloudBrainShow(ctx *context.Context, tpName base.TplName) {
task.Status = taskRes.TaskStatuses[0].State
task.ContainerID = taskRes.TaskStatuses[0].ContainerID
task.ContainerIp = taskRes.TaskStatuses[0].ContainerIP
+ models.ParseAndSetDurationFromCloudBrainOne(jobRes, task)
err = models.UpdateJob(task)
if err != nil {
ctx.Data["error"] = err.Error()
@@ -398,12 +461,6 @@ func cloudBrainShow(ctx *context.Context, tpName base.TplName) {
task.User = user
}
- var duration int64
- if task.Status == string(models.JobRunning) {
- duration = time.Now().Unix() - int64(task.CreatedUnix)
- } else {
- duration = int64(task.UpdatedUnix) - int64(task.CreatedUnix)
- }
if task.BenchmarkTypeID > 0 {
for _, benchmarkType := range GetBenchmarkTypes(ctx).BenchmarkType {
if task.BenchmarkTypeID == benchmarkType.Id {
@@ -418,8 +475,42 @@ func cloudBrainShow(ctx *context.Context, tpName base.TplName) {
}
}
}
+ if task.TrainJobDuration == "" {
+ if task.Duration == 0 {
+ var duration int64
+ if task.Status == string(models.JobRunning) {
+ duration = time.Now().Unix() - int64(task.CreatedUnix)
+ } else {
+ duration = int64(task.UpdatedUnix) - int64(task.CreatedUnix)
+ }
+ task.Duration = duration
+ }
+ task.TrainJobDuration = models.ConvertDurationToStr(task.Duration)
+ }
+ ctx.Data["duration"] = task.TrainJobDuration
+
+ if len(task.Parameters) > 0 {
+ var parameters models.Parameters
+
+ err := json.Unmarshal([]byte(task.Parameters), ¶meters)
+ if err != nil {
+ log.Error("Failed to Unmarshal Parameters: %s (%v)", task.Parameters, err)
+ task.Parameters = ""
+ } else {
+ if len(parameters.Parameter) > 0 {
+ paramTemp := ""
+ for _, Parameter := range parameters.Parameter {
+ param := Parameter.Label + " = " + Parameter.Value + "; "
+ paramTemp = paramTemp + param
+ }
+ task.Parameters = paramTemp[:len(paramTemp)-2]
+ } else {
+ task.Parameters = ""
+ }
+ }
+
+ }
- ctx.Data["duration"] = util.AddZero(duration/3600000) + ":" + util.AddZero(duration%3600000/60000) + ":" + util.AddZero(duration%60000/1000)
ctx.Data["task"] = task
ctx.Data["jobName"] = task.JobName
ctx.Data["displayJobName"] = task.DisplayJobName
@@ -427,6 +518,7 @@ func cloudBrainShow(ctx *context.Context, tpName base.TplName) {
version_list_task = append(version_list_task, task)
ctx.Data["version_list_task"] = version_list_task
ctx.Data["debugListType"] = debugListType
+ ctx.Data["canDownload"] = cloudbrain.CanDeleteJob(ctx, task)
ctx.HTML(200, tpName)
}
@@ -482,6 +574,10 @@ func CloudBrainStop(ctx *context.Context) {
}
task.Status = string(models.JobStopped)
+ if task.EndTime == 0 {
+ task.EndTime = timeutil.TimeStampNow()
+ }
+ task.ComputeAndSetDuration()
err = models.UpdateJob(task)
if err != nil {
log.Error("UpdateJob(%s) failed:%v", task.JobName, err, ctx.Data["msgID"])
@@ -494,11 +590,12 @@ func CloudBrainStop(ctx *context.Context) {
break
}
- ctx.JSON(200, map[string]string{
+ ctx.JSON(200, map[string]interface{}{
"result_code": resultCode,
"error_msg": errorMsg,
"status": status,
"id": ID,
+ "StatusOK": 0,
})
}
@@ -575,6 +672,10 @@ func logErrorAndUpdateJobStatus(err error, taskInfo *models.Cloudbrain) {
log.Warn("Failed to stop cloudBrain job:"+taskInfo.JobID, err)
} else {
taskInfo.Status = string(models.JobStopped)
+ if taskInfo.EndTime == 0 {
+ taskInfo.EndTime = timeutil.TimeStampNow()
+ }
+ taskInfo.ComputeAndSetDuration()
err = models.UpdateJob(taskInfo)
if err != nil {
log.Warn("UpdateJob failed", err)
@@ -746,8 +847,8 @@ func GetRate(ctx *context.Context) {
}
}
-func downloadCode(repo *models.Repository, codePath string) error {
- if err := git.Clone(repo.RepoPath(), codePath, git.CloneRepoOptions{}); err != nil {
+func downloadCode(repo *models.Repository, codePath, branchName string) error {
+ if err := git.Clone(repo.RepoPath(), codePath, git.CloneRepoOptions{Branch: branchName}); err != nil {
log.Error("Failed to clone repository: %s (%v)", repo.FullName(), err)
return err
}
@@ -945,7 +1046,7 @@ func SyncCloudbrainStatus() {
taskRes, _ := models.ConvertToTaskPod(taskRoles[cloudbrain.SubTaskName].(map[string]interface{}))
task.Status = taskRes.TaskStatuses[0].State
if task.Status != string(models.JobWaiting) {
- task.Duration = time.Now().Unix() - taskRes.TaskStatuses[0].StartAt.Unix()
+ models.ParseAndSetDurationFromCloudBrainOne(jobRes, task)
err = models.UpdateJob(task)
if err != nil {
log.Error("UpdateJob(%s) failed:%v", task.JobName, err)
@@ -966,6 +1067,10 @@ func SyncCloudbrainStatus() {
continue
}
task.Status = string(models.JobStopped)
+ if task.EndTime == 0 {
+ task.EndTime = timeutil.TimeStampNow()
+ }
+ task.ComputeAndSetDuration()
err = models.UpdateJob(task)
if err != nil {
log.Error("UpdateJob(%s) failed:%v", task.JobName, err)
@@ -984,14 +1089,20 @@ func SyncCloudbrainStatus() {
if result != nil {
task.Status = result.Status
-
+ if task.StartTime == 0 && result.Lease.UpdateTime > 0 {
+ task.StartTime = timeutil.TimeStamp(result.Lease.UpdateTime / 1000)
+ }
+ if task.EndTime == 0 && models.IsModelArtsDebugJobTerminal(task.Status) {
+ task.EndTime = timeutil.TimeStampNow()
+ }
+ task.ComputeAndSetDuration()
err = models.UpdateJob(task)
if err != nil {
log.Error("UpdateJob(%s) failed:%v", task.JobName, err)
continue
}
}
- } else if task.JobType == string(models.JobTypeTrain) {
+ } else if task.JobType == string(models.JobTypeTrain) || task.JobType == string(models.JobTypeInference) {
result, err := modelarts.GetTrainJob(task.JobID, strconv.FormatInt(task.VersionID, 10))
if err != nil {
log.Error("GetTrainJob(%s) failed:%v", task.JobName, err)
@@ -1000,16 +1111,160 @@ func SyncCloudbrainStatus() {
if result != nil {
task.Status = modelarts.TransTrainJobStatus(result.IntStatus)
- task.Duration = result.Duration
+ task.Duration = result.Duration / 1000
task.TrainJobDuration = result.TrainJobDuration
- if result.Duration != 0 {
- task.TrainJobDuration = util.AddZero(result.Duration/3600000) + ":" + util.AddZero(result.Duration%3600000/60000) + ":" + util.AddZero(result.Duration%60000/1000)
+ if task.StartTime == 0 && result.StartTime > 0 {
+ task.StartTime = timeutil.TimeStamp(result.StartTime / 1000)
+ }
+ task.TrainJobDuration = models.ConvertDurationToStr(task.Duration)
+ if task.EndTime == 0 && models.IsTrainJobTerminal(task.Status) && task.StartTime > 0 {
+ task.EndTime = task.StartTime.Add(task.Duration)
+ }
+
+ err = models.UpdateJob(task)
+ if err != nil {
+ log.Error("UpdateJob(%s) failed:%v", task.JobName, err)
+ continue
+ }
+ }
+ } else {
+ log.Error("task.JobType(%s) is error:%s", task.JobName, task.JobType)
+ }
+ } else {
+ log.Error("task.Type(%s) is error:%d", task.JobName, task.Type)
+ }
+ }
+
+ return
+}
+
+func HandleTaskWithNoDuration(ctx *context.Context) {
+ log.Info("HandleTaskWithNoDuration start")
+ count := 0
+ start := time.Now().Unix()
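+	// One-off repair for historical tasks that stopped before duration tracking
+	// existed: handle them in batches (up to 100 per query) until none remain or
+	// ten minutes have elapsed.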
+ for {
+ cloudBrains, err := models.GetStoppedJobWithNoDurationJob()
+ if err != nil {
+ log.Error("HandleTaskWithNoTrainJobDuration failed:", err.Error())
+ break
+ }
+ if len(cloudBrains) == 0 {
+ log.Info("HandleTaskWithNoTrainJobDuration:no task need handle")
+ break
+ }
+ handleNoDurationTask(cloudBrains)
+ count += len(cloudBrains)
+ if len(cloudBrains) < 100 {
+ log.Info("HandleTaskWithNoTrainJobDuration:task less than 100")
+ break
+ }
+ if time.Now().Unix()-start > 600 {
+ log.Info("HandleTaskWithNoDuration : time out")
+ ctx.JSON(200, fmt.Sprintf("task stop for time out,count=%d", count))
+ return
+ }
+ }
+ log.Info("HandleTaskWithNoTrainJobDuration:count=%d", count)
+ ctx.JSON(200, fmt.Sprintf("success,count=%d", count))
+}
+
+func handleNoDurationTask(cloudBrains []*models.Cloudbrain) {
+ for _, task := range cloudBrains {
+ time.Sleep(time.Millisecond * 100)
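+		// Throttle requests to the upstream APIs while backfilling start/end times.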
+ log.Info("Handle job ,%+v", task)
+ if task.Type == models.TypeCloudBrainOne {
+ result, err := cloudbrain.GetJob(task.JobID)
+ if err != nil {
+ log.Error("GetJob(%s) failed:%v", task.JobName, err)
+ updateDefaultDuration(task)
+ continue
+ }
+
+ if result != nil {
+ if result.Msg != "success" {
+ updateDefaultDuration(task)
+ continue
+ }
+ jobRes, err := models.ConvertToJobResultPayload(result.Payload)
+ if err != nil || len(jobRes.TaskRoles) == 0 {
+ updateDefaultDuration(task)
+ continue
+ }
+ taskRoles := jobRes.TaskRoles
+ taskRes, err := models.ConvertToTaskPod(taskRoles[cloudbrain.SubTaskName].(map[string]interface{}))
+ if err != nil || len(taskRes.TaskStatuses) == 0 {
+ updateDefaultDuration(task)
+ continue
+ }
+ task.Status = taskRes.TaskStatuses[0].State
+ log.Info("task startTime = %v endTime= %v ,jobId=%d", jobRes.JobStatus.StartTime, jobRes.JobStatus.EndTime, task.ID)
+ if jobRes.JobStatus.CreatedTime > 0 {
+ task.StartTime = timeutil.TimeStamp(jobRes.JobStatus.CreatedTime / 1000)
+ if jobRes.JobStatus.CompletedTime > 0 {
+ task.EndTime = timeutil.TimeStamp(jobRes.JobStatus.CompletedTime / 1000)
} else {
- task.TrainJobDuration = "00:00:00"
+ task.EndTime = task.UpdatedUnix
}
+ } else {
+ task.StartTime = 0
+ task.EndTime = 0
+ }
+
+ if task.EndTime < task.StartTime {
+ log.Info("endTime[%v] is less than starTime[%v],jobId=%d", task.EndTime, task.StartTime, task.ID)
+ st := task.StartTime
+ task.StartTime = task.EndTime
+ task.EndTime = st
+ }
+ task.Duration = task.EndTime.AsTime().Unix() - task.StartTime.AsTime().Unix()
+ task.TrainJobDuration = models.ConvertDurationToStr(task.Duration)
+ err = models.UpdateJob(task)
+ if err != nil {
+ log.Error("UpdateJob(%s) failed:%v", task.JobName, err)
+ }
+ }
+ } else if task.Type == models.TypeCloudBrainTwo {
+ if task.JobType == string(models.JobTypeDebug) {
+ //result, err := modelarts.GetJob(task.JobID)
+ result, err := modelarts.GetNotebook2(task.JobID)
+ if err != nil {
+ log.Error("GetJob(%s) failed:%v", task.JobName, err)
+ updateDefaultDuration(task)
+ continue
+ }
+
+ if result != nil {
+ task.Status = result.Status
+ startTime := result.Lease.CreateTime
+ duration := result.Lease.Duration / 1000
+ if startTime > 0 {
+ task.StartTime = timeutil.TimeStamp(startTime / 1000)
+ task.EndTime = task.StartTime.Add(duration)
+ }
+ task.ComputeAndSetDuration()
+ err = models.UpdateJob(task)
+ if err != nil {
+ log.Error("UpdateJob(%s) failed:%v", task.JobName, err)
+ continue
+ }
+ }
+ } else if task.JobType == string(models.JobTypeTrain) || task.JobType == string(models.JobTypeInference) {
+ result, err := modelarts.GetTrainJob(task.JobID, strconv.FormatInt(task.VersionID, 10))
+ if err != nil {
+ log.Error("GetTrainJob(%s) failed:%v", task.JobName, err)
+ updateDefaultDuration(task)
+ continue
+ }
+ if result != nil {
+ startTime := result.StartTime / 1000
+ if startTime > 0 {
+ task.StartTime = timeutil.TimeStamp(startTime)
+ task.EndTime = task.StartTime.Add(result.Duration / 1000)
+ }
+ task.ComputeAndSetDuration()
err = models.UpdateJob(task)
if err != nil {
log.Error("UpdateJob(%s) failed:%v", task.JobName, err)
@@ -1024,8 +1279,17 @@ func SyncCloudbrainStatus() {
log.Error("task.Type(%s) is error:%d", task.JobName, task.Type)
}
}
+}
- return
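+// updateDefaultDuration approximates a task's run as created-to-last-updated
+// when the upstream API cannot supply real start/end times.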
+func updateDefaultDuration(task *models.Cloudbrain) {
+ log.Info("updateDefaultDuration: taskId=%d", task.ID)
+ task.StartTime = task.CreatedUnix
+ task.EndTime = task.UpdatedUnix
+ task.ComputeAndSetDuration()
+ err := models.UpdateJob(task)
+ if err != nil {
+ log.Error("UpdateJob(%s) failed:%v", task.JobName, err)
+ }
}
func CloudBrainBenchmarkIndex(ctx *context.Context) {
@@ -1055,13 +1319,19 @@ func CloudBrainBenchmarkIndex(ctx *context.Context) {
for i, task := range ciTasks {
ciTasks[i].CanDel = cloudbrain.CanDeleteJob(ctx, &task.Cloudbrain)
ciTasks[i].Cloudbrain.ComputeResource = task.ComputeResource
- var duration int64
- if task.Status == string(models.JobRunning) {
- duration = time.Now().Unix() - int64(task.Cloudbrain.CreatedUnix)
- } else {
- duration = int64(task.Cloudbrain.UpdatedUnix) - int64(task.Cloudbrain.CreatedUnix)
+ if ciTasks[i].TrainJobDuration == "" {
+ if ciTasks[i].Duration == 0 {
+ var duration int64
+ if task.Status == string(models.JobRunning) {
+ duration = time.Now().Unix() - int64(task.Cloudbrain.CreatedUnix)
+ } else {
+ duration = int64(task.Cloudbrain.UpdatedUnix) - int64(task.Cloudbrain.CreatedUnix)
+ }
+ ciTasks[i].Duration = duration
+ }
+ ciTasks[i].TrainJobDuration = models.ConvertDurationToStr(ciTasks[i].Duration)
}
- ciTasks[i].TrainJobDuration = util.AddZero(duration/3600000) + ":" + util.AddZero(duration%3600000/60000) + ":" + util.AddZero(duration%60000/1000)
+
ciTasks[i].BenchmarkTypeName = ""
if task.BenchmarkTypeID > 0 {
for _, benchmarkType := range GetBenchmarkTypes(ctx).BenchmarkType {
@@ -1278,7 +1548,7 @@ func CloudBrainBenchmarkCreate(ctx *context.Context, form auth.CreateCloudBrainF
}
os.RemoveAll(codePath)
- if err := downloadCode(repo, codePath); err != nil {
+ if err := downloadCode(repo, codePath, cloudbrain.DefaultBranchName); err != nil {
log.Error("downloadCode failed, %v", err, ctx.Data["MsgID"])
cloudBrainNewDataPrepare(ctx)
ctx.RenderWithErr("system error", tplCloudBrainBenchmarkNew, &form)
@@ -1343,7 +1613,7 @@ func CloudBrainBenchmarkCreate(ctx *context.Context, form auth.CreateCloudBrainF
err = cloudbrain.GenerateTask(ctx, displayJobName, jobName, image, command, childInfo.Attachment, storage.GetMinioPath(jobName, cloudbrain.CodeMountPath+"/"),
storage.GetMinioPath(jobName, cloudbrain.ModelMountPath+"/"),
storage.GetMinioPath(jobName, cloudbrain.BenchMarkMountPath+"/"), storage.GetMinioPath(jobName, cloudbrain.Snn4imagenetMountPath+"/"),
- storage.GetMinioPath(jobName, cloudbrain.BrainScoreMountPath+"/"), string(models.JobTypeBenchmark), gpuQueue, form.Description,
+ storage.GetMinioPath(jobName, cloudbrain.BrainScoreMountPath+"/"), string(models.JobTypeBenchmark), gpuQueue, form.Description, cloudbrain.DefaultBranchName, "", "",
benchmarkTypeID, benchmarkChildTypeID, resourceSpecId)
if err != nil {
cloudBrainNewDataPrepare(ctx)
@@ -1369,10 +1639,66 @@ func BenchmarkDel(ctx *context.Context) {
}
}
+func CloudBrainTrainJobNew(ctx *context.Context) {
+ err := cloudBrainNewDataPrepare(ctx)
+ if err != nil {
+ ctx.ServerError("get new train-job info failed", err)
+ return
+ }
+ ctx.HTML(http.StatusOK, tplCloudBrainTrainJobNew)
+}
+
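+// getTrainJobCommand builds the shell command for a GPU train job: it runs the
+// user's Python boot file under /code, appends each parameter as a
+// --label=value flag, and redirects stdout to the job's log file in the model
+// output directory.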
+func getTrainJobCommand(form auth.CreateCloudBrainForm) (string, error) {
+ var command string
+ bootFile := form.BootFile
+ params := form.Params
+
+ if !strings.HasSuffix(bootFile, ".py") {
+ log.Error("bootFile(%s) format error", bootFile)
+ return command, errors.New("bootFile format error")
+ }
+
+ var parameters models.Parameters
+ var param string
+ if len(params) != 0 {
+ err := json.Unmarshal([]byte(params), ¶meters)
+ if err != nil {
+ log.Error("Failed to Unmarshal params: %s (%v)", params, err)
+ return command, err
+ }
+
+ for _, parameter := range parameters.Parameter {
+ param += " --" + parameter.Label + "=" + parameter.Value
+ }
+ }
+
+ command += "python /code/" + bootFile + param + " > " + cloudbrain.ModelMountPath + "/" + form.DisplayJobName + "-" + cloudbrain.LogFile
+
+ return command, nil
+}
+
+func CloudBrainTrainJobDel(ctx *context.Context) {
+ var listType = ctx.Query("listType")
+ if err := deleteCloudbrainJob(ctx); err != nil {
+ log.Error("deleteCloudbrainJob failed: %v", err, ctx.Data["msgID"])
+ ctx.ServerError(err.Error(), err)
+ return
+ }
+
+ var isAdminPage = ctx.Query("isadminpage")
+ if ctx.IsUserSiteAdmin() && isAdminPage == "true" {
+ ctx.Redirect(setting.AppSubURL + "/admin" + "/cloudbrains")
+ } else {
+ ctx.Redirect(setting.AppSubURL + ctx.Repo.RepoLink + "/modelarts/train-job?listType=" + listType)
+ }
+}
+
func GetBenchmarkTypes(ctx *context.Context) *models.BenchmarkTypes {
var lang = ctx.Locale.Language()
if benchmarkTypesMap[lang] == nil {
var val = i18n.Tr(lang, BENCHMARK_TYPE_CODE)
+		//use the configured benchmark types instead of the i18n resource
+ val = setting.BenchmarkTypes
var tempType *models.BenchmarkTypes
if err := json.Unmarshal([]byte(val), &tempType); err != nil {
log.Error("json.Unmarshal BenchmarkTypes(%s) failed:%v", val, err, ctx.Data["MsgID"])
diff --git a/routers/repo/dataset.go b/routers/repo/dataset.go
index 7d59ab486..d23722372 100755
--- a/routers/repo/dataset.go
+++ b/routers/repo/dataset.go
@@ -1,7 +1,14 @@
package repo
import (
+ "encoding/json"
+ "fmt"
+ "net/http"
+ "regexp"
"sort"
+ "strconv"
+ "strings"
+ "unicode/utf8"
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/auth"
@@ -12,9 +19,14 @@ import (
)
const (
- tplIndex base.TplName = "repo/datasets/index"
+ tplIndex base.TplName = "repo/datasets/index"
+ tplDatasetCreate base.TplName = "repo/datasets/create"
+ tplDatasetEdit base.TplName = "repo/datasets/edit"
+ taskstplIndex base.TplName = "repo/datasets/tasks/index"
)
+var titlePattern = regexp.MustCompile(`^[A-Za-z0-9-_.]{1,100}$`)
+
// MustEnableDataset check if repository enable internal dataset
func MustEnableDataset(ctx *context.Context) {
if !ctx.Repo.CanRead(models.UnitTypeDatasets) {
@@ -84,43 +96,34 @@ func QueryDataSet(ctx *context.Context) []*models.Attachment {
attachments := newFilterPrivateAttachments(ctx, dataset.Attachments, repo)
ctx.Data["SortType"] = ctx.Query("sort")
- switch ctx.Query("sort") {
- case "newest":
- sort.Slice(attachments, func(i, j int) bool {
- return attachments[i].CreatedUnix > attachments[j].CreatedUnix
- })
- case "oldest":
- sort.Slice(attachments, func(i, j int) bool {
- return attachments[i].CreatedUnix < attachments[j].CreatedUnix
- })
- default:
- ctx.Data["SortType"] = "newest"
- sort.Slice(attachments, func(i, j int) bool {
- return attachments[i].CreatedUnix > attachments[j].CreatedUnix
- })
- }
+
+ sort.Slice(attachments, func(i, j int) bool {
+ return attachments[i].CreatedUnix > attachments[j].CreatedUnix
+ })
+
return attachments
}
func DatasetIndex(ctx *context.Context) {
log.Info("dataset index 1")
MustEnableDataset(ctx)
+ ctx.Data["PageIsDataset"] = true
repo := ctx.Repo.Repository
dataset, err := models.GetDatasetByRepo(repo)
+ ctx.Data["CanWrite"] = ctx.Repo.CanWrite(models.UnitTypeDatasets)
if err != nil {
- log.Error("query dataset, not found repo.")
- ctx.NotFound("GetDatasetByRepo", err)
+ log.Warn("query dataset, not found.")
+ ctx.HTML(200, tplIndex)
return
}
+ cloudbrainType := -1
+ if ctx.Query("type") != "" {
- if ctx.Query("type") == "" {
- log.Error("query dataset, not found param type")
- ctx.NotFound("type error", nil)
- return
+ cloudbrainType = ctx.QueryInt("type")
}
- err = models.GetDatasetAttachments(ctx.QueryInt("type"), ctx.IsSigned, ctx.User, dataset)
+ err = models.GetDatasetAttachments(cloudbrainType, ctx.IsSigned, ctx.User, dataset)
if err != nil {
ctx.ServerError("GetDatasetAttachments", err)
return
@@ -128,53 +131,138 @@ func DatasetIndex(ctx *context.Context) {
attachments := newFilterPrivateAttachments(ctx, dataset.Attachments, repo)
- ctx.Data["SortType"] = ctx.Query("sort")
- switch ctx.Query("sort") {
- case "newest":
- sort.Slice(attachments, func(i, j int) bool {
- return attachments[i].CreatedUnix > attachments[j].CreatedUnix
- })
- case "oldest":
- sort.Slice(attachments, func(i, j int) bool {
- return attachments[i].CreatedUnix < attachments[j].CreatedUnix
- })
- default:
- ctx.Data["SortType"] = "newest"
- sort.Slice(attachments, func(i, j int) bool {
- return attachments[i].CreatedUnix > attachments[j].CreatedUnix
- })
+ sort.Slice(attachments, func(i, j int) bool {
+ return attachments[i].CreatedUnix > attachments[j].CreatedUnix
+ })
+
+ page := ctx.QueryInt("page")
+ if page <= 0 {
+ page = 1
}
+ pagesize := ctx.QueryInt("pagesize")
+ if pagesize <= 0 {
+ pagesize = 10
+ }
+ pager := context.NewPagination(len(attachments), pagesize, page, 5)
+
+ pageAttachments := getPageAttachments(attachments, page, pagesize)
+
+ //load attachment creator
+ for _, attachment := range pageAttachments {
+ uploader, _ := models.GetUserByID(attachment.UploaderID)
+ attachment.Uploader = uploader
+ }
+
+ ctx.Data["Page"] = pager
- ctx.Data["PageIsDataset"] = true
ctx.Data["Title"] = ctx.Tr("dataset.show_dataset")
ctx.Data["Link"] = ctx.Repo.RepoLink + "/datasets"
ctx.Data["dataset"] = dataset
- ctx.Data["Attachments"] = attachments
+ ctx.Data["Attachments"] = pageAttachments
ctx.Data["IsOwner"] = true
ctx.Data["StoreType"] = setting.Attachment.StoreType
- ctx.Data["Type"] = ctx.QueryInt("type")
+ ctx.Data["Type"] = cloudbrainType
renderAttachmentSettings(ctx)
ctx.HTML(200, tplIndex)
}
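+// getPageAttachments slices the in-memory attachment list for one page; a begin
+// index past the end returns nil so the caller renders an empty page.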
+func getPageAttachments(attachments []*models.Attachment, page int, pagesize int) []*models.Attachment {
+ begin := (page - 1) * pagesize
+ end := (page) * pagesize
+
+ if begin > len(attachments)-1 {
+ return nil
+ }
+ if end > len(attachments)-1 {
+ return attachments[begin:]
+ } else {
+ return attachments[begin:end]
+ }
+
+}
+
+func CreateDataset(ctx *context.Context) {
+
+ MustEnableDataset(ctx)
+ ctx.Data["PageIsDataset"] = true
+
+ ctx.HTML(200, tplDatasetCreate)
+}
+
+func EditDataset(ctx *context.Context) {
+
+ MustEnableDataset(ctx)
+ ctx.Data["PageIsDataset"] = true
+ datasetId, _ := strconv.ParseInt(ctx.Params(":id"), 10, 64)
+
+ dataset, _ := models.GetDatasetByID(datasetId)
+ if dataset == nil {
+ ctx.Error(http.StatusNotFound, "")
+ return
+ }
+ ctx.Data["Dataset"] = dataset
+
+ ctx.HTML(200, tplDatasetEdit)
+}
+
+func CreateDatasetPost(ctx *context.Context, form auth.CreateDatasetForm) {
+
+ dataset := &models.Dataset{}
+
+ if !titlePattern.MatchString(form.Title) {
+ ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.title_format_err")))
+ return
+ }
+ if utf8.RuneCountInString(form.Description) > 1024 {
+ ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.description_format_err")))
+ return
+ }
+
+ dataset.RepoID = ctx.Repo.Repository.ID
+ dataset.UserID = ctx.User.ID
+ dataset.Category = form.Category
+ dataset.Task = form.Task
+ dataset.Title = form.Title
+ dataset.License = form.License
+ dataset.Description = form.Description
+ dataset.DownloadTimes = 0
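+	// A new dataset inherits its initial visibility from the repository:
+	// private repos start at Status 0 (hidden), public ones at Status 1.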
+ if ctx.Repo.Repository.IsPrivate {
+ dataset.Status = 0
+ } else {
+ dataset.Status = 1
+ }
+ err := models.CreateDataset(dataset)
+ if err != nil {
+ log.Error("fail to create dataset", err)
+ ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.create_dataset_fail")))
+ } else {
+ ctx.JSON(http.StatusOK, models.BaseOKMessage)
+ }
+
+}
+
func EditDatasetPost(ctx *context.Context, form auth.EditDatasetForm) {
ctx.Data["PageIsDataset"] = true
ctx.Data["Title"] = ctx.Tr("dataset.edit_dataset")
+ if !titlePattern.MatchString(form.Title) {
+ ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.title_format_err")))
+ return
+ }
+ if utf8.RuneCountInString(form.Description) > 1024 {
+ ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.description_format_err")))
+ return
+ }
+
rel, err := models.GetDatasetByID(form.ID)
ctx.Data["dataset"] = rel
if err != nil {
- ctx.ServerError("GetDataset", err)
- return
- }
-
- if ctx.HasError() {
- ctx.Data["Error"] = true
- ctx.HTML(200, tplIndex)
+ log.Error("failed to query dataset", err)
+ ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.query_dataset_fail")))
return
}
@@ -184,9 +272,236 @@ func EditDatasetPost(ctx *context.Context, form auth.EditDatasetForm) {
rel.Task = form.Task
rel.License = form.License
if err = models.UpdateDataset(models.DefaultDBContext(), rel); err != nil {
- ctx.Data["Error"] = true
- ctx.HTML(200, tplIndex)
- log.Error("%v", err)
+ ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.query_dataset_fail")))
}
- ctx.Redirect(ctx.Repo.RepoLink + "/datasets?type=" + form.Type)
+ ctx.JSON(http.StatusOK, models.BaseOKMessage)
+}
+
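+// DatasetAction handles PUT /datasets/:id/:action (see routers/routes/routes.go);
+// only "star" and "unstar" are recognized, and any other action leaves err nil
+// and so reports success.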
+func DatasetAction(ctx *context.Context) {
+ var err error
+ datasetId, _ := strconv.ParseInt(ctx.Params(":id"), 10, 64)
+ switch ctx.Params(":action") {
+ case "star":
+ err = models.StarDataset(ctx.User.ID, datasetId, true)
+ case "unstar":
+ err = models.StarDataset(ctx.User.ID, datasetId, false)
+
+ }
+ if err != nil {
+ ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("repo.star_fail", ctx.Params(":action"))))
+ } else {
+ ctx.JSON(http.StatusOK, models.BaseOKMessage)
+ }
+
+}
+
+func CurrentRepoDataset(ctx *context.Context) {
+ page := ctx.QueryInt("page")
+ cloudbrainType := ctx.QueryInt("type")
+ keyword := strings.Trim(ctx.Query("q"), " ")
+
+ repo := ctx.Repo.Repository
+ var datasetIDs []int64
+ dataset, err := models.GetDatasetByRepo(repo)
+ if err != nil {
+ ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("GetDatasetByRepo failed", err)))
+ return
+ }
+ datasetIDs = append(datasetIDs, dataset.ID)
+ datasets, count, err := models.Attachments(&models.AttachmentsOptions{
+ ListOptions: models.ListOptions{
+ Page: page,
+ PageSize: setting.UI.DatasetPagingNum,
+ },
+ Keyword: keyword,
+ NeedDatasetIDs: true,
+ DatasetIDs: datasetIDs,
+ Type: cloudbrainType,
+ NeedIsPrivate: false,
+ JustNeedZipFile: true,
+ NeedRepoInfo: true,
+ })
+ if err != nil {
+ ctx.ServerError("datasets", err)
+ return
+ }
+
+ data, err := json.Marshal(datasets)
+ if err != nil {
+ log.Error("json.Marshal failed:", err.Error())
+ ctx.JSON(200, map[string]string{
+ "result_code": "-1",
+ "error_msg": err.Error(),
+ "data": "",
+ })
+ return
+ }
+ ctx.JSON(200, map[string]string{
+ "result_code": "0",
+ "data": string(data),
+ "count": strconv.FormatInt(count, 10),
+ })
+}
+
+func MyDatasets(ctx *context.Context) {
+ page := ctx.QueryInt("page")
+ cloudbrainType := ctx.QueryInt("type")
+ keyword := strings.Trim(ctx.Query("q"), " ")
+
+ uploaderID := ctx.User.ID
+ datasets, count, err := models.Attachments(&models.AttachmentsOptions{
+ ListOptions: models.ListOptions{
+ Page: page,
+ PageSize: setting.UI.DatasetPagingNum,
+ },
+ Keyword: keyword,
+ NeedDatasetIDs: false,
+ UploaderID: uploaderID,
+ Type: cloudbrainType,
+ NeedIsPrivate: false,
+ JustNeedZipFile: true,
+ NeedRepoInfo: true,
+ })
+ if err != nil {
+ ctx.ServerError("datasets", err)
+ return
+ }
+
+ data, err := json.Marshal(datasets)
+ if err != nil {
+ log.Error("json.Marshal failed:", err.Error())
+ ctx.JSON(200, map[string]string{
+ "result_code": "-1",
+ "error_msg": err.Error(),
+ "data": "",
+ })
+ return
+ }
+ ctx.JSON(200, map[string]string{
+ "result_code": "0",
+ "data": string(data),
+ "count": strconv.FormatInt(count, 10),
+ })
+}
+
+func PublicDataset(ctx *context.Context) {
+ page := ctx.QueryInt("page")
+ cloudbrainType := ctx.QueryInt("type")
+ keyword := strings.Trim(ctx.Query("q"), " ")
+
+ datasets, count, err := models.Attachments(&models.AttachmentsOptions{
+ ListOptions: models.ListOptions{
+ Page: page,
+ PageSize: setting.UI.DatasetPagingNum,
+ },
+ Keyword: keyword,
+ NeedDatasetIDs: false,
+ NeedIsPrivate: true,
+ IsPrivate: false,
+ Type: cloudbrainType,
+ JustNeedZipFile: true,
+ NeedRepoInfo: true,
+ })
+ if err != nil {
+ ctx.ServerError("datasets", err)
+ return
+ }
+
+ data, err := json.Marshal(datasets)
+ if err != nil {
+ log.Error("json.Marshal failed:", err.Error())
+ ctx.JSON(200, map[string]string{
+ "result_code": "-1",
+ "error_msg": err.Error(),
+ "data": "",
+ })
+ return
+ }
+ ctx.JSON(200, map[string]string{
+ "result_code": "0",
+ "data": string(data),
+ "count": strconv.FormatInt(count, 10),
+ })
+}
+
+func MyFavoriteDataset(ctx *context.Context) {
+ page := ctx.QueryInt("page")
+ cloudbrainType := ctx.QueryInt("type")
+ keyword := strings.Trim(ctx.Query("q"), " ")
+ var datasetIDs []int64
+ datasetStars, err := models.GetDatasetStarByUser(ctx.User)
+ if err != nil {
+ ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("GetDatasetStarByUser failed", err)))
+ log.Error("GetDatasetStarByUser failed:", err.Error())
+ ctx.JSON(200, map[string]string{
+ "result_code": "-1",
+ "error_msg": err.Error(),
+ "data": "",
+ })
+ return
+ }
+ for i := range datasetStars {
+ datasetIDs = append(datasetIDs, datasetStars[i].DatasetID)
+ }
+
+ datasets, count, err := models.Attachments(&models.AttachmentsOptions{
+ ListOptions: models.ListOptions{
+ Page: page,
+ PageSize: setting.UI.DatasetPagingNum,
+ },
+ Keyword: keyword,
+ NeedDatasetIDs: true,
+ DatasetIDs: datasetIDs,
+ NeedIsPrivate: true,
+ IsPrivate: false,
+ Type: cloudbrainType,
+ JustNeedZipFile: true,
+ NeedRepoInfo: true,
+ })
+ if err != nil {
+ ctx.ServerError("datasets", err)
+ return
+ }
+
+ data, err := json.Marshal(datasets)
+ if err != nil {
+ log.Error("json.Marshal failed:", err.Error())
+ ctx.JSON(200, map[string]string{
+ "result_code": "-1",
+ "error_msg": err.Error(),
+ "data": "",
+ })
+ return
+ }
+ ctx.JSON(200, map[string]string{
+ "result_code": "0",
+ "data": string(data),
+ "count": strconv.FormatInt(count, 10),
+ })
+
+}
+
+func GetDatasetStatus(ctx *context.Context) {
+
+ UUID := ctx.Params(":uuid")
+ attachment, err := models.GetAttachmentByUUID(UUID)
+ if err != nil {
+ log.Error("GetDatasetStarByUser failed:", err.Error())
+ ctx.JSON(200, map[string]string{
+ "result_code": "-1",
+ "error_msg": err.Error(),
+ "data": "",
+ })
+ return
+ }
+
+ ctx.JSON(200, map[string]string{
+ "result_code": "0",
+ "UUID": UUID,
+ "AttachmentStatus": fmt.Sprint(attachment.DecompressState),
+ })
}
diff --git a/routers/repo/modelarts.go b/routers/repo/modelarts.go
index 9c670e203..32d9db9ce 100755
--- a/routers/repo/modelarts.go
+++ b/routers/repo/modelarts.go
@@ -2,6 +2,8 @@ package repo
import (
"archive/zip"
+ "code.gitea.io/gitea/modules/notification"
+ "code.gitea.io/gitea/modules/timeutil"
"encoding/json"
"errors"
"io"
@@ -46,20 +48,26 @@ const (
)
func DebugJobIndex(ctx *context.Context) {
- debugListType := ctx.Query("debugListType")
- ctx.Data["ListType"] = debugListType
+ listType := ctx.Query("debugListType")
+ ctx.Data["ListType"] = listType
MustEnableCloudbrain(ctx)
repo := ctx.Repo.Repository
page := ctx.QueryInt("page")
if page <= 0 {
page = 1
}
- debugType := modelarts.DebugType
+ typeCloudBrain := models.TypeCloudBrainAll
jobTypeNot := false
- if debugListType == models.GPUResource {
- debugType = models.TypeCloudBrainOne
- } else if debugListType == models.NPUResource {
- debugType = models.TypeCloudBrainTwo
+ if listType == models.GPUResource {
+ typeCloudBrain = models.TypeCloudBrainOne
+ } else if listType == models.NPUResource {
+ typeCloudBrain = models.TypeCloudBrainTwo
+ } else if listType == models.AllResource {
+ typeCloudBrain = models.TypeCloudBrainAll
+ } else {
+ log.Error("listType(%s) error", listType)
+ ctx.ServerError("listType error", errors.New("listType error"))
+ return
}
var jobTypes []string
@@ -70,7 +78,7 @@ func DebugJobIndex(ctx *context.Context) {
PageSize: setting.UI.IssuePagingNum,
},
RepoID: repo.ID,
- Type: debugType,
+ Type: typeCloudBrain,
JobTypeNot: jobTypeNot,
JobTypes: jobTypes,
})
@@ -92,7 +100,7 @@ func DebugJobIndex(ctx *context.Context) {
ctx.Data["Tasks"] = ciTasks
ctx.Data["CanCreate"] = cloudbrain.CanCreateOrDebugJob(ctx)
ctx.Data["RepoIsEmpty"] = repo.IsEmpty
- ctx.Data["debugListType"] = debugListType
+ ctx.Data["debugListType"] = listType
ctx.HTML(200, tplDebugJobIndex)
}
@@ -133,6 +141,8 @@ func notebookNewDataPrepare(ctx *context.Context) error {
}
ctx.Data["flavors"] = modelarts.FlavorInfos.FlavorInfo
+ ctx.Data["cloudbraintype"] = models.TypeCloudBrainTwo
+
return nil
}
@@ -407,16 +417,46 @@ func NotebookManage(ctx *context.Context) {
break
}
- task.Status = res.Status
- err = models.UpdateJob(task)
- if err != nil {
- log.Error("UpdateJob(%s) failed:%v", task.JobName, err.Error(), ctx.Data["MsgID"])
- resultCode = "-1"
- errorMsg = "system error"
- break
- }
+ status = res.Status
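+ // Restarting creates a brand-new Cloudbrain record via RestartCloudbrain
+ // below, so the finished run keeps its own row and the new run gets its own ID.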
+ if action == models.ActionStart {
+ newTask := &models.Cloudbrain{
+ Status: status,
+ UserID: task.UserID,
+ RepoID: task.RepoID,
+ JobID: task.JobID,
+ JobName: task.JobName,
+ DisplayJobName: task.DisplayJobName,
+ JobType: task.JobType,
+ Type: task.Type,
+ Uuid: task.Uuid,
+ Image: task.Image,
+ ComputeResource: task.ComputeResource,
+ Description: task.Description,
+ }
- status = task.Status
+ err = models.RestartCloudbrain(task, newTask)
+ if err != nil {
+ log.Error("RestartCloudbrain(%s) failed:%v", task.JobName, err.Error(), ctx.Data["MsgID"])
+ resultCode = "-1"
+ errorMsg = "system error"
+ break
+ }
+ ID = strconv.FormatInt(newTask.ID, 10)
+ notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, ID, task.DisplayJobName, models.ActionCreateDebugNPUTask)
+ } else {
+ task.Status = res.Status
+ if task.EndTime == 0 && models.IsModelArtsDebugJobTerminal(task.Status) {
+ task.EndTime = timeutil.TimeStampNow()
+ }
+ task.ComputeAndSetDuration()
+ err = models.UpdateJob(task)
+ if err != nil {
+ log.Error("UpdateJob(%s) failed:%v", task.JobName, err.Error(), ctx.Data["MsgID"])
+ resultCode = "-1"
+ errorMsg = "system error"
+ break
+ }
+ }
break
}
@@ -473,6 +513,26 @@ func TrainJobIndex(ctx *context.Context) {
page = 1
}
+ listType := ctx.Query("listType")
+ if len(listType) == 0 {
+ listType = models.AllResource
+ }
+ ctx.Data["ListType"] = listType
+
+ typeCloudBrain := models.TypeCloudBrainAll
+ if listType == models.GPUResource {
+ typeCloudBrain = models.TypeCloudBrainOne
+ } else if listType == models.NPUResource {
+ typeCloudBrain = models.TypeCloudBrainTwo
+ } else if listType == models.AllResource {
+ typeCloudBrain = models.TypeCloudBrainAll
+ }
+ //else {
+ // log.Error("listType(%s) error", listType)
+ // ctx.ServerError("listType error", errors.New("listType error"))
+ // return
+ //}
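+ // Note: unknown listType values fall through with typeCloudBrain left as
+ // TypeCloudBrainAll; the stricter validation above is left commented out
+ // (unlike DebugJobIndex, which rejects unrecognized values).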
+
var jobTypes []string
jobTypes = append(jobTypes, string(models.JobTypeTrain))
tasks, count, err := models.Cloudbrains(&models.CloudbrainsOptions{
@@ -481,7 +541,7 @@ func TrainJobIndex(ctx *context.Context) {
PageSize: setting.UI.IssuePagingNum,
},
RepoID: repo.ID,
- Type: models.TypeCloudBrainTwo,
+ Type: typeCloudBrain,
JobTypeNot: false,
JobTypes: jobTypes,
IsLatestVersion: modelarts.IsLatestVersion,
@@ -494,11 +554,16 @@ func TrainJobIndex(ctx *context.Context) {
for i, task := range tasks {
tasks[i].CanDel = cloudbrain.CanDeleteJob(ctx, &task.Cloudbrain)
tasks[i].CanModify = cloudbrain.CanModifyJob(ctx, &task.Cloudbrain)
- tasks[i].ComputeResource = models.NPUResource
+ if task.Cloudbrain.Type == models.TypeCloudBrainOne {
+ tasks[i].ComputeResource = models.GPUResource
+ } else if task.Cloudbrain.Type == models.TypeCloudBrainTwo {
+ tasks[i].ComputeResource = models.NPUResource
+ }
}
pager := context.NewPagination(int(count), setting.UI.IssuePagingNum, page, 5)
pager.SetDefaultParams(ctx)
+ pager.AddParam(ctx, "listType", "ListType")
ctx.Data["Page"] = pager
ctx.Data["PageIsCloudBrain"] = true
@@ -580,6 +645,7 @@ func trainJobNewDataPrepare(ctx *context.Context) error {
return err
}
ctx.Data["config_list"] = configList.ParaConfigs
+ ctx.Data["cloudbraintype"] = models.TypeCloudBrainTwo
return nil
}
@@ -746,6 +812,7 @@ func trainJobNewVersionDataPrepare(ctx *context.Context) error {
ctx.Data["uuid"] = task.Uuid
ctx.Data["flavor_code"] = task.FlavorCode
ctx.Data["engine_id"] = task.EngineID
+ ctx.Data["cloudbraintype"] = models.TypeCloudBrainTwo
configList, err := getConfigList(modelarts.PerPage, 1, modelarts.SortByCreateTime, "desc", "", modelarts.ConfigTypeCustom)
if err != nil {
@@ -953,17 +1020,9 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm)
return
}
- //todo: del local code?
-
var parameters models.Parameters
param := make([]models.Parameter, 0)
- param = append(param, models.Parameter{
- Label: modelarts.TrainUrl,
- Value: outputObsPath,
- }, models.Parameter{
- Label: modelarts.DataUrl,
- Value: dataPath,
- })
+ existDeviceTarget := false
if len(params) != 0 {
err := json.Unmarshal([]byte(params), ¶meters)
if err != nil {
@@ -974,6 +1033,9 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm)
}
for _, parameter := range parameters.Parameter {
+ if parameter.Label == modelarts.DeviceTarget {
+ existDeviceTarget = true
+ }
if parameter.Label != modelarts.TrainUrl && parameter.Label != modelarts.DataUrl {
param = append(param, models.Parameter{
Label: parameter.Label,
@@ -982,9 +1044,22 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm)
}
}
}
+ if !existDeviceTarget {
+ param = append(param, models.Parameter{
+ Label: modelarts.DeviceTarget,
+ Value: modelarts.Ascend,
+ })
+ }
//save param config
if isSaveParam == "on" {
+ saveparams := append(param, models.Parameter{
+ Label: modelarts.TrainUrl,
+ Value: outputObsPath,
+ }, models.Parameter{
+ Label: modelarts.DataUrl,
+ Value: dataPath,
+ })
if form.ParameterTemplateName == "" {
log.Error("ParameterTemplateName is empty")
trainJobNewDataPrepare(ctx)
@@ -1006,7 +1081,7 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm)
EngineID: int64(engineID),
LogUrl: logObsPath,
PoolID: poolID,
- Parameter: param,
+ Parameter: saveparams,
})
if err != nil {
@@ -1032,7 +1107,7 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm)
LogUrl: logObsPath,
PoolID: poolID,
Uuid: uuid,
- Parameters: parameters.Parameter,
+ Parameters: param,
CommitID: commitID,
IsLatestVersion: isLatestVersion,
BranchName: branch_name,
@@ -1168,13 +1243,7 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ
var parameters models.Parameters
param := make([]models.Parameter, 0)
- param = append(param, models.Parameter{
- Label: modelarts.TrainUrl,
- Value: outputObsPath,
- }, models.Parameter{
- Label: modelarts.DataUrl,
- Value: dataPath,
- })
+ existDeviceTarget := false
if len(params) != 0 {
err := json.Unmarshal([]byte(params), ¶meters)
if err != nil {
@@ -1183,8 +1252,10 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ
ctx.RenderWithErr("运行参数错误", tplModelArtsTrainJobVersionNew, &form)
return
}
-
for _, parameter := range parameters.Parameter {
+ if parameter.Label == modelarts.DeviceTarget {
+ existDeviceTarget = true
+ }
if parameter.Label != modelarts.TrainUrl && parameter.Label != modelarts.DataUrl {
param = append(param, models.Parameter{
Label: parameter.Label,
@@ -1193,9 +1264,22 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ
}
}
}
+ if !existDeviceTarget {
+ param = append(param, models.Parameter{
+ Label: modelarts.DeviceTarget,
+ Value: modelarts.Ascend,
+ })
+ }
//save param config
if isSaveParam == "on" {
+ saveparams := append(param, models.Parameter{
+ Label: modelarts.TrainUrl,
+ Value: outputObsPath,
+ }, models.Parameter{
+ Label: modelarts.DataUrl,
+ Value: dataPath,
+ })
if form.ParameterTemplateName == "" {
log.Error("ParameterTemplateName is empty")
versionErrorDataPrepare(ctx, form)
@@ -1217,7 +1301,7 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ
EngineID: int64(engineID),
LogUrl: logObsPath,
PoolID: poolID,
- Parameter: parameters.Parameter,
+ Parameter: saveparams,
})
if err != nil {
@@ -1228,12 +1312,6 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ
}
}
- if err != nil {
- log.Error("getFlavorNameByEngineID(%s) failed:%v", engineID, err.Error())
- ctx.RenderWithErr(err.Error(), tplModelArtsTrainJobVersionNew, &form)
- return
- }
-
task, err := models.GetCloudbrainByJobIDAndVersionName(jobID, PreVersionName)
if err != nil {
log.Error("GetCloudbrainByJobIDAndVersionName(%s) failed:%v", jobID, err.Error())
@@ -1257,7 +1335,7 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ
PoolID: poolID,
Uuid: uuid,
Params: form.Params,
- Parameters: parameters.Parameter,
+ Parameters: param,
PreVersionId: task.VersionID,
CommitID: commitID,
BranchName: branch_name,
@@ -1535,6 +1613,7 @@ func trainJobGetLog(jobID string) (*models.GetTrainJobLogFileNamesResult, *model
func TrainJobDel(ctx *context.Context) {
var jobID = ctx.Params(":jobid")
+ var listType = ctx.Query("listType")
repo := ctx.Repo.Repository
var jobTypes []string
@@ -1576,12 +1655,13 @@ func TrainJobDel(ctx *context.Context) {
if ctx.IsUserSiteAdmin() && isAdminPage == "true" {
ctx.Redirect(setting.AppSubURL + "/admin" + "/cloudbrains")
} else {
- ctx.Redirect(setting.AppSubURL + ctx.Repo.RepoLink + "/modelarts/train-job")
+ ctx.Redirect(setting.AppSubURL + ctx.Repo.RepoLink + "/modelarts/train-job?listType=" + listType)
}
}
func TrainJobStop(ctx *context.Context) {
var jobID = ctx.Params(":jobid")
+ var listType = ctx.Query("listType")
task := ctx.Cloudbrain
_, err := modelarts.StopTrainJob(jobID, strconv.FormatInt(task.VersionID, 10))
@@ -1591,7 +1671,7 @@ func TrainJobStop(ctx *context.Context) {
return
}
- ctx.Redirect(setting.AppSubURL + ctx.Repo.RepoLink + "/modelarts/train-job")
+ ctx.Redirect(setting.AppSubURL + ctx.Repo.RepoLink + "/modelarts/train-job?listType=" + listType)
}
func canUserCreateTrainJob(uid int64) (bool, error) {
@@ -1782,7 +1862,6 @@ func InferenceJobCreate(ctx *context.Context, form auth.CreateModelArtsInference
return
}
- //todo: del local code?
var parameters models.Parameters
param := make([]models.Parameter, 0)
param = append(param, models.Parameter{
@@ -1792,6 +1871,7 @@ func InferenceJobCreate(ctx *context.Context, form auth.CreateModelArtsInference
Label: modelarts.CkptUrl,
Value: "s3:/" + ckptUrl,
})
+ existDeviceTarget := false
if len(params) != 0 {
err := json.Unmarshal([]byte(params), ¶meters)
if err != nil {
@@ -1802,6 +1882,9 @@ func InferenceJobCreate(ctx *context.Context, form auth.CreateModelArtsInference
}
for _, parameter := range parameters.Parameter {
+ if parameter.Label == modelarts.DeviceTarget {
+ existDeviceTarget = true
+ }
if parameter.Label != modelarts.TrainUrl && parameter.Label != modelarts.DataUrl {
param = append(param, models.Parameter{
Label: parameter.Label,
@@ -1810,6 +1893,12 @@ func InferenceJobCreate(ctx *context.Context, form auth.CreateModelArtsInference
}
}
}
+ if !existDeviceTarget {
+ param = append(param, models.Parameter{
+ Label: modelarts.DeviceTarget,
+ Value: modelarts.Ascend,
+ })
+ }
req := &modelarts.GenerateInferenceJobReq{
JobName: jobName,
@@ -1977,6 +2066,7 @@ func inferenceJobNewDataPrepare(ctx *context.Context) error {
New: MODEL_LATEST,
})
ctx.Data["MODEL_COUNT"] = model_count
+ ctx.Data["cloudbraintype"] = models.TypeCloudBrainTwo
return nil
}
@@ -2246,7 +2336,7 @@ func SetJobCount(ctx *context.Context) {
repoId := ctx.Repo.Repository.ID
_, jobCount, err := models.Cloudbrains(&models.CloudbrainsOptions{
RepoID: repoId,
- Type: modelarts.DebugType,
+ Type: models.TypeCloudBrainAll,
})
if err != nil {
ctx.ServerError("Get job faild:", err)
diff --git a/routers/repo/setting.go b/routers/repo/setting.go
index 5b057dbe5..af28f3290 100644
--- a/routers/repo/setting.go
+++ b/routers/repo/setting.go
@@ -245,10 +245,6 @@ func SettingsPost(ctx *context.Context, form auth.RepoSettingForm) {
// This section doesn't require repo_name/RepoName to be set in the form, don't show it
// as an error on the UI for this action
ctx.Data["Err_RepoName"] = nil
- if err := models.CreateDefaultDatasetToRepo(repo); err != nil {
- ctx.ServerError("CreateDefaultDatasetToRepo", err)
- return
- }
if form.EnableDataset && !models.UnitTypeDatasets.UnitGlobalDisabled() {
units = append(units, models.RepoUnit{
diff --git a/routers/routes/routes.go b/routers/routes/routes.go
index a2e969a60..1f950ef7d 100755
--- a/routers/routes/routes.go
+++ b/routers/routes/routes.go
@@ -323,6 +323,9 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Get("/action/notification", routers.ActionNotification)
m.Get("/recommend/org", routers.RecommendOrgFromPromote)
m.Get("/recommend/repo", routers.RecommendRepoFromPromote)
+ m.Post("/all/search/", routers.Search)
+ m.Get("/all/search/", routers.EmptySearch)
+ m.Get("/all/dosearch/", routers.SearchApi)
m.Get("/home/term", routers.HomeTerm)
m.Group("/explore", func() {
m.Get("", func(ctx *context.Context) {
@@ -587,6 +590,8 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Post("/delete", repo.DeleteAttachment)
m.Get("/get_pre_url", repo.GetPresignedPutObjectURL)
m.Post("/add", repo.AddAttachment)
+
+ m.Post("/edit", bindIgnErr(auth.EditAttachmentForm{}), repo.EditAttachment)
m.Post("/private", repo.UpdatePublicAttachment)
m.Get("/get_chunks", repo.GetSuccessChunks)
m.Get("/new_multipart", repo.NewMultipart)
@@ -979,7 +984,24 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Group("/datasets", func() {
m.Get("", reqRepoDatasetReader, repo.DatasetIndex)
- m.Post("", reqRepoDatasetWriter, bindIgnErr(auth.EditDatasetForm{}), repo.EditDatasetPost)
+ m.Put("/:id/:action", reqRepoDatasetReader, repo.DatasetAction)
+ m.Get("/create", reqRepoDatasetWriter, repo.CreateDataset)
+ m.Post("/create", reqRepoDatasetWriter, bindIgnErr(auth.CreateDatasetForm{}), repo.CreateDatasetPost)
+ m.Get("/edit/:id", reqRepoDatasetWriter, repo.EditDataset)
+ m.Post("/edit", reqRepoDatasetWriter, bindIgnErr(auth.EditDatasetForm{}), repo.EditDatasetPost)
+ m.Get("/current_repo", repo.CurrentRepoDataset)
+ m.Get("/my_datasets", repo.MyDatasets)
+ m.Get("/public_datasets", repo.PublicDataset)
+ m.Get("/my_favorite", repo.MyFavoriteDataset)
+
+ m.Group("/status", func() {
+ m.Get("/:uuid", repo.GetDatasetStatus)
+ })
+
+ m.Group("/attachments", func() {
+ m.Get("/upload", repo.UploadAttachmentUI)
+ m.Get("/edit/:id", repo.EditAttachmentUI)
+ }, reqSignIn)
m.Group("/dirs", func() {
m.Get("/:uuid", reqRepoDatasetReader, repo.DirIndex)
@@ -1017,6 +1039,19 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Post("/create", reqWechatBind, reqRepoCloudBrainWriter, bindIgnErr(auth.CreateCloudBrainForm{}), repo.CloudBrainBenchmarkCreate)
m.Get("/get_child_types", repo.GetChildTypes)
})
+
+ m.Group("/train-job", func() {
+ m.Group("/:jobid", func() {
+ m.Get("", reqRepoCloudBrainReader, repo.CloudBrainTrainJobShow)
+ m.Post("/del", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo.CloudBrainTrainJobDel)
+ //m.Get("/models", reqRepoCloudBrainReader, repo.CloudBrainShowModels)
+ m.Get("/download_model", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo.CloudBrainDownloadModel)
+ //m.Get("/create_version", reqWechatBind, cloudbrain.AdminOrJobCreaterRightForTrain, repo.TrainJobNewVersion)
+ //m.Post("/create_version", reqWechatBind, cloudbrain.AdminOrJobCreaterRightForTrain, bindIgnErr(auth.CreateModelArtsTrainJobForm{}), repo.TrainJobCreateVersion)
+ })
+ m.Get("/create", reqWechatBind, reqRepoCloudBrainWriter, repo.CloudBrainTrainJobNew)
+ m.Post("/create", reqWechatBind, reqRepoCloudBrainWriter, bindIgnErr(auth.CreateCloudBrainForm{}), repo.CloudBrainCreate)
+ })
}, context.RepoRef())
m.Group("/modelmanage", func() {
m.Post("/create_model", reqRepoModelManageWriter, repo.SaveModel)
diff --git a/routers/search.go b/routers/search.go
new file mode 100644
index 000000000..bc1bc5fac
--- /dev/null
+++ b/routers/search.go
@@ -0,0 +1,1190 @@
+package routers
+
+import (
+ "encoding/json"
+ "fmt"
+ "sort"
+ "strconv"
+ "strings"
+
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/modules/context"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/timeutil"
+ "github.com/olivere/elastic/v7"
+)
+
+type SearchRes struct {
+ Total int64
+ Result []map[string]interface{}
+ PrivateTotal int64
+}
+
+var client *elastic.Client
+
+func InitESClient() {
+ ESSearchUrl := setting.ESSearchURL
+ var err error
+ client, err = elastic.NewClient(elastic.SetSniff(false), elastic.SetURL(ESSearchUrl))
+ if err != nil {
+ log.Info("es init error.")
+ //panic(err)
+ }
+}
+
+func EmptySearch(ctx *context.Context) {
+ log.Info("search template.")
+ ctx.Data["Keyword"] = ""
+ ctx.HTML(200, "explore/search_new")
+}
+
+func Search(ctx *context.Context) {
+ log.Info("search template.")
+ keyword := strings.Trim(ctx.Query("q"), " ")
+ ctx.Data["Keyword"] = keyword
+ ctx.Data["SortType"] = "newest"
+ ctx.HTML(200, "explore/search_new")
+}
+
+func SearchApi(ctx *context.Context) {
+ TableName := ctx.Query("TableName")
+ Key := ctx.Query("Key")
+ Page := ctx.QueryInt("Page")
+ PageSize := ctx.QueryInt("PageSize")
+ OnlyReturnNum := ctx.QueryBool("OnlyReturnNum")
+ OnlySearchLabel := ctx.QueryBool("OnlySearchLabel")
+
+ if Page <= 0 {
+ Page = 1
+ }
+ if PageSize <= 0 || PageSize > 200 {
+ PageSize = setting.UI.IssuePagingNum
+ }
+ if Key != "" && !OnlyReturnNum {
+ go models.SaveSearchKeywordToDb(Key)
+ }
+ if TableName == "repository" {
+ if OnlySearchLabel {
+ searchRepoByLabel(ctx, Key, Page, PageSize)
+ } else {
+ searchRepo(ctx, "repository-es-index", Key, Page, PageSize, OnlyReturnNum)
+ }
+ return
+ } else if TableName == "issue" {
+ searchIssueOrPr(ctx, "issue-es-index", Key, Page, PageSize, OnlyReturnNum, "f")
+ return
+ } else if TableName == "user" {
+ searchUserOrOrg(ctx, "user-es-index", Key, Page, PageSize, true, OnlyReturnNum)
+ return
+ } else if TableName == "org" {
+ searchUserOrOrg(ctx, "user-es-index", Key, Page, PageSize, false, OnlyReturnNum)
+ return
+ } else if TableName == "dataset" {
+ searchDataSet(ctx, "dataset-es-index", Key, Page, PageSize, OnlyReturnNum)
+ return
+ } else if TableName == "pr" {
+ searchIssueOrPr(ctx, "issue-es-index", Key, Page, PageSize, OnlyReturnNum, "t")
+ //searchPR(ctx, "issue-es-index", Key, Page, PageSize, OnlyReturnNum)
+ return
+ }
+}
+
+func searchRepoByLabel(ctx *context.Context, Key string, Page int, PageSize int) {
+ /*
+ Repositories, ES index: repository-es-index
+ Searched fields:
+ name character varying(255), repository name
+ description text, repository description
+ topics json, topic labels
+ Sorted fields:
+ updated_unix
+ num_watches,
+ num_stars,
+ num_forks,
+ */
+ SortBy := ctx.Query("SortBy")
+ PrivateTotal := ctx.QueryInt("PrivateTotal")
+ WebTotal := ctx.QueryInt("WebTotal")
+ ascending := ctx.QueryBool("Ascending")
+ language := ctx.Query("language")
+ if language == "" {
+ language = "zh-CN"
+ }
+ from := (Page - 1) * PageSize
+ resultObj := &SearchRes{}
+ log.Info("WebTotal=" + fmt.Sprint(WebTotal))
+ log.Info("PrivateTotal=" + fmt.Sprint(PrivateTotal))
+ resultObj.Result = make([]map[string]interface{}, 0)
+ if from == 0 {
+ WebTotal = 0
+ }
+ if ctx.User != nil && (from < PrivateTotal || from == 0) {
+ orderBy := models.SearchOrderByRecentUpdated
+ switch SortBy {
+ case "updated_unix.keyword":
+ orderBy = models.SearchOrderByRecentUpdated
+ case "num_stars":
+ orderBy = models.SearchOrderByStarsReverse
+ case "num_forks":
+ orderBy = models.SearchOrderByForksReverse
+ case "num_watches":
+ orderBy = models.SearchOrderByWatches
+ }
+ log.Info("actor is null?:" + fmt.Sprint(ctx.User == nil))
+ repos, count, err := models.SearchRepository(&models.SearchRepoOptions{
+ ListOptions: models.ListOptions{
+ Page: Page,
+ PageSize: PageSize,
+ },
+ Actor: ctx.User,
+ OrderBy: orderBy,
+ Private: true,
+ OnlyPrivate: true,
+ TopicOnly: true,
+ TopicName: Key,
+ IncludeDescription: setting.UI.SearchRepoDescription,
+ })
+ if err != nil {
+ ctx.JSON(200, "")
+ return
+ }
+ resultObj.PrivateTotal = count
+ if repos.Len() > 0 {
+ log.Info("Query private repo number is:" + fmt.Sprint(repos.Len()))
+ makePrivateRepo(repos, resultObj, Key, language)
+ } else {
+ log.Info("not found private repo,keyword=" + Key)
+ }
+ if repos.Len() >= PageSize {
+ if WebTotal > 0 {
+ resultObj.Total = int64(WebTotal)
+ ctx.JSON(200, resultObj)
+ return
+ }
+ }
+ } else {
+ if ctx.User == nil {
+ resultObj.PrivateTotal = 0
+ } else {
+ resultObj.PrivateTotal = int64(PrivateTotal)
+ }
+ }
+
+ from = from - PrivateTotal
+ if from < 0 {
+ from = 0
+ }
+ Size := PageSize - len(resultObj.Result)
+
+ log.Info("query searchRepoByLabel start")
+ if Key != "" {
+ boolQ := elastic.NewBoolQuery()
+ topicsQuery := elastic.NewMatchQuery("topics", Key)
+ boolQ.Should(topicsQuery)
+
+ res, err := client.Search("repository-es-index").Query(boolQ).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Highlight(queryHighlight("topics")).Do(ctx.Req.Context())
+ if err == nil {
+ searchJson, _ := json.Marshal(res)
+ log.Info("searchJson=" + string(searchJson))
+ esresult := makeRepoResult(res, "", false, language)
+ resultObj.Total = resultObj.PrivateTotal + esresult.Total
+ resultObj.Result = append(resultObj.Result, esresult.Result...)
+ ctx.JSON(200, resultObj)
+ } else {
+ log.Info("query es error," + err.Error())
+ ctx.JSON(200, "")
+ }
+ } else {
+ ctx.JSON(200, "")
+ }
+}
+
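+// getSort maps the SortBy parameter onto an elastic.Sorter; "" and "default"
+// fall back to score ordering. Note that olivere/elastic's Order(true) means
+// ascending, so Ascending=false yields a descending sort.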
+func getSort(SortBy string, ascending bool) elastic.Sorter {
+ var sort elastic.Sorter
+ sort = elastic.NewScoreSort()
+ if SortBy != "" {
+ if SortBy == "default" {
+ return sort
+ }
+ return elastic.NewFieldSort(SortBy).Order(ascending)
+ }
+ return sort
+}
+
+func searchRepo(ctx *context.Context, TableName string, Key string, Page int, PageSize int, OnlyReturnNum bool) {
+ /*
+ Repositories, ES index: repository-es-index
+ Searched fields:
+ name character varying(255), repository name
+ description text, repository description
+ topics json, topic labels
+ Sorted fields:
+ updated_unix
+ num_watches,
+ num_stars,
+ num_forks,
+ */
+
+ SortBy := ctx.Query("SortBy")
+ PrivateTotal := ctx.QueryInt("PrivateTotal")
+ WebTotal := ctx.QueryInt("WebTotal")
+ ascending := ctx.QueryBool("Ascending")
+ from := (Page - 1) * PageSize
+ resultObj := &SearchRes{}
+ log.Info("WebTotal=" + fmt.Sprint(WebTotal))
+ log.Info("PrivateTotal=" + fmt.Sprint(PrivateTotal))
+ resultObj.Result = make([]map[string]interface{}, 0)
+ if from == 0 {
+ WebTotal = 0
+ }
+ language := ctx.Query("language")
+ if language == "" {
+ language = "zh-CN"
+ }
+ if ctx.User != nil && (from < PrivateTotal || from == 0) {
+ orderBy := models.SearchOrderByRecentUpdated
+ switch SortBy {
+ case "updated_unix.keyword":
+ orderBy = models.SearchOrderByRecentUpdated
+ case "num_stars":
+ orderBy = models.SearchOrderByStarsReverse
+ case "num_forks":
+ orderBy = models.SearchOrderByForksReverse
+ case "num_watches":
+ orderBy = models.SearchOrderByWatches
+ }
+ log.Info("actor is null?:" + fmt.Sprint(ctx.User == nil))
+ repos, count, err := models.SearchRepository(&models.SearchRepoOptions{
+ ListOptions: models.ListOptions{
+ Page: Page,
+ PageSize: PageSize,
+ },
+ Actor: ctx.User,
+ OrderBy: orderBy,
+ Private: true,
+ OnlyPrivate: true,
+ Keyword: Key,
+ IncludeDescription: setting.UI.SearchRepoDescription,
+ OnlySearchPrivate: true,
+ })
+ if err != nil {
+ ctx.JSON(200, "")
+ return
+ }
+ resultObj.PrivateTotal = count
+ if repos.Len() > 0 {
+ log.Info("Query private repo number is:" + fmt.Sprint(repos.Len()))
+ makePrivateRepo(repos, resultObj, Key, language)
+ } else {
+ log.Info("not found private repo,keyword=" + Key)
+ }
+ if repos.Len() >= PageSize {
+ if WebTotal > 0 {
+ resultObj.Total = int64(WebTotal)
+ ctx.JSON(200, resultObj)
+ return
+ }
+ }
+ } else {
+ if ctx.User == nil {
+ resultObj.PrivateTotal = 0
+ } else {
+ resultObj.PrivateTotal = int64(PrivateTotal)
+ }
+ }
+
+ from = from - PrivateTotal
+ if from < 0 {
+ from = 0
+ }
+ Size := PageSize - len(resultObj.Result)
+
+ log.Info("query searchRepo start")
+ if Key != "" {
+ boolQ := elastic.NewBoolQuery()
+ nameQuery := elastic.NewMatchQuery("alias", Key).Boost(1024).QueryName("f_first")
+ descriptionQuery := elastic.NewMatchQuery("description", Key).Boost(1.5).QueryName("f_second")
+ topicsQuery := elastic.NewMatchQuery("topics", Key).Boost(1).QueryName("f_third")
+ boolQ.Should(nameQuery, descriptionQuery, topicsQuery)
+
+ res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Highlight(queryHighlight("alias", "description", "topics")).Do(ctx.Req.Context())
+ if err == nil {
+ searchJson, _ := json.Marshal(res)
+ log.Info("searchJson=" + string(searchJson))
+ esresult := makeRepoResult(res, Key, OnlyReturnNum, language)
+ resultObj.Total = resultObj.PrivateTotal + esresult.Total
+ isNeedSort := false
+ if len(resultObj.Result) > 0 {
+ isNeedSort = true
+ }
+ resultObj.Result = append(resultObj.Result, esresult.Result...)
+ if isNeedSort {
+ sortRepo(resultObj.Result, SortBy, ascending)
+ }
+ ctx.JSON(200, resultObj)
+ } else {
+ log.Info("query es error," + err.Error())
+ ctx.JSON(200, "")
+ }
+ } else {
+ log.Info("query all content.")
+ // the sort attribute must be mapped as {"timestamp":{"unmapped_type":"date"}}
+ res, err := client.Search(TableName).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Do(ctx.Req.Context())
+ if err == nil {
+ searchJson, _ := json.Marshal(res)
+ log.Info("searchJson=" + string(searchJson))
+ esresult := makeRepoResult(res, "", OnlyReturnNum, language)
+ resultObj.Total = resultObj.PrivateTotal + esresult.Total
+ resultObj.Result = append(resultObj.Result, esresult.Result...)
+ ctx.JSON(200, resultObj)
+ } else {
+ log.Info("query es error," + err.Error())
+ ctx.JSON(200, "")
+ }
+ }
+}
+
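+// sortRepo re-sorts the merged private + ES page by the requested field,
+// honoring Ascending, because the two result sources are fetched and sorted
+// independently.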
+func sortRepo(Result []map[string]interface{}, SortBy string, ascending bool) {
+ orderBy := ""
+ switch SortBy {
+ case "updated_unix.keyword":
+ orderBy = "updated_unix"
+ case "num_stars":
+ orderBy = "num_stars"
+ case "num_forks":
+ orderBy = "num_forks"
+ case "num_watches":
+ orderBy = "num_watches"
+ }
+ sort.Slice(Result, func(i, j int) bool {
+ if ascending {
+ return getInt(Result[i][orderBy], orderBy) < getInt(Result[j][orderBy], orderBy)
+ }
+ return getInt(Result[i][orderBy], orderBy) > getInt(Result[j][orderBy], orderBy)
+ })
+}
+
+func getInt(tmp interface{}, orderBy string) int64 {
+ timeInt, err := strconv.ParseInt(fmt.Sprint(tmp), 10, 64)
+ if err == nil {
+ return timeInt
+ } else {
+ log.Info("convert " + orderBy + " error type=" + fmt.Sprint(tmp))
+ }
+ return -1
+}
+
+func makePrivateRepo(repos models.RepositoryList, res *SearchRes, keyword string, language string) {
+
+ for _, repo := range repos {
+ record := make(map[string]interface{})
+ record["id"] = repo.ID
+ record["name"] = makeHighLight(keyword, repo.Name)
+ record["real_name"] = repo.Name
+ record["owner_name"] = repo.OwnerName
+ record["description"] = truncLongText(makeHighLight(keyword, repo.Description), true)
+
+ hightTopics := make([]string, 0)
+ if len(repo.Topics) > 0 {
+ for _, t := range repo.Topics {
+ hightTopics = append(hightTopics, makeHighLight(keyword, t))
+ }
+ }
+ record["hightTopics"] = hightTopics
+
+ record["num_watches"] = repo.NumWatches
+ record["num_stars"] = repo.NumStars
+ record["num_forks"] = repo.NumForks
+ record["alias"] = truncLongText(makeHighLight(keyword, repo.Alias), true)
+ record["lower_alias"] = repo.LowerAlias
+ record["topics"] = repo.Topics
+ record["avatar"] = repo.RelAvatarLink()
+ if len(repo.RelAvatarLink()) == 0 {
+ record["avatar"] = setting.RepositoryAvatarFallbackImage
+ }
+ record["updated_unix"] = repo.UpdatedUnix
+ record["updated_html"] = timeutil.TimeSinceUnix(repo.UpdatedUnix, language)
+ lang, err := repo.GetTopLanguageStats(1)
+ if err == nil && len(lang) > 0 {
+ record["lang"] = lang[0].Language
+ } else {
+ record["lang"] = ""
+ }
+ record["is_private"] = true
+ res.Result = append(res.Result, record)
+ }
+}
+
+func makeHighLight(keyword string, dest string) string {
+
+ dest = replaceIgnoreUpperOrLower(dest, strings.ToLower(dest), strings.ToLower(keyword))
+
+ return dest
+}
+
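+// replaceIgnoreUpperOrLower wraps each case-insensitive occurrence of
+// keywordLower in dest with <font color='red'>...</font>. Matching is done
+// byte-wise on the pre-lowered copies, so dest and destLower must be the same
+// string in original and lower case.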
+func replaceIgnoreUpperOrLower(dest string, destLower string, keywordLower string) string {
+ re := ""
+ last := 0
+ lenDestLower := len(destLower)
+ lenkeywordLower := len(keywordLower)
+ for i := 0; i < lenDestLower; i++ {
+ if destLower[i] == keywordLower[0] {
+ isFind := true
+ for j := 1; j < lenkeywordLower; j++ {
+ if (i+j) < lenDestLower && keywordLower[j] != destLower[i+j] {
+ isFind = false
+ break
+ }
+ }
+ if isFind && (i+lenkeywordLower) <= lenDestLower {
+ re += dest[last:i] + "\u003cfont color='red'\u003e" + dest[i:(i+lenkeywordLower)] + "\u003c/font\u003e"
+ i = i + lenkeywordLower
+ last = i
+ }
+ }
+ }
+ if last < lenDestLower {
+ re += dest[last:lenDestLower]
+ }
+ return re
+}
+
+func makeRepoResult(sRes *elastic.SearchResult, Key string, OnlyReturnNum bool, language string) *SearchRes {
+ total := sRes.Hits.TotalHits.Value
+ result := make([]map[string]interface{}, 0)
+ if !OnlyReturnNum {
+ for i, hit := range sRes.Hits.Hits {
+ log.Info("this is repo query " + fmt.Sprint(i) + " result.")
+ recordSource := make(map[string]interface{})
+ source, err := hit.Source.MarshalJSON()
+
+ if err == nil {
+ err = json.Unmarshal(source, &recordSource)
+ if err == nil {
+ record := make(map[string]interface{})
+ record["id"] = hit.Id
+ record["alias"] = getLabelValue("alias", recordSource, hit.Highlight)
+ record["real_name"] = recordSource["name"]
+ record["owner_name"] = recordSource["owner_name"]
+ if recordSource["description"] != nil {
+ desc := getLabelValue("description", recordSource, hit.Highlight)
+ record["description"] = dealLongText(desc, Key, hit.MatchedQueries)
+ } else {
+ record["description"] = ""
+ }
+
+ record["hightTopics"] = jsonStrToArray(getLabelValue("topics", recordSource, hit.Highlight))
+ record["num_watches"] = recordSource["num_watches"]
+ record["num_stars"] = recordSource["num_stars"]
+ record["num_forks"] = recordSource["num_forks"]
+ record["lower_alias"] = recordSource["lower_alias"]
+ if recordSource["topics"] != nil {
+ topicsStr := recordSource["topics"].(string)
+ log.Info("topicsStr=" + topicsStr)
+ if topicsStr != "null" {
+ record["topics"] = jsonStrToArray(topicsStr)
+ }
+ }
+ if recordSource["avatar"] != nil {
+ avatarstr := recordSource["avatar"].(string)
+ if len(avatarstr) == 0 {
+ record["avatar"] = setting.RepositoryAvatarFallbackImage
+ } else {
+ record["avatar"] = setting.AppSubURL + "/repo-avatars/" + avatarstr
+ }
+ }
+ record["updated_unix"] = recordSource["updated_unix"]
+ setUpdateHtml(record, recordSource["updated_unix"].(string), language)
+
+ record["lang"] = recordSource["lang"]
+ record["is_private"] = false
+ result = append(result, record)
+ } else {
+ log.Info("deal repo source error," + err.Error())
+ }
+ } else {
+ log.Info("deal repo source error," + err.Error())
+ }
+ }
+ }
+ returnObj := &SearchRes{
+ Total: total,
+ Result: result,
+ }
+
+ return returnObj
+}
+
+func setUpdateHtml(record map[string]interface{}, updated_unix string, language string) {
+ timeInt, err := strconv.ParseInt(updated_unix, 10, 64)
+ if err == nil {
+ record["updated_html"] = timeutil.TimeSinceUnix(timeutil.TimeStamp(timeInt), language)
+ }
+}
+
+func jsonStrToArray(str string) []string {
+ b := []byte(str)
+ strs := make([]string, 0)
+ err := json.Unmarshal(b, &strs)
+ if err != nil {
+ log.Info("convert str arrar error, str=" + str)
+ }
+ return strs
+}
+
+func dealLongText(text string, Key string, MatchedQueries []string) string {
+ isNeedToDealText := false
+ if len(MatchedQueries) > 0 && Key != "" {
+ if MatchedQueries[0] == "f_second" || MatchedQueries[0] == "f_third" {
+ isNeedToDealText = true
+ }
+ }
+ return truncLongText(text, isNeedToDealText)
+}
+
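+// truncLongText shortens highlighted text to roughly 200 runes; when the text
+// was highlight-matched (isNeedToDealText) it keeps a window of about 50 runes
+// before and 150 after the first highlight marker so the match stays visible,
+// then trimFontHtml repairs any tag cut off at the window edge.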
+func truncLongText(text string, isNeedToDealText bool) string {
+ startStr := "color="
+ textRune := []rune(text)
+ stringlen := len(textRune)
+ if isNeedToDealText && stringlen > 200 {
+ index := findFont(textRune, []rune(startStr))
+ if index > 0 {
+ start := index - 50
+ if start < 0 {
+ start = 0
+ }
+ end := index + 150
+ if end >= stringlen {
+ end = stringlen
+ }
+ return trimFontHtml(textRune[start:end]) + "..."
+ } else {
+ return trimFontHtml(textRune[0:200]) + "..."
+ }
+ } else {
+ if stringlen > 200 {
+ return trimFontHtml(textRune[0:200]) + "..."
+ } else {
+ return text
+ }
+ }
+}
+
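+// trimFontHtml truncates the rune slice at the first '<' that has no closing
+// '>', and appends a closing </font> when an odd number of complete tags was
+// seen (i.e. an opening highlight tag would otherwise be left unbalanced).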
+func trimFontHtml(text []rune) string {
+ startRune := rune('<')
+ endRune := rune('>')
+ count := 0
+ for i := 0; i < len(text); i++ {
+ if text[i] == startRune { //start <
+ re := false
+ j := i + 1
+ for ; j < len(text); j++ {
+ if text[j] == endRune {
+ re = true
+ break
+ }
+ }
+ if re { //found >
+ i = j
+ count++
+ } else {
+ if count%2 == 1 {
+ return string(text[0:i]) + "</font>"
+ } else {
+ return string(text[0:i])
+ }
+
+ }
+ }
+ }
+ return string(text)
+}
+
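+// trimHrefHtml removes anchor markup carried over from highlighted comments:
+// closing </a> tags and newlines are stripped wholesale, then each remaining
+// "<a ...>" opening tag is cut out in place.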
+func trimHrefHtml(result string) string {
+ result = strings.Replace(result, "</a>", "", -1)
+ result = strings.Replace(result, "\n", "", -1)
+ var index int
+ for {
+ index = findSubstr(result, 0, "<a")
+ if index != -1 {
+ sIndex := findSubstr(result, index, ">")
+ if sIndex != -1 {
+ // cut the whole opening tag
+ result = result[0:index] + result[sIndex+1:]
+ } else {
+ // malformed tag: drop just the "<a"
+ result = result[0:index] + result[index+2:]
+ }
+ } else {
+ break
+ }
+ }
+ return result
+}
+
+func findFont(text []rune, childText []rune) int {
+ // stop early enough that childText always fits inside text
+ for i := 0; i+len(childText) <= len(text); i++ {
+ if text[i] == childText[0] {
+ re := true
+ for j, k := range childText {
+ if k != text[i+j] {
+ re = false
+ break
+ }
+ }
+ if re {
+ return i
+ }
+ }
+ }
+ return -1
+}
+
+func findSubstr(text string, startindex int, childText string) int {
+ // stop early enough that childText always fits inside text
+ for i := startindex; i+len(childText) <= len(text); i++ {
+ if text[i] == childText[0] {
+ re := true
+ for k := range childText {
+ if childText[k] != text[i+k] {
+ re = false
+ break
+ }
+ }
+ if re {
+ return i
+ }
+ }
+ }
+ return -1
+}
+
+func searchUserOrOrg(ctx *context.Context, TableName string, Key string, Page int, PageSize int, IsQueryUser bool, OnlyReturnNum bool) {
+ /*
+ Users or organizations, ES index: user-es-index
+ Searched fields:
+ name, account name
+ full_name, full name
+ description, description or bio
+ Sorted fields:
+ created_unix
+ name, alphabetically
+ */
+ SortBy := ctx.Query("SortBy")
+ ascending := ctx.QueryBool("Ascending")
+ boolQ := elastic.NewBoolQuery()
+
+ typeValue := 1
+ if IsQueryUser {
+ typeValue = 0
+ }
+ UserOrOrgQuery := elastic.NewTermQuery("type", typeValue)
+ if Key != "" {
+ boolKeyQ := elastic.NewBoolQuery()
+ log.Info("user or org Key=" + Key)
+ nameQuery := elastic.NewMatchQuery("name", Key).Boost(2).QueryName("f_first")
+ full_nameQuery := elastic.NewMatchQuery("full_name", Key).Boost(1.5).QueryName("f_second")
+ descriptionQuery := elastic.NewMatchQuery("description", Key).Boost(1).QueryName("f_third")
+ boolKeyQ.Should(nameQuery, full_nameQuery, descriptionQuery)
+ boolQ.Must(UserOrOrgQuery, boolKeyQ)
+ } else {
+ boolQ.Must(UserOrOrgQuery)
+ }
+
+ res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending)).From((Page - 1) * PageSize).Size(PageSize).Highlight(queryHighlight("name", "full_name", "description")).Do(ctx.Req.Context())
+ if err == nil {
+ searchJson, _ := json.Marshal(res)
+ log.Info("searchJson=" + string(searchJson))
+ result := makeUserOrOrgResult(res, Key, ctx, OnlyReturnNum)
+ ctx.JSON(200, result)
+ } else {
+ log.Info("query es error," + err.Error())
+ ctx.JSON(200, "")
+ }
+}
+
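+// getLabelValue prefers the ES highlight fragment for key and falls back to
+// the raw _source value (or "" when the field is absent).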
+func getLabelValue(key string, recordSource map[string]interface{}, searchHighliht elastic.SearchHitHighlight) string {
+ if value, ok := searchHighliht[key]; !ok {
+ if recordSource[key] != nil {
+ return recordSource[key].(string)
+ } else {
+ return ""
+ }
+ } else {
+ return value[0]
+ }
+}
+
+func makeUserOrOrgResult(sRes *elastic.SearchResult, Key string, ctx *context.Context, OnlyReturnNum bool) *SearchRes {
+ total := sRes.Hits.TotalHits.Value
+ result := make([]map[string]interface{}, 0)
+ if !OnlyReturnNum {
+ for i, hit := range sRes.Hits.Hits {
+ log.Info("this is user query " + fmt.Sprint(i) + " result.")
+ recordSource := make(map[string]interface{})
+ source, err := hit.Source.MarshalJSON()
+
+ if err == nil {
+ err = json.Unmarshal(source, &recordSource)
+ if err == nil {
+ record := make(map[string]interface{})
+ record["id"] = hit.Id
+ record["name"] = getLabelValue("name", recordSource, hit.Highlight)
+ record["real_name"] = recordSource["name"]
+ record["full_name"] = getLabelValue("full_name", recordSource, hit.Highlight)
+ if recordSource["description"] != nil {
+ desc := getLabelValue("description", recordSource, hit.Highlight)
+ record["description"] = dealLongText(desc, Key, hit.MatchedQueries)
+ } else {
+ record["description"] = ""
+ }
+ if ctx.User != nil {
+ record["email"] = recordSource["email"]
+ } else {
+ record["email"] = ""
+ }
+
+ record["location"] = recordSource["location"]
+ record["website"] = recordSource["website"]
+ record["num_repos"] = recordSource["num_repos"]
+ record["num_teams"] = recordSource["num_teams"]
+ record["num_members"] = recordSource["num_members"]
+
+ record["avatar"] = strings.TrimRight(setting.AppSubURL, "/") + "/user/avatar/" + recordSource["name"].(string) + "/" + strconv.Itoa(-1)
+ record["updated_unix"] = recordSource["updated_unix"]
+ record["created_unix"] = recordSource["created_unix"]
+ record["add_time"] = getAddTime(recordSource["created_unix"].(string))
+ result = append(result, record)
+ } else {
+ log.Info("deal user source error," + err.Error())
+ }
+ } else {
+ log.Info("deal user source error," + err.Error())
+ }
+ }
+ }
+ returnObj := &SearchRes{
+ Total: total,
+ Result: result,
+ }
+ return returnObj
+}
+
+func getAddTime(time string) string {
+ timeInt, err := strconv.ParseInt(time, 10, 64)
+ if err == nil {
+ t := timeutil.TimeStamp(timeInt)
+ return t.FormatShort()
+ }
+ return ""
+}
+
+func searchDataSet(ctx *context.Context, TableName string, Key string, Page int, PageSize int, OnlyReturnNum bool) {
+ /*
+ Datasets, ES index: dataset-es-index
+ Searched fields:
+ title, dataset name
+ description, dataset description
+ category, category labels
+ file_name, dataset file names
+ Sorted fields:
+ download_times
+ */
+ log.Info("query searchdataset start")
+ SortBy := ctx.Query("SortBy")
+ ascending := ctx.QueryBool("Ascending")
+ PrivateTotal := ctx.QueryInt("PrivateTotal")
+ WebTotal := ctx.QueryInt("WebTotal")
+ language := ctx.Query("language")
+ if language == "" {
+ language = "zh-CN"
+ }
+ from := (Page - 1) * PageSize
+ if from == 0 {
+ WebTotal = 0
+ }
+ resultObj := &SearchRes{}
+ log.Info("WebTotal=" + fmt.Sprint(WebTotal))
+ log.Info("PrivateTotal=" + fmt.Sprint(PrivateTotal))
+ resultObj.Result = make([]map[string]interface{}, 0)
+
+ if ctx.User != nil && (from < PrivateTotal || from == 0) {
+
+ log.Info("actor is null?:" + fmt.Sprint(ctx.User == nil))
+ datasets, count, err := models.SearchDatasetBySQL(Page, PageSize, Key, ctx.User.ID)
+ if err != nil {
+ ctx.JSON(200, "")
+ return
+ }
+ resultObj.PrivateTotal = count
+ datasetSize := len(datasets)
+ if datasetSize > 0 {
+ log.Info("Query private dataset number is:" + fmt.Sprint(datasetSize) + " count=" + fmt.Sprint(count))
+ makePrivateDataSet(datasets, resultObj, Key, language)
+ } else {
+ log.Info("not found private dataset, keyword=" + Key)
+ }
+ if datasetSize >= PageSize {
+ if WebTotal > 0 { //next page, not first query.
+ resultObj.Total = int64(WebTotal)
+ ctx.JSON(200, resultObj)
+ return
+ }
+ }
+ } else {
+ resultObj.PrivateTotal = int64(PrivateTotal)
+ }
+
+ from = from - PrivateTotal
+ if from < 0 {
+ from = 0
+ }
+ Size := PageSize - len(resultObj.Result)
+
+ boolQ := elastic.NewBoolQuery()
+ if Key != "" {
+ nameQuery := elastic.NewMatchQuery("title", Key).Boost(2).QueryName("f_first")
+ descQuery := elastic.NewMatchQuery("description", Key).Boost(1.5).QueryName("f_second")
+ fileNameQuery := elastic.NewMatchQuery("file_name", Key).Boost(1).QueryName("f_third")
+ categoryQuery := elastic.NewMatchQuery("category", Key).Boost(1).QueryName("f_fourth")
+ boolQ.Should(nameQuery, descQuery, categoryQuery, fileNameQuery)
+ res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Highlight(queryHighlight("title", "description", "file_name", "category")).Do(ctx.Req.Context())
+ if err == nil {
+ searchJson, _ := json.Marshal(res)
+ log.Info("searchJson=" + string(searchJson))
+ esresult := makeDatasetResult(res, Key, OnlyReturnNum, language)
+ resultObj.Total = resultObj.PrivateTotal + esresult.Total
+ log.Info("query dataset es count=" + fmt.Sprint(esresult.Total) + " total=" + fmt.Sprint(resultObj.Total))
+ resultObj.Result = append(resultObj.Result, esresult.Result...)
+ ctx.JSON(200, resultObj)
+ } else {
+ log.Info("query es error," + err.Error())
+ }
+ } else {
+ log.Info("query all datasets.")
+ // the sort attribute must be mapped as {"timestamp":{"unmapped_type":"date"}}
+ res, err := client.Search(TableName).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Do(ctx.Req.Context())
+ if err == nil {
+ searchJson, _ := json.Marshal(res)
+ log.Info("searchJson=" + string(searchJson))
+ esresult := makeDatasetResult(res, "", OnlyReturnNum, language)
+ resultObj.Total = resultObj.PrivateTotal + esresult.Total
+ log.Info("query dataset es count=" + fmt.Sprint(esresult.Total) + " total=" + fmt.Sprint(resultObj.Total))
+ resultObj.Result = append(resultObj.Result, esresult.Result...)
+ ctx.JSON(200, resultObj)
+ } else {
+ log.Info("query es error," + err.Error())
+ ctx.JSON(200, "")
+ }
+ }
+
+}
+
+func makePrivateDataSet(datasets []*models.Dataset, res *SearchRes, Key string, language string) {
+ for _, dataset := range datasets {
+ record := make(map[string]interface{})
+
+ record["id"] = dataset.ID
+ userId := dataset.UserID
+
+ user, errUser := models.GetUserByID(userId)
+ if errUser == nil {
+ record["owerName"] = user.GetDisplayName()
+ record["avatar"] = user.RelAvatarLink()
+ }
+
+ repo, errRepo := models.GetRepositoryByID(dataset.RepoID)
+ if errRepo == nil {
+ log.Info("repo_url=" + repo.FullName())
+ record["repoUrl"] = repo.FullName()
+ record["avatar"] = repo.RelAvatarLink()
+ } else {
+ log.Info("repo err=" + errRepo.Error())
+ }
+
+ record["title"] = makeHighLight(Key, dataset.Title)
+ record["description"] = truncLongText(makeHighLight(Key, dataset.Description), true)
+
+ record["category"] = dataset.Category
+ record["task"] = dataset.Task
+ record["download_times"] = dataset.DownloadTimes
+ record["created_unix"] = dataset.CreatedUnix
+ record["updated_unix"] = repo.UpdatedUnix
+ record["updated_html"] = timeutil.TimeSinceUnix(repo.UpdatedUnix, language)
+
+ res.Result = append(res.Result, record)
+ }
+}
+
+func makeDatasetResult(sRes *elastic.SearchResult, Key string, OnlyReturnNum bool, language string) *SearchRes {
+ total := sRes.Hits.TotalHits.Value
+ result := make([]map[string]interface{}, 0)
+ if !OnlyReturnNum {
+ for i, hit := range sRes.Hits.Hits {
+ log.Info("this is dataset query " + fmt.Sprint(i) + " result.")
+ recordSource := make(map[string]interface{})
+ source, err := hit.Source.MarshalJSON()
+
+ if err == nil {
+ err = json.Unmarshal(source, &recordSource)
+ if err == nil {
+ record := make(map[string]interface{})
+ record["id"] = hit.Id
+ userIdStr := recordSource["user_id"].(string)
+ userId, cerr := strconv.ParseInt(userIdStr, 10, 64)
+ if cerr == nil {
+ user, errUser := models.GetUserByID(userId)
+ if errUser == nil {
+ record["owerName"] = user.GetDisplayName()
+ record["avatar"] = user.RelAvatarLink()
+ }
+ }
+ setRepoInfo(recordSource, record)
+ record["title"] = getLabelValue("title", recordSource, hit.Highlight)
+ record["category"] = getLabelValue("category", recordSource, hit.Highlight)
+ if recordSource["description"] != nil {
+ desc := getLabelValue("description", recordSource, hit.Highlight)
+ record["description"] = dealLongText(desc, Key, hit.MatchedQueries)
+ } else {
+ record["description"] = ""
+ }
+ record["file_name"] = getDatasetFileName(getLabelValue("file_name", recordSource, hit.Highlight))
+ record["task"] = recordSource["task"]
+ record["download_times"] = recordSource["download_times"]
+ record["created_unix"] = recordSource["created_unix"]
+ setUpdateHtml(record, recordSource["updated_unix"].(string), language)
+ result = append(result, record)
+ } else {
+ log.Info("deal dataset source error," + err.Error())
+ }
+ } else {
+ log.Info("deal dataset source error," + err.Error())
+ }
+ }
+ }
+ returnObj := &SearchRes{
+ Total: total,
+ Result: result,
+ }
+
+ return returnObj
+}
+
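+// getDatasetFileName rewrites the internal "-#,#-" separator, which the
+// dataset index uses to join multiple attachment file names, into a plain
+// comma-separated list.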
+func getDatasetFileName(fileName string) string {
+ slices := strings.Split(fileName, "-#,#-")
+ fileName = strings.Join(slices, ", ")
+ return fileName
+}
+
+func searchIssueOrPr(ctx *context.Context, TableName string, Key string, Page int, PageSize int, OnlyReturnNum bool, issueOrPr string) {
+
+ /*
+ Issues and pull requests, ES index: issue-es-index
+ Searched fields:
+ name character varying(255), title
+ content text, body
+ comment text, comments
+ Sorted fields:
+ updated_unix
+ */
+ SortBy := ctx.Query("SortBy")
+ ascending := ctx.QueryBool("Ascending")
+ PrivateTotal := ctx.QueryInt("PrivateTotal")
+ WebTotal := ctx.QueryInt("WebTotal")
+ language := ctx.Query("language")
+ if language == "" {
+ language = "zh-CN"
+ }
+ from := (Page - 1) * PageSize
+ if from == 0 {
+ WebTotal = 0
+ }
+ resultObj := &SearchRes{}
+ log.Info("WebTotal=" + fmt.Sprint(WebTotal))
+ log.Info("PrivateTotal=" + fmt.Sprint(PrivateTotal))
+ resultObj.Result = make([]map[string]interface{}, 0)
+ isPull := false
+ if issueOrPr == "t" {
+ isPull = true
+ }
+
+ if ctx.User != nil && (from < PrivateTotal || from == 0) {
+
+ log.Info("actor is null?:" + fmt.Sprint(ctx.User == nil))
+ issues, count, err := models.SearchPrivateIssueOrPr(Page, PageSize, Key, isPull, ctx.User.ID)
+ if err != nil {
+ ctx.JSON(200, "")
+ return
+ }
+ resultObj.PrivateTotal = count
+ issuesSize := len(issues)
+ if issuesSize > 0 {
+ log.Info("Query private repo issue number is:" + fmt.Sprint(issuesSize) + " count=" + fmt.Sprint(count))
+ makePrivateIssueOrPr(issues, resultObj, Key, language)
+ } else {
+ log.Info("not found private repo issue,keyword=" + Key)
+ }
+ if issuesSize >= PageSize {
+ if WebTotal > 0 { //next page, not first query.
+ resultObj.Total = int64(WebTotal)
+ ctx.JSON(200, resultObj)
+ return
+ }
+ }
+ } else {
+ resultObj.PrivateTotal = int64(PrivateTotal)
+ }
+
+ from = from - PrivateTotal
+ if from < 0 {
+ from = 0
+ }
+ Size := PageSize - len(resultObj.Result)
+
+ boolQ := elastic.NewBoolQuery()
+ isIssueQuery := elastic.NewTermQuery("is_pull", issueOrPr)
+
+ if Key != "" {
+ boolKeyQ := elastic.NewBoolQuery()
+ log.Info("issue Key=" + Key)
+ nameQuery := elastic.NewMatchQuery("name", Key).Boost(2).QueryName("f_first")
+ contentQuery := elastic.NewMatchQuery("content", Key).Boost(1.5).QueryName("f_second")
+ commentQuery := elastic.NewMatchQuery("comment", Key).Boost(1).QueryName("f_third")
+ boolKeyQ.Should(nameQuery, contentQuery, commentQuery)
+ boolQ.Must(isIssueQuery, boolKeyQ)
+ } else {
+ boolQ.Must(isIssueQuery)
+ }
+
+ res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Highlight(queryHighlight("name", "content", "comment")).Do(ctx.Req.Context())
+ if err == nil {
+ searchJson, _ := json.Marshal(res)
+ log.Info("searchJson=" + string(searchJson))
+ esresult := makeIssueResult(res, Key, OnlyReturnNum, language)
+
+ resultObj.Total = resultObj.PrivateTotal + esresult.Total
+ log.Info("query issue es count=" + fmt.Sprint(esresult.Total) + " total=" + fmt.Sprint(resultObj.Total))
+ resultObj.Result = append(resultObj.Result, esresult.Result...)
+ ctx.JSON(200, resultObj)
+ } else {
+ log.Info("query es error," + err.Error())
+ }
+}
+
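+// queryHighlight builds the common highlighter for the given fields; matches
+// are wrapped in <font color='red'> tags to match the private-result
+// highlighting in makeHighLight.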
+func queryHighlight(names ...string) *elastic.Highlight {
+ re := elastic.NewHighlight()
+ for i := 0; i < len(names); i++ {
+ field := &elastic.HighlighterField{
+ Name: names[i],
+ }
+ re.Fields(field)
+ }
+ re.PreTags("<font color='red'>")
+ re.PostTags("</font>")
+ return re
+}
+
+func setRepoInfo(recordSource map[string]interface{}, record map[string]interface{}) {
+ repoIdstr := recordSource["repo_id"].(string)
+ repoId, cerr := strconv.ParseInt(repoIdstr, 10, 64)
+ if cerr == nil {
+ repo, errRepo := models.GetRepositoryByID(repoId)
+ if errRepo == nil {
+ log.Info("repo_url=" + repo.FullName())
+ record["repoUrl"] = repo.FullName()
+ record["avatar"] = repo.RelAvatarLink()
+ } else {
+ log.Info("repo err=" + errRepo.Error())
+ }
+ } else {
+ log.Info("parse int err=" + cerr.Error())
+ }
+}
+
+func makePrivateIssueOrPr(issues []*models.Issue, res *SearchRes, Key string, language string) {
+ for _, issue := range issues {
+ record := make(map[string]interface{})
+ record["id"] = issue.ID
+ record["repo_id"] = issue.RepoID
+
+ repo, errRepo := models.GetRepositoryByID(issue.RepoID)
+ if errRepo == nil {
+ log.Info("repo_url=" + repo.FullName())
+ record["repoUrl"] = repo.FullName()
+ record["avatar"] = repo.RelAvatarLink()
+ } else {
+ log.Info("repo err=" + errRepo.Error())
+ }
+ record["name"] = makeHighLight(Key, issue.Title)
+ record["content"] = truncLongText(makeHighLight(Key, issue.Content), true)
+
+ if issue.IsPull {
+ pr, err1 := issue.GetPullRequest()
+ if err1 == nil && pr != nil {
+ record["pr_id"] = pr.ID
+ }
+ }
+ record["index"] = issue.Index
+ record["num_comments"] = issue.NumComments
+ record["is_closed"] = issue.IsClosed
+ record["updated_unix"] = issue.UpdatedUnix
+ record["updated_html"] = timeutil.TimeSinceUnix(repo.UpdatedUnix, language)
+ res.Result = append(res.Result, record)
+ }
+}
+
+func makeIssueResult(sRes *elastic.SearchResult, Key string, OnlyReturnNum bool, language string) *SearchRes {
+ total := sRes.Hits.TotalHits.Value
+ result := make([]map[string]interface{}, 0)
+ if !OnlyReturnNum {
+		for i, hit := range sRes.Hits.Hits {
+			log.Info("this is issue query " + fmt.Sprint(i) + " result.")
+			recordSource := make(map[string]interface{})
+			source, err := hit.Source.MarshalJSON()
+			if err == nil {
+				err = json.Unmarshal(source, &recordSource)
+			}
+			if err != nil {
+				log.Info("deal issue source error," + err.Error())
+				continue
+			}
+			record := make(map[string]interface{})
+			record["id"] = hit.Id
+			record["repo_id"] = recordSource["repo_id"]
+			log.Info("recordSource[\"repo_id\"]=" + fmt.Sprint(recordSource["repo_id"]))
+			setRepoInfo(recordSource, record)
+			record["name"] = getLabelValue("name", recordSource, hit.Highlight)
+			if recordSource["content"] != nil {
+				desc := getLabelValue("content", recordSource, hit.Highlight)
+				record["content"] = dealLongText(desc, Key, hit.MatchedQueries)
+				// if the keyword matched only a comment, show that comment instead
+				if _, ok := hit.Highlight["content"]; !ok {
+					if _, okComment := hit.Highlight["comment"]; okComment {
+						desc := getLabelValue("comment", recordSource, hit.Highlight)
+						record["content"] = trimHrefHtml(dealLongText(desc, Key, hit.MatchedQueries))
+					}
+				}
+			} else if recordSource["comment"] != nil {
+				desc := getLabelValue("comment", recordSource, hit.Highlight)
+				record["content"] = dealLongText(desc, Key, hit.MatchedQueries)
+			}
+			if recordSource["pr_id"] != nil {
+				record["pr_id"] = recordSource["pr_id"]
+			}
+			// guard the type assertions so a malformed document cannot panic
+			if index, ok := recordSource["index"].(string); ok {
+				log.Info("index=" + index)
+			}
+			record["index"] = recordSource["index"]
+			record["num_comments"] = recordSource["num_comments"]
+			record["is_closed"] = recordSource["is_closed"]
+			record["updated_unix"] = recordSource["updated_unix"]
+			if updated, ok := recordSource["updated_unix"].(string); ok {
+				setUpdateHtml(record, updated, language)
+			}
+			result = append(result, record)
+		}
+ }
+ returnObj := &SearchRes{
+ Total: total,
+ Result: result,
+ }
+
+ return returnObj
+}
diff --git a/services/socketwrap/clientManager.go b/services/socketwrap/clientManager.go
old mode 100644
new mode 100755
index 98b0e0aa9..61f356a66
--- a/services/socketwrap/clientManager.go
+++ b/services/socketwrap/clientManager.go
@@ -10,7 +10,7 @@ import (
"github.com/elliotchance/orderedmap"
)

-var opTypes = []int{1, 2, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 17, 22, 23, 25, 26, 27, 28, 29, 30}
+var opTypes = []int{1, 2, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 17, 22, 23, 25, 26, 27, 28, 29, 30, 31}

type ClientsManager struct {
Clients *orderedmap.OrderedMap
diff --git a/templates/base/head.tmpl b/templates/base/head.tmpl
index 2cecee52b..937abd588 100755
--- a/templates/base/head.tmpl
+++ b/templates/base/head.tmpl
@@ -215,10 +215,10 @@ var _hmt = _hmt || [];
localStorage.setItem("isCloseNotice",true)
}
function isShowNotice(){
- var current_notice = localStorage.getItem("notice")
+ var current_notice = localStorage.getItem("notices")
- if (current_notice != "{{.notice.CommitId}}"){
- localStorage.setItem('notice',"{{.notice.CommitId}}");
+ if (current_notice != "{{.notices.CommitId}}"){
+ localStorage.setItem('notices',"{{.notices.CommitId}}");
isNewNotice=true;
localStorage.setItem("isCloseNotice",false)
}else{
diff --git a/templates/base/head_fluid.tmpl b/templates/base/head_fluid.tmpl
index 59e542b0b..5340c7cb8 100644
--- a/templates/base/head_fluid.tmpl
+++ b/templates/base/head_fluid.tmpl
@@ -216,10 +216,10 @@ var _hmt = _hmt || [];
localStorage.setItem("isCloseNotice",true)
}
function isShowNotice(){
- var current_notice = localStorage.getItem("notice")
+ var current_notice = localStorage.getItem("notices")
- if (current_notice != "{{.notice.CommitId}}"){
- localStorage.setItem('notice',"{{.notice.CommitId}}");
+ if (current_notice != "{{.notices.CommitId}}"){
+ localStorage.setItem('notices',"{{.notices.CommitId}}");
isNewNotice=true;
localStorage.setItem("isCloseNotice",false)
}else{
diff --git a/templates/base/head_home.tmpl b/templates/base/head_home.tmpl
index 561edd5ce..25d7a92ec 100644
--- a/templates/base/head_home.tmpl
+++ b/templates/base/head_home.tmpl
@@ -220,10 +220,10 @@ var _hmt = _hmt || [];
localStorage.setItem("isCloseNotice",true)
}
function isShowNotice(){
- var current_notice = localStorage.getItem("notice")
+ var current_notice = localStorage.getItem("notices")
- if (current_notice != "{{.notice.CommitId}}"){
- localStorage.setItem('notice',"{{.notice.CommitId}}");
+ if (current_notice != "{{.notices.CommitId}}"){
+ localStorage.setItem('notices',"{{.notices.CommitId}}");
isNewNotice=true;
localStorage.setItem("isCloseNotice",false)
}else{
diff --git a/templates/base/head_navbar.tmpl b/templates/base/head_navbar.tmpl
index 0ce3f2fcf..18cf3ce0e 100755
--- a/templates/base/head_navbar.tmpl
+++ b/templates/base/head_navbar.tmpl
@@ -95,9 +95,9 @@
{{if .IsSigned}}