Browse Source

Merge branch 'fix-1591' of https://git.openi.org.cn/OpenI/aiforge into fix-1591

pull/1693/head
liuzx 3 years ago
parent
commit
adaf461f86
14 changed files with 348 additions and 110 deletions
  1. +1
    -0
      models/attachment.go
  2. +16
    -0
      models/base_message.go
  3. +41
    -28
      models/dataset.go
  4. +62
    -0
      models/dataset_star.go
  5. +19
    -4
      models/repo.go
  6. +5
    -4
      models/user.go
  7. +4
    -6
      modules/auth/dataset.go
  8. +5
    -0
      options/locale/locale_en-US.ini
  9. +7
    -0
      options/locale/locale_zh-CN.ini
  10. +13
    -0
      routers/home.go
  11. +28
    -12
      routers/repo/attachment.go
  12. +141
    -51
      routers/repo/dataset.go
  13. +0
    -4
      routers/repo/setting.go
  14. +6
    -1
      routers/routes/routes.go

+ 1
- 0
models/attachment.go View File

@@ -38,6 +38,7 @@ type Attachment struct {
UploaderID int64 `xorm:"INDEX DEFAULT 0"` // Notice: will be zero before this column added
CommentID int64
Name string
Description string `xorm:"TEXT"`
DownloadCount int64 `xorm:"DEFAULT 0"`
Size int64 `xorm:"DEFAULT 0"`
IsPrivate bool `xorm:"DEFAULT false"`


+ 16
- 0
models/base_message.go View File

@@ -0,0 +1,16 @@
package models

// BaseMessage is the common JSON envelope returned by dataset handlers:
// Code 0 means success; any non-zero Code carries a human-readable Message.
type BaseMessage struct {
	Code    int
	Message string
}

// BaseOKMessage is the canonical success response (Code 0, empty Message).
var BaseOKMessage = BaseMessage{
	Code:    0,
	Message: "",
}

// BaseErrorMessage wraps message in a failure response with Code 1.
func BaseErrorMessage(message string) BaseMessage {
	return BaseMessage{
		Code:    1,
		Message: message,
	}
}

+ 41
- 28
models/dataset.go View File

@@ -22,6 +22,7 @@ type Dataset struct {
Category string
Description string `xorm:"TEXT"`
DownloadTimes int64
NumStars int `xorm:"INDEX NOT NULL DEFAULT 0"`
License string
Task string
ReleaseID int64 `xorm:"INDEX"`
@@ -91,6 +92,9 @@ type SearchDatasetOptions struct {
OwnerID int64
RepoID int64
IncludePublic bool
Category string
Task string
License string
ListOptions
SearchOrderBy
IsOwner bool
@@ -104,22 +108,6 @@ func CreateDataset(dataset *Dataset) (err error) {
return nil
}

func CreateDefaultDatasetToRepo(repo *Repository) (err error) {
dataset := &Dataset{RepoID: repo.ID}
has, err := x.Get(dataset)
if err != nil {
return err
}
if !has {
dataset.Status = DatasetStatusPrivate
dataset.Title = repo.Name
if err = CreateDataset(dataset); err != nil {
return err
}
}
return nil
}

func SearchDataset(opts *SearchDatasetOptions) (DatasetList, int64, error) {
cond := SearchDatasetCondition(opts)
return SearchDatasetByCondition(opts, cond)
@@ -133,12 +121,24 @@ func SearchDatasetCondition(opts *SearchDatasetOptions) builder.Cond {
cond = cond.And(builder.Like{"dataset.title", opts.Keyword})
}

if len(opts.Category) > 0 {
cond = cond.And(builder.Eq{"dataset.category": opts.Category})
}

if len(opts.Task) > 0 {
cond = cond.And(builder.Eq{"dataset.task": opts.Task})
}
if len(opts.License) > 0 {
cond = cond.And(builder.Eq{"dataset.license": opts.License})
}

if opts.RepoID > 0 {
cond = cond.And(builder.Eq{"dataset.repo_id": opts.RepoID})
}

if opts.IncludePublic {
cond = cond.And(builder.Eq{"dataset.status": DatasetStatusPublic})
cond = cond.And(builder.Eq{"attachment.is_private": false})
if opts.OwnerID > 0 {
if len(opts.Keyword) == 0 {
cond = cond.Or(builder.Eq{"repository.owner_id": opts.OwnerID})
@@ -153,6 +153,7 @@ func SearchDatasetCondition(opts *SearchDatasetOptions) builder.Cond {
cond = cond.And(builder.Eq{"repository.owner_id": opts.OwnerID})
if !opts.IsOwner {
cond = cond.And(builder.Eq{"dataset.status": DatasetStatusPublic})
cond = cond.And(builder.Eq{"attachment.is_private": false})
}
}

@@ -169,14 +170,19 @@ func SearchDatasetByCondition(opts *SearchDatasetOptions, cond builder.Cond) (Da
defer sess.Close()

datasets := make(DatasetList, 0, opts.PageSize)
selectColumnsSql := "distinct dataset.id,dataset.title, dataset.status, dataset.category, dataset.description, dataset.download_times, dataset.license, dataset.task, dataset.release_id, dataset.user_id, dataset.repo_id, dataset.created_unix,dataset.updated_unix,dataset.num_stars"

count, err := sess.Join("INNER", "repository", "repository.id = dataset.repo_id").Where(cond).Count(new(Dataset))
count, err := sess.Select(selectColumnsSql).Join("INNER", "repository", "repository.id = dataset.repo_id").
Join("INNER", "attachment", "attachment.dataset_id=dataset.id").
Where(cond).Count(new(Dataset))

if err != nil {
return nil, 0, fmt.Errorf("Count: %v", err)
}

sess.Select("dataset.*").Join("INNER", "repository", "repository.id = dataset.repo_id").Where(cond).OrderBy(opts.SearchOrderBy.String())
sess.Select(selectColumnsSql).Join("INNER", "repository", "repository.id = dataset.repo_id").
Join("INNER", "attachment", "attachment.dataset_id=dataset.id").
Where(cond).OrderBy(opts.SearchOrderBy.String())
if opts.PageSize > 0 {
sess.Limit(opts.PageSize, (opts.Page-1)*opts.PageSize)
}
@@ -231,13 +237,23 @@ func getDatasetAttachments(e Engine, typeCloudBrain int, isSigned bool, user *Us
sort.Sort(sortedRels)

// Select attachments
err = e.
Asc("dataset_id").
In("dataset_id", sortedRels.ID).
And("type = ?", typeCloudBrain).
Find(&attachments, Attachment{})
if err != nil {
return err
if typeCloudBrain == -1 {
err = e.
Asc("dataset_id").
In("dataset_id", sortedRels.ID).
Find(&attachments, Attachment{})
if err != nil {
return err
}
} else {
err = e.
Asc("dataset_id").
In("dataset_id", sortedRels.ID).
And("type = ?", typeCloudBrain).
Find(&attachments, Attachment{})
if err != nil {
return err
}
}

// merge join
@@ -301,9 +317,6 @@ func GetDatasetByID(id int64) (*Dataset, error) {
}

func GetDatasetByRepo(repo *Repository) (*Dataset, error) {
if err := CreateDefaultDatasetToRepo(repo); err != nil {
return nil, err
}
dataset := &Dataset{RepoID: repo.ID}
has, err := x.Get(dataset)
if err != nil {


+ 62
- 0
models/dataset_star.go View File

@@ -0,0 +1,62 @@
package models

import "code.gitea.io/gitea/modules/timeutil"

// DatasetStar records that one user has starred one dataset.
// (UID, DatasetID) form a unique key, so a user can star a given dataset
// at most once.
type DatasetStar struct {
	ID int64 `xorm:"pk autoincr"`
	UID int64 `xorm:"UNIQUE(s)"` // id of the starring user
	DatasetID int64 `xorm:"UNIQUE(s)"` // id of the starred dataset
	CreatedUnix timeutil.TimeStamp `xorm:"created"` // filled in by xorm on insert
}

// StarDataset stars (star == true) or unstars (star == false) a dataset for
// the given user, keeping dataset.num_stars and user.num_dataset_stars in
// sync inside a single transaction. It is a no-op (and returns nil) when the
// requested state already holds.
func StarDataset(userID, datasetID int64, star bool) error {
	sess := x.NewSession()
	defer sess.Close()

	if err := sess.Begin(); err != nil {
		return err
	}

	if star {
		if isDatasetStaring(sess, userID, datasetID) {
			return nil
		}
		if _, err := sess.Insert(&DatasetStar{UID: userID, DatasetID: datasetID}); err != nil {
			return err
		}
		if _, err := sess.Exec("UPDATE `dataset` SET num_stars = num_stars + 1 WHERE id = ?", datasetID); err != nil {
			return err
		}
		if _, err := sess.Exec("UPDATE `user` SET num_dataset_stars = num_dataset_stars + 1 WHERE id = ?", userID); err != nil {
			return err
		}
		return sess.Commit()
	}

	if !isDatasetStaring(sess, userID, datasetID) {
		return nil
	}
	if _, err := sess.Delete(&DatasetStar{UID: userID, DatasetID: datasetID}); err != nil {
		return err
	}
	if _, err := sess.Exec("UPDATE `dataset` SET num_stars = num_stars - 1 WHERE id = ?", datasetID); err != nil {
		return err
	}
	if _, err := sess.Exec("UPDATE `user` SET num_dataset_stars = num_dataset_stars - 1 WHERE id = ?", userID); err != nil {
		return err
	}
	return sess.Commit()
}

// IsDatasetStaring reports whether the given user has starred the given dataset.
func IsDatasetStaring(userID, datasetID int64) bool {
	return isDatasetStaring(x, userID, datasetID)
}

// isDatasetStaring checks for a DatasetStar row on the given engine/session;
// a query error is deliberately treated as "not starred".
func isDatasetStaring(e Engine, userID, datasetID int64) bool {
	exists, _ := e.Get(&DatasetStar{UID: userID, DatasetID: datasetID})
	return exists
}

+ 19
- 4
models/repo.go View File

@@ -1280,10 +1280,6 @@ func CreateRepository(ctx DBContext, doer, u *User, repo *Repository, opts ...Cr
return fmt.Errorf("copyDefaultWebhooksToRepo: %v", err)
}

if err = CreateDefaultDatasetToRepo(repo); err != nil {
return fmt.Errorf("models.CreateDefaultDatasetToRepo: %v", err)
}

return nil
}

@@ -1586,6 +1582,25 @@ func updateRepository(e Engine, repo *Repository, visibilityChanged bool) (err e
if err != nil {
return err
}
// If the repo has become private, its dataset and dataset files (attachments) must be set to private as well
_, err = e.Where("repo_id = ?", repo.ID).Cols("status").Update(&Dataset{
Status: 0,
})
if err != nil {
return err
}

dataset, err := GetDatasetByRepo(repo)
if err != nil {
return err
}
_, err = e.Where("dataset_id = ?", dataset.ID).Cols("is_private").Update(&Attachment{
IsPrivate: true,
})
if err != nil {
return err
}

}

// Create/Remove git-daemon-export-ok for git-daemon...


+ 5
- 4
models/user.go View File

@@ -153,10 +153,11 @@ type User struct {
UseCustomAvatar bool

// Counters
NumFollowers int
NumFollowing int `xorm:"NOT NULL DEFAULT 0"`
NumStars int
NumRepos int
NumFollowers int
NumFollowing int `xorm:"NOT NULL DEFAULT 0"`
NumStars int
NumDatasetStars int `xorm:"NOT NULL DEFAULT 0"`
NumRepos int

// For organization
NumTeams int


+ 4
- 6
modules/auth/dataset.go View File

@@ -9,11 +9,10 @@ import (
type CreateDatasetForm struct {
Title string `binding:"Required"`
Category string `binding:"Required"`
Description string `binding:"Required;MaxSize(254)"`
Description string `binding:"Required"`
License string `binding:"Required;MaxSize(64)"`
Task string `binding:"Required;MaxSize(64)"`
ReleaseID int64 `xorm:"INDEX"`
Private bool
Files []string
}

@@ -25,11 +24,10 @@ type EditDatasetForm struct {
ID int64 `binding:"Required"`
Title string `binding:"Required"`
Category string `binding:"Required"`
Description string `binding:"Required;MaxSize(254)"`
Description string `binding:"Required"`
License string `binding:"Required;MaxSize(64)"`
Task string `binding:"Required;MaxSize(64)"`
Private bool
ReleaseID int64 `xorm:"INDEX"`
ReleaseID int64 `xorm:"INDEX"`
Files []string
Type string `binding:"Required"`
Type string `binding:"Required"`
}

+ 5
- 0
options/locale/locale_en-US.ini View File

@@ -709,8 +709,12 @@ alert = To initiate a cloud brain task, please upload the dataset in zip format.
dataset = Dataset
dataset_setting= Dataset Setting
title = Name
title_format_err=Name can only contain number,letter,'-','_' or '.', and can be up to 100 characters long.
description = Description
description_format_err=Description's length can be up to 1024 characters long.
create_dataset = Create Dataset
create_dataset_fail=Failed to create dataset.
query_dataset_fail=Failed to query dataset.
show_dataset= Dataset
edit_dataset= Edit Dataset
update_dataset= Update Dataset
@@ -1061,6 +1065,7 @@ unstar = Unstar
star = Star
fork = Fork
download_archive = Download Repository
star_fail=Failed to %s the dataset.

no_desc = No Description
no_label = No labels


+ 7
- 0
options/locale/locale_zh-CN.ini View File

@@ -712,8 +712,13 @@ alert=如果要发起云脑任务,请上传zip格式的数据集
dataset=数据集
dataset_setting=数据集设置
title=名称
title_format_err=名称最多允许输入100个字符,只允许字母,数字,中划线 (‘-’),下划线 (‘_’) 和点 (‘.’) 。
description=描述
description_format_err=描述最多允许输入1024个字符。
create_dataset=创建数据集
create_dataset_fail=创建数据集失败。
query_dataset_fail=查询数据集失败

show_dataset=数据集
edit_dataset=编辑数据集
update_dataset=更新数据集
@@ -1070,6 +1075,8 @@ unstar=取消点赞
star=点赞
fork=派生
download_archive=下载此项目
star_fail=%s失败。


no_desc=暂无描述
no_label = 暂无标签


+ 13
- 0
routers/home.go View File

@@ -301,6 +301,10 @@ func ExploreDatasets(ctx *context.Context) {
orderBy = models.SearchOrderBySizeReverse
case "downloadtimes":
orderBy = models.SearchOrderByDownloadTimes
case "moststars":
orderBy = models.SearchOrderByStarsReverse
case "feweststars":
orderBy = models.SearchOrderByStars
default:
ctx.Data["SortType"] = "recentupdate"
orderBy = models.SearchOrderByRecentUpdated
@@ -308,6 +312,9 @@ func ExploreDatasets(ctx *context.Context) {

keyword := strings.Trim(ctx.Query("q"), " ")

category := ctx.Query("category")
task := ctx.Query("task")
license := ctx.Query("license")
var ownerID int64
if ctx.User != nil && !ctx.User.IsAdmin {
ownerID = ctx.User.ID
@@ -316,6 +323,9 @@ func ExploreDatasets(ctx *context.Context) {
Keyword: keyword,
IncludePublic: true,
SearchOrderBy: orderBy,
Category: category,
Task: task,
License: license,
OwnerID: ownerID,
ListOptions: models.ListOptions{
Page: page,
@@ -331,6 +341,9 @@ func ExploreDatasets(ctx *context.Context) {

pager := context.NewPagination(int(count), opts.PageSize, page, 5)
ctx.Data["Keyword"] = opts.Keyword
ctx.Data["Category"] = category
ctx.Data["Task"] = task
ctx.Data["License"] = license
pager.SetDefaultParams(ctx)
ctx.Data["Page"] = pager



+ 28
- 12
routers/repo/attachment.go View File

@@ -15,6 +15,8 @@ import (
"strconv"
"strings"

"code.gitea.io/gitea/modules/base"

"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/labelmsg"
@@ -30,8 +32,9 @@ import (

const (
//result of decompress
DecompressSuccess = "0"
DecompressFailed = "1"
DecompressSuccess = "0"
DecompressFailed = "1"
tplAttachmentUpload base.TplName = "repo/attachment/upload"
)

type CloudBrainDataset struct {
@@ -63,6 +66,12 @@ func renderAttachmentSettings(ctx *context.Context) {
ctx.Data["AttachmentMaxFiles"] = setting.Attachment.MaxFiles
}

// UploadAttachmentUI renders the standalone attachment-upload page for the
// dataset passed via the "datasetId" query parameter.
func UploadAttachmentUI(ctx *context.Context) {
	ctx.Data["datasetId"] = ctx.Query("datasetId")
	ctx.HTML(200, tplAttachmentUpload)

}

// UploadAttachment response for uploading issue's attachment
func UploadAttachment(ctx *context.Context) {
if !setting.Attachment.Enabled {
@@ -387,11 +396,17 @@ func AddAttachment(ctx *context.Context) {
ctx.Error(404, "attachment has not been uploaded")
return
}
datasetId := ctx.QueryInt64("dataset_id")
dataset, err := models.GetDatasetByID(datasetId)
if err != nil {
ctx.Error(404, "dataset does not exist.")
return
}

attachment, err := models.InsertAttachment(&models.Attachment{
UUID: uuid,
UploaderID: ctx.User.ID,
IsPrivate: true,
IsPrivate: dataset.IsPrivate(),
Name: fileName,
Size: ctx.QueryInt64("size"),
DatasetID: ctx.QueryInt64("dataset_id"),
@@ -830,22 +845,23 @@ func CompleteMultipart(ctx *context.Context) {
ctx.Error(500, fmt.Sprintf("UpdateFileChunk: %v", err))
return
}
dataset, _ := models.GetDatasetByID(ctx.QueryInt64("dataset_id"))
attachment, err := models.InsertAttachment(&models.Attachment{
UUID: uuid,
UploaderID: ctx.User.ID,
IsPrivate: true,
Name: fileName,
Size: ctx.QueryInt64("size"),
DatasetID: ctx.QueryInt64("dataset_id"),
Type: typeCloudBrain,
UUID: uuid,
UploaderID: ctx.User.ID,
IsPrivate: dataset.IsPrivate(),
Name: fileName,
Size: ctx.QueryInt64("size"),
DatasetID: ctx.QueryInt64("dataset_id"),
Description: ctx.Query("description"),
Type: typeCloudBrain,
})

if err != nil {
ctx.Error(500, fmt.Sprintf("InsertAttachment: %v", err))
return
}
dataset, _ := models.GetDatasetByID(attachment.DatasetID)
repository, _ := models.GetRepositoryByID(dataset.RepoID)
notification.NotifyOtherTask(ctx.User, repository, fmt.Sprint(attachment.Type), attachment.Name, models.ActionUploadAttachment)



+ 141
- 51
routers/repo/dataset.go View File

@@ -1,7 +1,11 @@
package repo

import (
"net/http"
"regexp"
"sort"
"strconv"
"unicode/utf8"

"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/auth"
@@ -12,9 +16,13 @@ import (
)

const (
tplIndex base.TplName = "repo/datasets/index"
tplIndex base.TplName = "repo/datasets/index"
tplDatasetCreate base.TplName = "repo/datasets/create"
tplDatasetEdit base.TplName = "repo/datasets/edit"
)

var titlePattern = regexp.MustCompile(`^[A-Za-z0-9-_\\.]{1,100}$`)

// MustEnableDataset check if repository enable internal dataset
func MustEnableDataset(ctx *context.Context) {
if !ctx.Repo.CanRead(models.UnitTypeDatasets) {
@@ -84,21 +92,11 @@ func QueryDataSet(ctx *context.Context) []*models.Attachment {
attachments := newFilterPrivateAttachments(ctx, dataset.Attachments, repo)

ctx.Data["SortType"] = ctx.Query("sort")
switch ctx.Query("sort") {
case "newest":
sort.Slice(attachments, func(i, j int) bool {
return attachments[i].CreatedUnix > attachments[j].CreatedUnix
})
case "oldest":
sort.Slice(attachments, func(i, j int) bool {
return attachments[i].CreatedUnix < attachments[j].CreatedUnix
})
default:
ctx.Data["SortType"] = "newest"
sort.Slice(attachments, func(i, j int) bool {
return attachments[i].CreatedUnix > attachments[j].CreatedUnix
})
}

sort.Slice(attachments, func(i, j int) bool {
return attachments[i].CreatedUnix > attachments[j].CreatedUnix
})

return attachments
}

@@ -110,17 +108,16 @@ func DatasetIndex(ctx *context.Context) {

dataset, err := models.GetDatasetByRepo(repo)
if err != nil {
log.Error("query dataset, not found repo.")
ctx.NotFound("GetDatasetByRepo", err)
log.Warn("query dataset, not found.")
ctx.HTML(200, tplIndex)
return
}
cloudbrainType := -1
if ctx.Query("type") != "" {

if ctx.Query("type") == "" {
log.Error("query dataset, not found param type")
ctx.NotFound("type error", nil)
return
cloudbrainType = ctx.QueryInt("type")
}
err = models.GetDatasetAttachments(ctx.QueryInt("type"), ctx.IsSigned, ctx.User, dataset)
err = models.GetDatasetAttachments(cloudbrainType, ctx.IsSigned, ctx.User, dataset)
if err != nil {
ctx.ServerError("GetDatasetAttachments", err)
return
@@ -128,53 +125,130 @@ func DatasetIndex(ctx *context.Context) {

attachments := newFilterPrivateAttachments(ctx, dataset.Attachments, repo)

ctx.Data["SortType"] = ctx.Query("sort")
switch ctx.Query("sort") {
case "newest":
sort.Slice(attachments, func(i, j int) bool {
return attachments[i].CreatedUnix > attachments[j].CreatedUnix
})
case "oldest":
sort.Slice(attachments, func(i, j int) bool {
return attachments[i].CreatedUnix < attachments[j].CreatedUnix
})
default:
ctx.Data["SortType"] = "newest"
sort.Slice(attachments, func(i, j int) bool {
return attachments[i].CreatedUnix > attachments[j].CreatedUnix
})
sort.Slice(attachments, func(i, j int) bool {
return attachments[i].CreatedUnix > attachments[j].CreatedUnix
})

page := ctx.QueryInt("page")
if page <= 0 {
page = 1
}
pagesize := ctx.QueryInt("pagesize")
if pagesize <= 0 {
pagesize = setting.UI.ExplorePagingNum
}
pager := context.NewPagination(len(attachments), pagesize, page, 5)

pageAttachments := getPageAttachments(attachments, page, pagesize)

ctx.Data["Page"] = pager
ctx.Data["PageIsDataset"] = true
ctx.Data["Title"] = ctx.Tr("dataset.show_dataset")
ctx.Data["Link"] = ctx.Repo.RepoLink + "/datasets"
ctx.Data["dataset"] = dataset
ctx.Data["Attachments"] = attachments
ctx.Data["Attachments"] = pageAttachments
ctx.Data["IsOwner"] = true
ctx.Data["StoreType"] = setting.Attachment.StoreType
ctx.Data["Type"] = ctx.QueryInt("type")
ctx.Data["Type"] = cloudbrainType

renderAttachmentSettings(ctx)

ctx.HTML(200, tplIndex)
}

// getPageAttachments returns the page-th slice (1-based) of pagesize
// attachments. The final page may be shorter than pagesize; a page that
// starts past the end of the list yields nil.
func getPageAttachments(attachments []*models.Attachment, page int, pagesize int) []*models.Attachment {
	begin := (page - 1) * pagesize
	if begin >= len(attachments) {
		return nil
	}
	end := begin + pagesize
	if end > len(attachments) {
		end = len(attachments)
	}
	return attachments[begin:end]
}

// CreateDataset renders the "create dataset" form page.
func CreateDataset(ctx *context.Context) {

	// Writes a 404 when the dataset unit is not readable for this repo.
	// NOTE(review): execution continues to ctx.HTML afterwards — confirm
	// MustEnableDataset short-circuits rendering as intended.
	MustEnableDataset(ctx)

	ctx.HTML(200, tplDatasetCreate)
}

// EditDataset renders the "edit dataset" form for the dataset whose id is
// given by the ":id" URL parameter, or responds 404 when it cannot be found.
func EditDataset(ctx *context.Context) {

	MustEnableDataset(ctx)
	// A malformed :id parses as 0 here; presumably the nil check below then
	// turns that into a 404, which is why the parse error is ignored.
	datasetId, _ := strconv.ParseInt(ctx.Params(":id"), 10, 64)

	// Lookup error ignored — assumes GetDatasetByID returns a nil dataset on
	// failure so the nil check below covers it; TODO confirm.
	dataset, _ := models.GetDatasetByID(datasetId)
	if dataset == nil {
		ctx.Error(http.StatusNotFound, "")
		return
	}
	ctx.Data["Dataset"] = dataset

	ctx.HTML(200, tplDatasetEdit)
}

// CreateDatasetPost handles submission of the "create dataset" form. It
// validates the title and description, fills a new Dataset owned by the
// current user and repository, and answers with a BaseMessage JSON envelope
// (BaseOKMessage on success, a localized error message otherwise).
func CreateDatasetPost(ctx *context.Context, form auth.CreateDatasetForm) {

	dataset := &models.Dataset{}

	// Title: letters, digits, '-', '_', '.', at most 100 chars (titlePattern).
	if !titlePattern.MatchString(form.Title) {
		ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.title_format_err")))
		return
	}
	// Description is limited to 1024 runes (not bytes).
	if utf8.RuneCountInString(form.Description) > 1024 {
		ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.description_format_err")))
		return
	}

	dataset.RepoID = ctx.Repo.Repository.ID
	dataset.UserID = ctx.User.ID
	dataset.Category = form.Category
	dataset.Task = form.Task
	dataset.Title = form.Title
	dataset.License = form.License
	dataset.Description = form.Description
	dataset.DownloadTimes = 0
	// A dataset starts public only when its repository is public.
	if ctx.Repo.Repository.IsPrivate {
		dataset.Status = models.DatasetStatusPrivate
	} else {
		dataset.Status = models.DatasetStatusPublic
	}
	if err := models.CreateDataset(dataset); err != nil {
		// BUG FIX: the error was passed without a format verb
		// (log.Error("fail to create dataset", err)), so the cause never
		// reached the log; give it a %v verb.
		log.Error("fail to create dataset: %v", err)
		ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.create_dataset_fail")))
	} else {
		ctx.JSON(http.StatusOK, models.BaseOKMessage)
	}

}

func EditDatasetPost(ctx *context.Context, form auth.EditDatasetForm) {
ctx.Data["PageIsDataset"] = true

ctx.Data["Title"] = ctx.Tr("dataset.edit_dataset")

if !titlePattern.MatchString(form.Title) {
ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.title_format_err")))
return
}
if utf8.RuneCountInString(form.Description) > 1024 {
ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.description_format_err")))
return
}

rel, err := models.GetDatasetByID(form.ID)
ctx.Data["dataset"] = rel

if err != nil {
ctx.ServerError("GetDataset", err)
return
}

if ctx.HasError() {
ctx.Data["Error"] = true
ctx.HTML(200, tplIndex)
log.Error("failed to query dataset", err)
ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.query_dataset_fail")))
return
}

@@ -184,9 +258,25 @@ func EditDatasetPost(ctx *context.Context, form auth.EditDatasetForm) {
rel.Task = form.Task
rel.License = form.License
if err = models.UpdateDataset(models.DefaultDBContext(), rel); err != nil {
ctx.Data["Error"] = true
ctx.HTML(200, tplIndex)
log.Error("%v", err)
ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.query_dataset_fail")))
}
ctx.Redirect(ctx.Repo.RepoLink + "/datasets?type=" + form.Type)
ctx.JSON(http.StatusOK, models.BaseOKMessage)
}

// DatasetAction stars or unstars the dataset identified by the ":id" URL
// parameter on behalf of the signed-in user. The ":action" parameter selects
// "star" or "unstar"; the outcome is reported as a BaseMessage JSON envelope.
func DatasetAction(ctx *context.Context) {
	action := ctx.Params(":action")
	// BUG FIX: a malformed id used to be silently parsed as 0 and then
	// written to the star table; reject it instead.
	datasetId, err := strconv.ParseInt(ctx.Params(":id"), 10, 64)
	if err != nil {
		ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("repo.star_fail", action)))
		return
	}
	switch action {
	case "star":
		err = models.StarDataset(ctx.User.ID, datasetId, true)
	case "unstar":
		err = models.StarDataset(ctx.User.ID, datasetId, false)
	default:
		// BUG FIX: an unknown action used to fall through with err == nil
		// and report success.
		ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("repo.star_fail", action)))
		return
	}
	if err != nil {
		ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("repo.star_fail", action)))
	} else {
		ctx.JSON(http.StatusOK, models.BaseOKMessage)
	}
}

+ 0
- 4
routers/repo/setting.go View File

@@ -245,10 +245,6 @@ func SettingsPost(ctx *context.Context, form auth.RepoSettingForm) {
// This section doesn't require repo_name/RepoName to be set in the form, don't show it
// as an error on the UI for this action
ctx.Data["Err_RepoName"] = nil
if err := models.CreateDefaultDatasetToRepo(repo); err != nil {
ctx.ServerError("CreateDefaultDatasetToRepo", err)
return
}

if form.EnableDataset && !models.UnitTypeDatasets.UnitGlobalDisabled() {
units = append(units, models.RepoUnit{


+ 6
- 1
routers/routes/routes.go View File

@@ -586,6 +586,7 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Post("/delete", repo.DeleteAttachment)
m.Get("/get_pre_url", repo.GetPresignedPutObjectURL)
m.Post("/add", repo.AddAttachment)
m.Get("/upload", repo.UploadAttachmentUI)
m.Post("/private", repo.UpdatePublicAttachment)
m.Get("/get_chunks", repo.GetSuccessChunks)
m.Get("/new_multipart", repo.NewMultipart)
@@ -978,7 +979,11 @@ func RegisterRoutes(m *macaron.Macaron) {

m.Group("/datasets", func() {
m.Get("", reqRepoDatasetReader, repo.DatasetIndex)
m.Post("", reqRepoDatasetWriter, bindIgnErr(auth.EditDatasetForm{}), repo.EditDatasetPost)
m.Put("/:id/:action", reqRepoDatasetReader, repo.DatasetAction)
m.Get("/create", reqRepoDatasetWriter, repo.CreateDataset)
m.Post("/create", reqRepoDatasetWriter, bindIgnErr(auth.CreateDatasetForm{}), repo.CreateDatasetPost)
m.Get("/edit/:id", reqRepoDatasetWriter, repo.EditDataset)
m.Post("/edit", reqRepoDatasetWriter, bindIgnErr(auth.EditDatasetForm{}), repo.EditDatasetPost)

m.Group("/dirs", func() {
m.Get("/:uuid", reqRepoDatasetReader, repo.DirIndex)


Loading…
Cancel
Save