Browse Source

Merge branch 'fix-1591' of https://git.openi.org.cn/OpenI/aiforge into fix-1591

pull/1693/head
zhoupzh 3 years ago
parent
commit
740e033fd9
6 changed files with 162 additions and 11 deletions
  1. +17
    -7
      models/attachment.go
  2. +6
    -0
      models/dataset.go
  3. +4
    -0
      routers/repo/attachment.go
  4. +131
    -3
      routers/repo/dataset.go
  5. +4
    -1
      routers/routes/routes.go
  6. +0
    -0
      templates/repo/datasets/tasks/index.tmpl

+ 17
- 7
models/attachment.go View File

@@ -63,9 +63,12 @@ type AttachmentInfo struct {


type AttachmentsOptions struct { type AttachmentsOptions struct {
ListOptions ListOptions
DatasetID int8
DatasetIDs []int64
DecompressState int DecompressState int
Type int Type int
UploaderID int64
NeedIsPrivate bool
IsPrivate bool
NeedRepoInfo bool NeedRepoInfo bool
} }


@@ -524,27 +527,34 @@ func Attachments(opts *AttachmentsOptions) ([]*AttachmentInfo, int64, error) {
defer sess.Close() defer sess.Close()


var cond = builder.NewCond() var cond = builder.NewCond()
if opts.DatasetID > 0 {

if len(opts.DatasetIDs) > 0 {
cond = cond.And( cond = cond.And(
builder.Eq{"attachment.dataset_id": opts.DatasetID},
builder.In("attachment.dataset_id", opts.DatasetIDs),
) )
} }


if opts.DecompressState > 0 {
if opts.UploaderID > 0 {
cond = cond.And( cond = cond.And(
builder.Eq{"attachment.decompress_state": opts.DecompressState},
builder.Eq{"attachment.uploader_id": opts.UploaderID},
) )
} }


if (opts.Type) >= 0 { if (opts.Type) >= 0 {
cond = cond.And( cond = cond.And(
builder.Eq{"cloudbrain.type": opts.Type},
builder.Eq{"attachment.type": opts.Type},
)
}

if opts.NeedIsPrivate {
cond = cond.And(
builder.Eq{"attachment.is_private": opts.IsPrivate},
) )
} }


var count int64 var count int64
var err error var err error
if opts.DatasetID > 0 {
if (opts.Type) >= 0 {
count, err = sess.Where(cond).Count(new(Attachment)) count, err = sess.Where(cond).Count(new(Attachment))
} }




+ 6
- 0
models/dataset.go View File

@@ -346,6 +346,12 @@ func GetDatasetByRepo(repo *Repository) (*Dataset, error) {
} }
} }


// GetDatasetStarByUser returns all dataset stars created by the given user.
// It returns an empty (non-nil) slice when the user has starred nothing.
func GetDatasetStarByUser(user *User) ([]*DatasetStar, error) {
	datasetStars := make([]*DatasetStar, 0)
	// Bug fix: xorm's Cols takes database column names, not Go field names.
	// "DatasetID"/"CreatedUnix" do not match the snake_case columns used by
	// the rest of this query ("uid"); use the mapped column names instead.
	err := x.Cols("id", "uid", "dataset_id", "created_unix").Where("uid=?", user.ID).Find(&datasetStars)
	return datasetStars, err
}

func DeleteDataset(datasetID int64, uid int64) error { func DeleteDataset(datasetID int64, uid int64) error {
var err error var err error
sess := x.NewSession() sess := x.NewSession()


+ 4
- 0
routers/repo/attachment.go View File

@@ -814,6 +814,9 @@ func CompleteMultipart(ctx *context.Context) {
typeCloudBrain := ctx.QueryInt("type") typeCloudBrain := ctx.QueryInt("type")
fileName := ctx.Query("file_name") fileName := ctx.Query("file_name")


log.Warn("uuid:" + uuid)
log.Warn("typeCloudBrain:" + strconv.Itoa(typeCloudBrain))

err := checkTypeCloudBrain(typeCloudBrain) err := checkTypeCloudBrain(typeCloudBrain)
if err != nil { if err != nil {
ctx.ServerError("checkTypeCloudBrain failed", err) ctx.ServerError("checkTypeCloudBrain failed", err)
@@ -852,6 +855,7 @@ func CompleteMultipart(ctx *context.Context) {
return return
} }
dataset, _ := models.GetDatasetByID(ctx.QueryInt64("dataset_id")) dataset, _ := models.GetDatasetByID(ctx.QueryInt64("dataset_id"))
log.Warn("insert attachment to datasetId:" + strconv.FormatInt(dataset.ID, 10))
attachment, err := models.InsertAttachment(&models.Attachment{ attachment, err := models.InsertAttachment(&models.Attachment{
UUID: uuid, UUID: uuid,
UploaderID: ctx.User.ID, UploaderID: ctx.User.ID,


+ 131
- 3
routers/repo/dataset.go View File

@@ -283,16 +283,144 @@ func DatasetAction(ctx *context.Context) {


} }


func TasksDatasetIndex(ctx *context.Context) {
func CurrentRepoDataset(ctx *context.Context) {
page := ctx.QueryInt("page") page := ctx.QueryInt("page")
// repo := ctx.Repo.Repository
cloudbrainType := ctx.QueryInt("type")
repo := ctx.Repo.Repository
var datasetIDs []int64
dataset, err := models.GetDatasetByRepo(repo)
if err != nil {
ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("GetDatasetByRepo failed", err)))
}
datasetIDs = append(datasetIDs, dataset.ID)
uploaderID := ctx.User.ID
datasets, count, err := models.Attachments(&models.AttachmentsOptions{
ListOptions: models.ListOptions{
Page: page,
PageSize: setting.UI.IssuePagingNum,
},
DatasetIDs: datasetIDs,
UploaderID: uploaderID,
Type: cloudbrainType,
NeedIsPrivate: false,
NeedRepoInfo: true,
})
if err != nil {
ctx.ServerError("datasets", err)
return
}

data, err := json.Marshal(datasets)
if err != nil {
log.Error("json.Marshal failed:", err.Error())
ctx.JSON(200, map[string]string{
"result_code": "-1",
"error_msg": err.Error(),
"data": "",
})
return
}
ctx.JSON(200, map[string]string{
"data": string(data),
"count": strconv.FormatInt(count, 10),
})
}

// MyDataset writes, as JSON, one page of attachments uploaded by the
// signed-in user, filtered by the requested cloudbrain type.
// Query params: page, type.
func MyDataset(ctx *context.Context) {
	opts := &models.AttachmentsOptions{
		ListOptions: models.ListOptions{
			Page:     ctx.QueryInt("page"),
			PageSize: setting.UI.IssuePagingNum,
		},
		UploaderID:    ctx.User.ID,
		Type:          ctx.QueryInt("type"),
		NeedIsPrivate: false,
		NeedRepoInfo:  true,
	}

	attachments, total, err := models.Attachments(opts)
	if err != nil {
		ctx.ServerError("datasets", err)
		return
	}

	payload, err := json.Marshal(attachments)
	if err != nil {
		log.Error("json.Marshal failed:", err.Error())
		ctx.JSON(200, map[string]string{
			"result_code": "-1",
			"error_msg":   err.Error(),
			"data":        "",
		})
		return
	}

	ctx.JSON(200, map[string]string{
		"data":  string(payload),
		"count": strconv.FormatInt(total, 10),
	})
}

// PublicDataset writes, as JSON, one page of public (non-private)
// attachments, filtered by the requested cloudbrain type.
// Query params: page, type.
func PublicDataset(ctx *context.Context) {
	opts := &models.AttachmentsOptions{
		ListOptions: models.ListOptions{
			Page:     ctx.QueryInt("page"),
			PageSize: setting.UI.IssuePagingNum,
		},
		NeedIsPrivate: true,
		IsPrivate:     false, // public only
		Type:          ctx.QueryInt("type"),
		NeedRepoInfo:  true,
	}

	attachments, total, err := models.Attachments(opts)
	if err != nil {
		ctx.ServerError("datasets", err)
		return
	}

	payload, err := json.Marshal(attachments)
	if err != nil {
		log.Error("json.Marshal failed:", err.Error())
		ctx.JSON(200, map[string]string{
			"result_code": "-1",
			"error_msg":   err.Error(),
			"data":        "",
		})
		return
	}

	ctx.JSON(200, map[string]string{
		"data":  string(payload),
		"count": strconv.FormatInt(total, 10),
	})
}

func MyFavoriteDataset(ctx *context.Context) {
page := ctx.QueryInt("page")
cloudbrainType := ctx.QueryInt("type")

var datasetIDs []int64

datasetStars, err := models.GetDatasetStarByUser(ctx.User)
if err != nil {
ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("GetDatasetByRepo failed", err)))
}
for i, _ := range datasetStars {
datasetIDs = append(datasetIDs, datasetStars[i].DatasetID)
}


datasets, count, err := models.Attachments(&models.AttachmentsOptions{ datasets, count, err := models.Attachments(&models.AttachmentsOptions{
ListOptions: models.ListOptions{ ListOptions: models.ListOptions{
Page: page, Page: page,
PageSize: setting.UI.IssuePagingNum, PageSize: setting.UI.IssuePagingNum,
}, },
Type: models.TypeCloudBrainTwo,
DatasetIDs: datasetIDs,
NeedIsPrivate: true,
IsPrivate: false,
Type: cloudbrainType,
NeedRepoInfo: true,
}) })
if err != nil { if err != nil {
ctx.ServerError("datasets", err) ctx.ServerError("datasets", err)


+ 4
- 1
routers/routes/routes.go View File

@@ -985,7 +985,10 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Post("/create", reqRepoDatasetWriter, bindIgnErr(auth.CreateDatasetForm{}), repo.CreateDatasetPost) m.Post("/create", reqRepoDatasetWriter, bindIgnErr(auth.CreateDatasetForm{}), repo.CreateDatasetPost)
m.Get("/edit/:id", reqRepoDatasetWriter, repo.EditDataset) m.Get("/edit/:id", reqRepoDatasetWriter, repo.EditDataset)
m.Post("/edit", reqRepoDatasetWriter, bindIgnErr(auth.EditDatasetForm{}), repo.EditDatasetPost) m.Post("/edit", reqRepoDatasetWriter, bindIgnErr(auth.EditDatasetForm{}), repo.EditDatasetPost)
m.Get("/tasks", reqRepoDatasetReader, repo.TasksDatasetIndex)
m.Get("/current_repo", reqRepoDatasetReader, repo.CurrentRepoDataset)
m.Get("/my_datasets", reqRepoDatasetReader, repo.MyDataset)
m.Get("/public_datasets", reqRepoDatasetReader, repo.PublicDataset)
m.Get("/my_favorite", reqRepoDatasetReader, repo.MyFavoriteDataset)


m.Group("/dirs", func() { m.Group("/dirs", func() {
m.Get("/:uuid", reqRepoDatasetReader, repo.DirIndex) m.Get("/:uuid", reqRepoDatasetReader, repo.DirIndex)


+ 0
- 0
templates/repo/datasets/tasks/index.tmpl View File


Loading…
Cancel
Save