@@ -560,6 +560,35 @@ func GetAttachmentSizeByDatasetID(datasetID int64) (int64, error) {
    return total, nil
}
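// AttachmentsByDatasetOption returns the attachments that belong to the given
// dataset IDs, filtered by the zip-file, visibility, cloudbrain-type and
// uploader restrictions carried in opts.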
func AttachmentsByDatasetOption(datasets []int64, opts *SearchDatasetOptions) ([]*Attachment, error) {
    sess := x.NewSession()
    defer sess.Close()

    var cond = builder.NewCond()
    cond = cond.And(builder.In("attachment.dataset_id", datasets))
    if opts.JustNeedZipFile {
        cond = cond.And(builder.Gt{"attachment.decompress_state": 0})
    }
    if opts.PublicOnly {
        cond = cond.And(builder.Eq{"attachment.is_private": false})
    }
    if opts.CloudBrainType >= 0 {
        cond = cond.And(builder.Eq{"attachment.type": opts.CloudBrainType})
    }
    if opts.UploadAttachmentByMe {
        cond = cond.And(
            builder.Eq{"attachment.uploader_id": opts.OwnerID},
        )
    }

    attachments := make([]*Attachment, 0)
    if err := sess.Table(&Attachment{}).Where(cond).
        Find(&attachments); err != nil {
        return nil, fmt.Errorf("Find: %v", err)
    }
    return attachments, nil
}
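// GetAllAttachmentSize returns the total size, in bytes, of all attachments.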
func GetAllAttachmentSize() (int64, error) {
    return x.SumInt(&Attachment{}, "size")
}
@@ -81,12 +81,14 @@ func (datasets DatasetList) loadAttributes(e Engine) error {
    if err := e.
        Where("id > 0").
        In("id", keysInt64(userIdSet)).
        Cols("id", "lower_name", "name", "full_name", "email").
        Find(&users); err != nil {
        return fmt.Errorf("find users: %v", err)
    }

    if err := e.
        Where("id > 0").
        In("id", keysInt64(set)).
        Cols("id", "owner_id", "owner_name", "lower_name", "name", "description", "alias", "lower_alias").
        Find(&repos); err != nil {
        return fmt.Errorf("find repos: %v", err)
    }
@@ -98,19 +100,77 @@ func (datasets DatasetList) loadAttributes(e Engine) error {
    return nil
}
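// loadAttachmentAttributes loads the attachments selected by opts into each
// dataset; when the search is restricted to starred datasets it also checks
// the caller's repository permission before exposing private attachments.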
func (datasets DatasetList) loadAttachmentAttributes(opts *SearchDatasetOptions) error {
    if len(datasets) == 0 {
        return nil
    }
    datasetIDs := make([]int64, len(datasets))
    for i := range datasets {
        datasetIDs[i] = datasets[i].ID
    }
    attachments, err := AttachmentsByDatasetOption(datasetIDs, opts)
    if err != nil {
        return fmt.Errorf("AttachmentsByDatasetOption failed: %v", err)
    }
    permissionMap := make(map[int64]*Permission, len(datasets))
    for _, attachment := range attachments {
        for i := range datasets {
            if attachment.DatasetID == datasets[i].ID {
                if opts.StarByMe {
                    var permission *Permission
                    if permission = permissionMap[datasets[i].ID]; permission == nil {
                        permissionInstance, err := GetUserRepoPermission(datasets[i].Repo, opts.User)
                        if err != nil {
                            return fmt.Errorf("GetUserRepoPermission failed: %v", err)
                        }
                        permission = &permissionInstance
                        permissionMap[datasets[i].ID] = permission
                    }
                    if permission.HasAccess() {
                        datasets[i].Attachments = append(datasets[i].Attachments, attachment)
                    } else if !attachment.IsPrivate {
                        datasets[i].Attachments = append(datasets[i].Attachments, attachment)
                    }
                } else {
                    datasets[i].Attachments = append(datasets[i].Attachments, attachment)
                }
            }
        }
    }
    for i := range datasets {
        datasets[i].Repo.Owner = nil
    }
    return nil
}
type SearchDatasetOptions struct {
    Keyword              string
    OwnerID              int64
    User                 *User
    RepoID               int64
    IncludePublic        bool
    RecommendOnly        bool
    Category             string
    Task                 string
    License              string
    DatasetIDs           []int64 // currently only takes effect when StarByMe is true
    ListOptions
    SearchOrderBy
    IsOwner              bool
    StarByMe             bool
    CloudBrainType       int // 0: cloudbrain, 1: modelarts, -1: all
    PublicOnly           bool
    JustNeedZipFile      bool
    NeedAttachment       bool
    UploadAttachmentByMe bool
}
func CreateDataset(dataset *Dataset) (err error) {
@@ -159,29 +219,36 @@ func SearchDatasetCondition(opts *SearchDatasetOptions) builder.Cond {
    if opts.RepoID > 0 {
        cond = cond.And(builder.Eq{"dataset.repo_id": opts.RepoID})
    }
    if opts.PublicOnly {
        cond = cond.And(builder.Eq{"dataset.status": DatasetStatusPublic})
        cond = cond.And(builder.Eq{"attachment.is_private": false})
    } else if opts.IncludePublic {
        cond = cond.And(builder.Eq{"dataset.status": DatasetStatusPublic})
        cond = cond.And(builder.Eq{"attachment.is_private": false})
        if opts.OwnerID > 0 {
            subCon := builder.NewCond()
            subCon = subCon.And(builder.Eq{"repository.owner_id": opts.OwnerID})
            subCon = generateFilterCond(opts, subCon)
            cond = cond.Or(subCon)
        }
    } else if opts.OwnerID > 0 && !opts.StarByMe && !opts.UploadAttachmentByMe {
        cond = cond.And(builder.Eq{"repository.owner_id": opts.OwnerID})
        if !opts.IsOwner {
            cond = cond.And(builder.Eq{"dataset.status": DatasetStatusPublic})
            cond = cond.And(builder.Eq{"attachment.is_private": false})
        }
    }

    if len(opts.DatasetIDs) > 0 {
        if opts.StarByMe {
            cond = cond.And(builder.In("dataset.id", opts.DatasetIDs))
        } else {
            subCon := builder.NewCond()
            subCon = subCon.And(builder.In("dataset.id", opts.DatasetIDs))
            cond = cond.Or(subCon)
        }
    }

    return cond
@@ -207,6 +274,17 @@ func generateFilterCond(opts *SearchDatasetOptions, cond builder.Cond) builder.Cond {
        cond = cond.And(builder.Eq{"dataset.recommend": opts.RecommendOnly})
    }

    if opts.JustNeedZipFile {
        cond = cond.And(builder.Gt{"attachment.decompress_state": 0})
    }
    if opts.CloudBrainType >= 0 {
        cond = cond.And(builder.Eq{"attachment.type": opts.CloudBrainType})
    }
    if opts.UploadAttachmentByMe {
        cond = cond.And(builder.Eq{"attachment.uploader_id": opts.OwnerID})
    }

    return cond
}
@@ -245,6 +323,12 @@ func SearchDatasetByCondition(opts *SearchDatasetOptions, cond builder.Cond) (DatasetList, int64, error) {
        return nil, 0, fmt.Errorf("LoadAttributes: %v", err)
    }

    if opts.NeedAttachment {
        if err = datasets.loadAttachmentAttributes(opts); err != nil {
            return nil, 0, fmt.Errorf("loadAttachmentAttributes: %v", err)
        }
    }

    return datasets, count, nil
}
@@ -460,5 +544,12 @@ func GetCollaboratorDatasetIdsByUserID(userID int64) []int64 {
    _ = x.Table("dataset").Join("INNER", "collaboration", "dataset.repo_id = collaboration.repo_id and collaboration.mode>0 and collaboration.user_id=?", userID).
        Cols("dataset.id").Find(&datasets)
    return datasets
}
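// GetTeamDatasetIdsByUserID returns the IDs of datasets whose repositories the
// user can access through team membership.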
func GetTeamDatasetIdsByUserID(userID int64) []int64 {
    var datasets []int64
    _ = x.Table("dataset").Join("INNER", "team_repo", "dataset.repo_id = team_repo.repo_id").
        Join("INNER", "team_user", "team_repo.team_id=team_user.team_id and team_user.uid=?", userID).
        Cols("dataset.id").Find(&datasets)
    return datasets
}
@@ -68,3 +68,10 @@ func isDatasetStaring(e Engine, userID, datasetID int64) bool {
    has, _ := e.Get(&DatasetStar{0, userID, datasetID, 0})
    return has
}
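// GetDatasetIdsStarByUser returns the IDs of the datasets starred by the given user.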
func GetDatasetIdsStarByUser(userID int64) []int64 {
    var datasets []int64
    _ = x.Table("dataset_star").Where("uid=?", userID).
        Cols("dataset_star.dataset_id").Find(&datasets)
    return datasets
}
@@ -77,9 +77,10 @@ func Datasets(ctx *context.Context) {
            Page:     page,
            PageSize: setting.UI.ExplorePagingNum,
        },
        Keyword:        keyword,
        RecommendOnly:  ctx.QueryBool("recommend"),
        CloudBrainType: -1,
        SearchOrderBy:  orderBy,
    })
    if err != nil {
        ctx.ServerError("SearchDataset", err)
@@ -346,19 +346,22 @@ func ExploreDatasets(ctx *context.Context) {
    var datasetsIds []int64
    if ownerID > 0 {
        collaboratorDatasetsIds := models.GetCollaboratorDatasetIdsByUserID(ownerID)
        teamDatasetsIds := models.GetTeamDatasetIdsByUserID(ownerID)
        datasetsIds = append(collaboratorDatasetsIds, teamDatasetsIds...)
    }

    opts := &models.SearchDatasetOptions{
        Keyword:        keyword,
        IncludePublic:  true,
        SearchOrderBy:  orderBy,
        Category:       category,
        Task:           task,
        License:        license,
        OwnerID:        ownerID,
        DatasetIDs:     datasetsIds,
        RecommendOnly:  ctx.QueryBool("recommend"),
        CloudBrainType: -1,
        ListOptions: models.ListOptions{
            Page:     page,
            PageSize: 30,
@@ -410,6 +410,90 @@ func MyDatasets(ctx *context.Context) {
    })
}
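// datasetMultiple is the shared handler behind the "*_m" dataset endpoints: it
// completes the search options from the request, searches datasets together
// with their zipped attachments and writes the result as JSON.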
func datasetMultiple(ctx *context.Context, opts *models.SearchDatasetOptions) {
    page := ctx.QueryInt("page")
    cloudbrainType := ctx.QueryInt("type")
    keyword := strings.Trim(ctx.Query("q"), " ")
    orderBy := models.SearchOrderByRecentUpdated

    var ownerID int64
    if ctx.User != nil {
        ownerID = ctx.User.ID
    }

    opts.Keyword = keyword
    opts.SearchOrderBy = orderBy
    opts.OwnerID = ownerID
    opts.RecommendOnly = ctx.QueryBool("recommend")
    opts.CloudBrainType = cloudbrainType
    opts.ListOptions = models.ListOptions{
        Page:     page,
        PageSize: setting.UI.DatasetPagingNum,
    }
    opts.NeedAttachment = true
    opts.JustNeedZipFile = true
    opts.User = ctx.User

    datasets, count, err := models.SearchDataset(opts)
    if err != nil {
        ctx.ServerError("datasets", err)
        return
    }

    data, err := json.Marshal(datasets)
    if err != nil {
        log.Error("json.Marshal failed:", err.Error())
        ctx.JSON(200, map[string]string{
            "result_code": "-1",
            "error_msg":   err.Error(),
            "data":        "",
        })
        return
    }
    ctx.JSON(200, map[string]string{
        "result_code": "0",
        "data":        string(data),
        "count":       strconv.FormatInt(count, 10),
    })
}
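// CurrentRepoDatasetMultiple lists the datasets of the current repository.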
func CurrentRepoDatasetMultiple(ctx *context.Context) {
    opts := &models.SearchDatasetOptions{
        RepoID: ctx.Repo.Repository.ID,
    }
    datasetMultiple(ctx, opts)
}
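// MyDatasetsMultiple lists the datasets that contain attachments uploaded by the current user.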
func MyDatasetsMultiple(ctx *context.Context) {
    opts := &models.SearchDatasetOptions{
        UploadAttachmentByMe: true,
    }
    datasetMultiple(ctx, opts)
}
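// PublicDatasetMultiple lists public datasets.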
func PublicDatasetMultiple(ctx *context.Context) {
    opts := &models.SearchDatasetOptions{
        PublicOnly: true,
    }
    datasetMultiple(ctx, opts)
}
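// MyFavoriteDatasetMultiple lists the datasets starred by the current user.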
func MyFavoriteDatasetMultiple(ctx *context.Context) {
    opts := &models.SearchDatasetOptions{
        StarByMe:   true,
        DatasetIDs: models.GetDatasetIdsStarByUser(ctx.User.ID),
    }
    datasetMultiple(ctx, opts)
}
func PublicDataset(ctx *context.Context) {
    page := ctx.QueryInt("page")
    cloudbrainType := ctx.QueryInt("type")
@@ -1026,6 +1026,11 @@ func RegisterRoutes(m *macaron.Macaron) {
        m.Get("/public_datasets", repo.PublicDataset)
        m.Get("/my_favorite", repo.MyFavoriteDataset)

        m.Get("/current_repo_m", repo.CurrentRepoDatasetMultiple)
        m.Get("/my_datasets_m", repo.MyDatasetsMultiple)
        m.Get("/public_datasets_m", repo.PublicDatasetMultiple)
        m.Get("/my_favorite_m", repo.MyFavoriteDatasetMultiple)

        m.Group("/status", func() {
            m.Get("/:uuid", repo.GetDatasetStatus)
        })
@@ -263,6 +263,7 @@ func Profile(ctx *context.Context) {
            Page:     page,
            PageSize: setting.UI.ExplorePagingNum,
        },
        CloudBrainType: -1,
    }

    if len(datasetSearchOptions.SearchOrderBy) == 0 {