
dataset.go

package repo

import (
	"encoding/json"
	"fmt"
	"net/http"
	"sort"
	"strconv"
	"strings"
	"unicode/utf8"

	"code.gitea.io/gitea/models"
	"code.gitea.io/gitea/modules/auth"
	"code.gitea.io/gitea/modules/base"
	"code.gitea.io/gitea/modules/context"
	"code.gitea.io/gitea/modules/log"
	"code.gitea.io/gitea/modules/setting"
)

const (
	tplIndex         base.TplName = "repo/datasets/index"
	tplDatasetCreate base.TplName = "repo/datasets/create"
	tplDatasetEdit   base.TplName = "repo/datasets/edit"
	taskstplIndex    base.TplName = "repo/datasets/tasks/index"
)

// MustEnableDataset responds with 404 if the current viewer cannot read the
// repository's dataset unit.
func MustEnableDataset(ctx *context.Context) {
	if !ctx.Repo.CanRead(models.UnitTypeDatasets) {
		ctx.NotFound("MustEnableDataset", nil)
		return
	}
}

// newFilterPrivateAttachments filters the attachment list for the current
// viewer: writers see everything, organization members and repository
// collaborators may also see private attachments, everyone else only gets
// the public ones.
func newFilterPrivateAttachments(ctx *context.Context, list []*models.Attachment, repo *models.Repository) []*models.Attachment {
	if ctx.Repo.CanWrite(models.UnitTypeDatasets) {
		log.Info("can write.")
		return list
	}

	if repo.Owner == nil {
		repo.GetOwner()
	}
	permission := false
	if repo.Owner.IsOrganization() && ctx.User != nil {
		if repo.Owner.IsUserPartOfOrg(ctx.User.ID) {
			log.Info("user is member of org.")
			permission = true
		}
	}
	if !permission && ctx.User != nil {
		isCollaborator, _ := repo.IsCollaborator(ctx.User.ID)
		if isCollaborator {
			log.Info("collaborator may visit the attachment.")
			permission = true
		}
	}

	var publicList []*models.Attachment
	for _, attach := range list {
		if !attach.IsPrivate || permission {
			publicList = append(publicList, attach)
		}
	}
	return publicList
}
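
// In short, the visibility rules implemented above are:
//
//	viewer with write access on the dataset unit → all attachments
//	org member or repository collaborator        → all attachments
//	anyone else (including anonymous)            → public attachments only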

// QueryDataSet returns the attachments of the current repository's dataset,
// filtered by the viewer's permission and sorted newest first.
func QueryDataSet(ctx *context.Context) []*models.Attachment {
	repo := ctx.Repo.Repository
	dataset, err := models.GetDatasetByRepo(repo)
	if err != nil {
		log.Error("dataset of the repository not found")
		ctx.NotFound("GetDatasetByRepo", err)
		return nil
	}
	if ctx.Query("type") == "" {
		log.Error("query parameter type is required")
		ctx.NotFound("type error", nil)
		return nil
	}

	err = models.GetDatasetAttachments(ctx.QueryInt("type"), ctx.IsSigned, ctx.User, dataset)
	if err != nil {
		ctx.ServerError("GetDatasetAttachments", err)
		return nil
	}

	attachments := newFilterPrivateAttachments(ctx, dataset.Attachments, repo)

	ctx.Data["SortType"] = ctx.Query("sort")
	sort.Slice(attachments, func(i, j int) bool {
		return attachments[i].CreatedUnix > attachments[j].CreatedUnix
	})
	return attachments
}

// DatasetIndex renders the dataset page of the repository: attachments are
// filtered by permission, sorted, paged and annotated with their uploader.
func DatasetIndex(ctx *context.Context) {
	log.Info("dataset index")
	MustEnableDataset(ctx)
	ctx.Data["PageIsDataset"] = true
	ctx.Data["SortType"] = ctx.Query("sort")

	repo := ctx.Repo.Repository
	dataset, err := models.GetDatasetByRepo(repo)
	ctx.Data["CanWrite"] = ctx.Repo.CanWrite(models.UnitTypeDatasets)
	if err != nil {
		log.Warn("query dataset, not found.")
		ctx.HTML(200, tplIndex)
		return
	}

	cloudbrainType := -1
	if ctx.Query("type") != "" {
		cloudbrainType = ctx.QueryInt("type")
	}
	err = models.GetDatasetAttachments(cloudbrainType, ctx.IsSigned, ctx.User, dataset)
	if err != nil {
		ctx.ServerError("GetDatasetAttachments", err)
		return
	}

	attachments := newFilterPrivateAttachments(ctx, dataset.Attachments, repo)

	if ctx.Data["SortType"] == "nameAsc" {
		sort.Slice(attachments, func(i, j int) bool {
			return strings.ToLower(attachments[i].Name) < strings.ToLower(attachments[j].Name)
		})
	} else if ctx.Data["SortType"] == "nameDesc" {
		sort.Slice(attachments, func(i, j int) bool {
			return strings.ToLower(attachments[i].Name) > strings.ToLower(attachments[j].Name)
		})
	} else if ctx.Data["SortType"] == "sizeAsc" {
		sort.Slice(attachments, func(i, j int) bool {
			return attachments[i].Size < attachments[j].Size
		})
	} else if ctx.Data["SortType"] == "sizeDesc" {
		sort.Slice(attachments, func(i, j int) bool {
			return attachments[i].Size > attachments[j].Size
		})
	} else if ctx.Data["SortType"] == "timeAsc" {
		sort.Slice(attachments, func(i, j int) bool {
			return attachments[i].CreatedUnix < attachments[j].CreatedUnix
		})
	} else {
		sort.Slice(attachments, func(i, j int) bool {
			return attachments[i].CreatedUnix > attachments[j].CreatedUnix
		})
	}

	page := ctx.QueryInt("page")
	if page <= 0 {
		page = 1
	}
	pagesize := ctx.QueryInt("pagesize")
	if pagesize <= 0 {
		pagesize = 10
	}
	pager := context.NewPagination(len(attachments), pagesize, page, 5)
	pageAttachments := getPageAttachments(attachments, page, pagesize)

	// load the attachment uploader and mark non-archive files as not decompressible
	for _, attachment := range pageAttachments {
		uploader, _ := models.GetUserByID(attachment.UploaderID)
		attachment.Uploader = uploader
		if !strings.HasSuffix(attachment.Name, ".zip") && !strings.HasSuffix(attachment.Name, ".tar.gz") {
			attachment.DecompressState = -1 // not a compressed file
		}
	}

	ctx.Data["Page"] = pager
	ctx.Data["Title"] = ctx.Tr("dataset.show_dataset")
	ctx.Data["Link"] = ctx.Repo.RepoLink + "/datasets"
	ctx.Data["dataset"] = dataset
	ctx.Data["Attachments"] = pageAttachments
	ctx.Data["IsOwner"] = true
	ctx.Data["StoreType"] = setting.Attachment.StoreType
	ctx.Data["Type"] = cloudbrainType

	renderAttachmentSettings(ctx)

	ctx.HTML(200, tplIndex)
}
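
// Query parameters handled by DatasetIndex (taken from the handler above):
//
//	sort      nameAsc | nameDesc | sizeAsc | sizeDesc | timeAsc; anything else → newest first
//	type      cloudbrain type filter, -1 (all types) when omitted
//	page      1-based page number, defaults to 1
//	pagesize  attachments per page, defaults to 10
//
// Illustrative request, assuming the page is mounted at RepoLink + "/datasets":
//
//	GET /<owner>/<repo>/datasets?sort=sizeDesc&type=0&page=2&pagesize=10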

// getPageAttachments returns the attachments belonging to the given 1-based
// page, or nil when the page starts beyond the end of the list.
func getPageAttachments(attachments []*models.Attachment, page int, pagesize int) []*models.Attachment {
	begin := (page - 1) * pagesize
	end := page * pagesize
	if begin > len(attachments)-1 {
		return nil
	}
	if end > len(attachments)-1 {
		return attachments[begin:]
	}
	return attachments[begin:end]
}
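
// Worked example of the paging arithmetic above: with 12 attachments,
// page=2 and pagesize=10 give begin=10, end=20; end is past the last index,
// so attachments[10:] (the remaining 2 items) is returned. page=3 gives
// begin=20 > 11, so nil is returned.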

// CreateDataset renders the dataset creation page.
func CreateDataset(ctx *context.Context) {
	MustEnableDataset(ctx)
	ctx.Data["PageIsDataset"] = true

	ctx.HTML(200, tplDatasetCreate)
}

// EditDataset renders the edit page of the dataset identified by :id.
func EditDataset(ctx *context.Context) {
	MustEnableDataset(ctx)
	ctx.Data["PageIsDataset"] = true

	datasetId, _ := strconv.ParseInt(ctx.Params(":id"), 10, 64)
	dataset, _ := models.GetDatasetByID(datasetId)
	if dataset == nil {
		ctx.Error(http.StatusNotFound, "")
		return
	}
	ctx.Data["Dataset"] = dataset
	ctx.HTML(200, tplDatasetEdit)
}

// CreateDatasetPost validates the creation form and stores a new dataset
// bound to the current repository.
func CreateDatasetPost(ctx *context.Context, form auth.CreateDatasetForm) {
	dataset := &models.Dataset{}

	if !NamePattern.MatchString(form.Title) {
		ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.title_format_err")))
		return
	}
	if utf8.RuneCountInString(form.Description) > 1024 {
		ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.description_format_err", 1024)))
		return
	}

	dataset.RepoID = ctx.Repo.Repository.ID
	dataset.UserID = ctx.User.ID
	dataset.Category = form.Category
	dataset.Task = form.Task
	dataset.Title = form.Title
	dataset.License = form.License
	dataset.Description = form.Description
	dataset.DownloadTimes = 0
	if ctx.Repo.Repository.IsPrivate {
		dataset.Status = 0
	} else {
		dataset.Status = 1
	}

	err := models.CreateDataset(dataset)
	if err != nil {
		log.Error("fail to create dataset", err)
		ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.create_dataset_fail")))
	} else {
		ctx.JSON(http.StatusOK, models.BaseOKMessage)
	}
}

// EditDatasetPost validates the edit form and updates an existing dataset.
func EditDatasetPost(ctx *context.Context, form auth.EditDatasetForm) {
	ctx.Data["PageIsDataset"] = true
	ctx.Data["Title"] = ctx.Tr("dataset.edit_dataset")

	if !NamePattern.MatchString(form.Title) {
		ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.title_format_err")))
		return
	}
	if utf8.RuneCountInString(form.Description) > 1024 {
		ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.description_format_err", 1024)))
		return
	}

	rel, err := models.GetDatasetByID(form.ID)
	ctx.Data["dataset"] = rel
	if err != nil {
		log.Error("failed to query dataset", err)
		ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.query_dataset_fail")))
		return
	}

	rel.Title = form.Title
	rel.Description = form.Description
	rel.Category = form.Category
	rel.Task = form.Task
	rel.License = form.License

	if err = models.UpdateDataset(models.DefaultDBContext(), rel); err != nil {
		ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.query_dataset_fail")))
		return
	}
	ctx.JSON(http.StatusOK, models.BaseOKMessage)
}

// DatasetAction stars or unstars the dataset identified by :id for the
// current user, depending on the :action parameter.
func DatasetAction(ctx *context.Context) {
	var err error
	datasetId, _ := strconv.ParseInt(ctx.Params(":id"), 10, 64)
	switch ctx.Params(":action") {
	case "star":
		err = models.StarDataset(ctx.User.ID, datasetId, true)
	case "unstar":
		err = models.StarDataset(ctx.User.ID, datasetId, false)
	}
	if err != nil {
		ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("repo.star_fail", ctx.Params(":action"))))
	} else {
		ctx.JSON(http.StatusOK, models.BaseOKMessage)
	}
}
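
// Usage sketch for DatasetAction; the HTTP method and route path are
// assumptions, only the :id and :action parameters and the JSON replies come
// from the handler above:
//
//	POST .../datasets/:id/action/star   → models.BaseOKMessage on success
//	POST .../datasets/:id/action/unstar → BaseErrorMessage("repo.star_fail") on failure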

// CurrentRepoDataset lists the zip attachments of the current repository's
// dataset as JSON, filtered by keyword and cloudbrain type.
func CurrentRepoDataset(ctx *context.Context) {
	page := ctx.QueryInt("page")
	cloudbrainType := ctx.QueryInt("type")
	keyword := strings.Trim(ctx.Query("q"), " ")

	repo := ctx.Repo.Repository
	var datasetIDs []int64
	dataset, err := models.GetDatasetByRepo(repo)
	if err != nil {
		ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("GetDatasetByRepo failed", err)))
		return
	}
	datasetIDs = append(datasetIDs, dataset.ID)

	datasets, count, err := models.Attachments(&models.AttachmentsOptions{
		ListOptions: models.ListOptions{
			Page:     page,
			PageSize: setting.UI.DatasetPagingNum,
		},
		Keyword:         keyword,
		NeedDatasetIDs:  true,
		DatasetIDs:      datasetIDs,
		Type:            cloudbrainType,
		NeedIsPrivate:   false,
		JustNeedZipFile: true,
		NeedRepoInfo:    true,
	})
	if err != nil {
		ctx.ServerError("datasets", err)
		return
	}

	data, err := json.Marshal(datasets)
	if err != nil {
		log.Error("json.Marshal failed:", err.Error())
		ctx.JSON(200, map[string]string{
			"result_code": "-1",
			"error_msg":   err.Error(),
			"data":        "",
		})
		return
	}
	ctx.JSON(200, map[string]string{
		"result_code": "0",
		"data":        string(data),
		"count":       strconv.FormatInt(count, 10),
	})
}

// MyDatasets lists the zip attachments uploaded by the current user as JSON,
// filtered by keyword and cloudbrain type.
func MyDatasets(ctx *context.Context) {
	page := ctx.QueryInt("page")
	cloudbrainType := ctx.QueryInt("type")
	keyword := strings.Trim(ctx.Query("q"), " ")

	uploaderID := ctx.User.ID
	datasets, count, err := models.Attachments(&models.AttachmentsOptions{
		ListOptions: models.ListOptions{
			Page:     page,
			PageSize: setting.UI.DatasetPagingNum,
		},
		Keyword:         keyword,
		NeedDatasetIDs:  false,
		UploaderID:      uploaderID,
		Type:            cloudbrainType,
		NeedIsPrivate:   false,
		JustNeedZipFile: true,
		NeedRepoInfo:    true,
		RecommendOnly:   ctx.QueryBool("recommend"),
	})
	if err != nil {
		ctx.ServerError("datasets", err)
		return
	}

	data, err := json.Marshal(datasets)
	if err != nil {
		log.Error("json.Marshal failed:", err.Error())
		ctx.JSON(200, map[string]string{
			"result_code": "-1",
			"error_msg":   err.Error(),
			"data":        "",
		})
		return
	}
	ctx.JSON(200, map[string]string{
		"result_code": "0",
		"data":        string(data),
		"count":       strconv.FormatInt(count, 10),
	})
}

// datasetMultiple runs a dataset search with the given options, applying the
// common query parameters (page, type, q, recommend) and the current user,
// and writes the result as JSON.
func datasetMultiple(ctx *context.Context, opts *models.SearchDatasetOptions) {
	page := ctx.QueryInt("page")
	cloudbrainType := ctx.QueryInt("type")
	keyword := strings.Trim(ctx.Query("q"), " ")
	orderBy := models.SearchOrderByRecentUpdated

	var ownerID int64
	if ctx.User != nil {
		ownerID = ctx.User.ID
	}

	opts.Keyword = keyword
	opts.SearchOrderBy = orderBy
	opts.OwnerID = ownerID
	opts.RecommendOnly = ctx.QueryBool("recommend")
	opts.CloudBrainType = cloudbrainType
	opts.ListOptions = models.ListOptions{
		Page:     page,
		PageSize: setting.UI.DatasetPagingNum,
	}
	opts.NeedAttachment = true
	opts.JustNeedZipFile = true
	opts.User = ctx.User

	datasets, count, err := models.SearchDataset(opts)
	if err != nil {
		ctx.ServerError("datasets", err)
		return
	}

	data, err := json.Marshal(datasets)
	if err != nil {
		log.Error("json.Marshal failed:", err.Error())
		ctx.JSON(200, map[string]string{
			"result_code": "-1",
			"error_msg":   err.Error(),
			"data":        "",
		})
		return
	}
	ctx.JSON(200, map[string]string{
		"result_code": "0",
		"data":        string(data),
		"count":       strconv.FormatInt(count, 10),
	})
}

func CurrentRepoDatasetMultiple(ctx *context.Context) {
	opts := &models.SearchDatasetOptions{
		RepoID: ctx.Repo.Repository.ID,
	}
	datasetMultiple(ctx, opts)
}

func MyDatasetsMultiple(ctx *context.Context) {
	opts := &models.SearchDatasetOptions{
		UploadAttachmentByMe: true,
	}
	datasetMultiple(ctx, opts)
}

func PublicDatasetMultiple(ctx *context.Context) {
	opts := &models.SearchDatasetOptions{
		PublicOnly: true,
	}
	datasetMultiple(ctx, opts)
}

func MyFavoriteDatasetMultiple(ctx *context.Context) {
	opts := &models.SearchDatasetOptions{
		StarByMe:   true,
		DatasetIDs: models.GetDatasetIdsStarByUser(ctx.User.ID),
	}
	datasetMultiple(ctx, opts)
}
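
// The wrappers above only differ in the SearchDatasetOptions they seed before
// delegating to datasetMultiple. A hypothetical further variant (not part of
// this file; the option fields reuse the ones already shown above) would
// follow the same pattern:
//
//	func MyStarredPublicDatasetMultiple(ctx *context.Context) {
//		opts := &models.SearchDatasetOptions{
//			PublicOnly: true,
//			StarByMe:   true,
//			DatasetIDs: models.GetDatasetIdsStarByUser(ctx.User.ID),
//		}
//		datasetMultiple(ctx, opts)
//	}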

// PublicDataset lists the zip attachments of public datasets as JSON,
// filtered by keyword and cloudbrain type.
func PublicDataset(ctx *context.Context) {
	page := ctx.QueryInt("page")
	cloudbrainType := ctx.QueryInt("type")
	keyword := strings.Trim(ctx.Query("q"), " ")

	datasets, count, err := models.Attachments(&models.AttachmentsOptions{
		ListOptions: models.ListOptions{
			Page:     page,
			PageSize: setting.UI.DatasetPagingNum,
		},
		Keyword:         keyword,
		NeedDatasetIDs:  false,
		NeedIsPrivate:   true,
		IsPrivate:       false,
		Type:            cloudbrainType,
		JustNeedZipFile: true,
		NeedRepoInfo:    true,
		RecommendOnly:   ctx.QueryBool("recommend"),
	})
	if err != nil {
		ctx.ServerError("datasets", err)
		return
	}

	data, err := json.Marshal(datasets)
	if err != nil {
		log.Error("json.Marshal failed:", err.Error())
		ctx.JSON(200, map[string]string{
			"result_code": "-1",
			"error_msg":   err.Error(),
			"data":        "",
		})
		return
	}
	ctx.JSON(200, map[string]string{
		"result_code": "0",
		"data":        string(data),
		"count":       strconv.FormatInt(count, 10),
	})
}

// MyFavoriteDataset lists the attachments of datasets starred by the current
// user as JSON. Starred datasets whose repository the user owns or
// collaborates on are queried without the private/public filter; the rest
// only contribute their public attachments.
func MyFavoriteDataset(ctx *context.Context) {
	UserId := ctx.User.ID
	cloudbrainType := ctx.QueryInt("type")
	keyword := strings.Trim(ctx.Query("q"), " ")

	var NotColDatasetIDs []int64
	var IsColDatasetIDs []int64
	datasetStars, err := models.GetDatasetStarByUser(ctx.User)
	if err != nil {
		log.Error("GetDatasetStarByUser failed:", err.Error())
		ctx.JSON(200, map[string]string{
			"result_code": "-1",
			"error_msg":   err.Error(),
			"data":        "",
		})
		return
	}

	// If the dataset has been deleted, it will not be counted.
	for _, datasetStar := range datasetStars {
		IsExist, repo, dataset, err := IsDatasetStarExist(datasetStar)
		if err != nil {
			log.Error("IsDatasetStarExist error:", err.Error())
		}
		if IsExist {
			DatasetIsCollaborator := DatasetIsCollaborator(ctx, dataset)
			if repo.OwnerID == ctx.User.ID || DatasetIsCollaborator {
				IsColDatasetIDs = append(IsColDatasetIDs, datasetStar.DatasetID)
			} else {
				NotColDatasetIDs = append(NotColDatasetIDs, datasetStar.DatasetID)
			}
		}
	}

	NotColDatasets, NotColcount, err := models.Attachments(&models.AttachmentsOptions{
		Keyword:         keyword,
		NeedDatasetIDs:  true,
		DatasetIDs:      NotColDatasetIDs,
		NeedIsPrivate:   true,
		IsPrivate:       false,
		Type:            cloudbrainType,
		JustNeedZipFile: true,
		NeedRepoInfo:    true,
		RecommendOnly:   ctx.QueryBool("recommend"),
		UserId:          UserId,
	})
	if err != nil {
		ctx.ServerError("datasets", err)
		return
	}

	// If the user is a collaborator, there is no need to check whether the
	// dataset is private or public.
	IsColDatasets, IsColcount, err := models.Attachments(&models.AttachmentsOptions{
		Keyword:         keyword,
		NeedDatasetIDs:  true,
		DatasetIDs:      IsColDatasetIDs,
		NeedIsPrivate:   false,
		Type:            cloudbrainType,
		JustNeedZipFile: true,
		NeedRepoInfo:    true,
		RecommendOnly:   ctx.QueryBool("recommend"),
		UserId:          UserId,
	})
	if err != nil {
		ctx.ServerError("datasets", err)
		return
	}

	IsColDatasets = append(IsColDatasets, NotColDatasets...)
	datasets := IsColDatasets
	count := NotColcount + IsColcount

	sort.Slice(datasets, func(i, j int) bool {
		return datasets[i].Attachment.CreatedUnix > datasets[j].Attachment.CreatedUnix
	})

	page := ctx.QueryInt("page")
	if page <= 0 {
		page = 1
	}
	pagesize := ctx.QueryInt("pagesize")
	if pagesize <= 0 {
		pagesize = 5
	}

	pageDatasetsInfo := getPageDatasets(datasets, page, pagesize)
	if pageDatasetsInfo == nil {
		ctx.JSON(200, map[string]string{
			"result_code": "0",
			"data":        "[]",
			"count":       strconv.FormatInt(count, 10),
		})
		return
	}
	data, err := json.Marshal(pageDatasetsInfo)
	log.Info("data:", data)
	if err != nil {
		log.Error("json.Marshal failed:", err.Error())
		ctx.JSON(200, map[string]string{
			"result_code": "-1",
			"error_msg":   err.Error(),
			"data":        "",
		})
		return
	}
	ctx.JSON(200, map[string]string{
		"result_code": "0",
		"data":        string(data),
		"count":       strconv.FormatInt(count, 10),
	})
}

func getPageDatasets(AttachmentInfos []*models.AttachmentInfo, page int, pagesize int) []*models.AttachmentInfo {
	begin := (page - 1) * pagesize
	end := page * pagesize
	if begin > len(AttachmentInfos)-1 {
		return nil
	}
	if end > len(AttachmentInfos)-1 {
		return AttachmentInfos[begin:]
	}
	return AttachmentInfos[begin:end]
}

func getTotalPage(total int64, pageSize int) int {
	another := 0
	if int(total)%pageSize != 0 {
		another = 1
	}
	return int(total)/pageSize + another
}
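
// getTotalPage is plain ceiling division: for example, total=23 and
// pageSize=10 give 23/10 = 2 with a remainder, so 2+1 = 3 pages, while
// total=20 divides evenly and yields exactly 2 pages.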

// GetDatasetStatus returns the decompress state of the attachment identified
// by :uuid as JSON.
func GetDatasetStatus(ctx *context.Context) {
	var (
		err error
	)

	UUID := ctx.Params(":uuid")
	attachment, err := models.GetAttachmentByUUID(UUID)
	if err != nil {
		log.Error("GetAttachmentByUUID failed:", err.Error())
		ctx.JSON(200, map[string]string{
			"result_code": "-1",
			"error_msg":   err.Error(),
			"data":        "",
		})
		return
	}

	ctx.JSON(200, map[string]string{
		"result_code":      "0",
		"UUID":             UUID,
		"AttachmentStatus": fmt.Sprint(attachment.DecompressState),
	})
}
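
// Illustrative success response of GetDatasetStatus (the field names come
// from the handler above; the concrete values are made up):
//
//	{"result_code": "0", "UUID": "<uuid>", "AttachmentStatus": "1"}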

// DatasetIsCollaborator reports whether the current user can act as a
// collaborator on the repository that owns the given dataset, either through
// an organization team that has the repository, organization ownership, or a
// direct collaboration.
func DatasetIsCollaborator(ctx *context.Context, dataset *models.Dataset) bool {
	repo, err := models.GetRepositoryByID(dataset.RepoID)
	if err != nil {
		log.Error("query repo error:", err.Error())
		return false
	}

	repo.GetOwner()
	if ctx.User == nil {
		return false
	}

	if repo.Owner.IsOrganization() {
		org := repo.Owner
		org.Teams, err = org.GetUserTeams(ctx.User.ID)
		if err != nil {
			log.Error("GetUserTeams error:", err.Error())
			return false
		}
		if org.IsUserPartOfOrg(ctx.User.ID) {
			for _, t := range org.Teams {
				if t.IsMember(ctx.User.ID) && t.HasRepository(repo.ID) {
					return true
				}
			}
			isOwner, _ := models.IsOrganizationOwner(repo.OwnerID, ctx.User.ID)
			return isOwner
		}
	}

	isCollaborator, _ := repo.IsCollaborator(ctx.User.ID)
	return isCollaborator
}

// IsDatasetStarExist checks that the dataset behind a star record and its
// repository still exist, returning them when they do.
func IsDatasetStarExist(datasetStar *models.DatasetStar) (bool, *models.Repository, *models.Dataset, error) {
	dataset, err := models.GetDatasetByID(datasetStar.DatasetID)
	if err != nil {
		log.Error("query dataset error:", err.Error())
		return false, nil, nil, err
	}

	repo, err := models.GetRepositoryByID(dataset.RepoID)
	if err != nil {
		log.Error("GetRepositoryByID error:", err.Error())
		return false, nil, nil, err
	}
	return true, repo, dataset, nil
}
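
// A minimal sketch of how these handlers could be registered as web routes.
// The route paths, the m.Group/bindIgnErr helpers and the middleware order
// are assumptions for illustration only; only the handler and form names are
// taken from this file:
//
//	m.Group("/datasets", func() {
//		m.Get("", repo.DatasetIndex)
//		m.Get("/create", repo.CreateDataset)
//		m.Post("/create", bindIgnErr(auth.CreateDatasetForm{}), repo.CreateDatasetPost)
//		m.Get("/edit/:id", repo.EditDataset)
//		m.Post("/edit", bindIgnErr(auth.EditDatasetForm{}), repo.EditDatasetPost)
//		m.Post("/:id/action/:action", repo.DatasetAction)
//		m.Get("/status/:uuid", repo.GetDatasetStatus)
//	}, repo.MustEnableDataset)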