
dataset.go 16 kB

package models

import (
	"errors"
	"fmt"
	"sort"
	"strings"

	"code.gitea.io/gitea/modules/log"
	"code.gitea.io/gitea/modules/setting"
	"code.gitea.io/gitea/modules/timeutil"

	"xorm.io/builder"
)

const (
	DatasetStatusPrivate int32 = iota
	DatasetStatusPublic
	DatasetStatusDeleted
)
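
// Dataset represents a dataset that belongs to a repository and groups its
// attachments together with metadata such as category, task, license and stars.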
type Dataset struct {
	ID            int64              `xorm:"pk autoincr"`
	Title         string             `xorm:"INDEX NOT NULL"`
	Status        int32              `xorm:"INDEX"` // normal_private: 0, public: 1, is_delete: 2
	Category      string
	Description   string             `xorm:"TEXT"`
	DownloadTimes int64
	UseCount      int64              `xorm:"DEFAULT 0"`
	NumStars      int                `xorm:"INDEX NOT NULL DEFAULT 0"`
	Recommend     bool               `xorm:"INDEX NOT NULL DEFAULT false"`
	License       string
	Task          string
	ReleaseID     int64              `xorm:"INDEX"`
	UserID        int64              `xorm:"INDEX"`
	RepoID        int64              `xorm:"INDEX"`
	Repo          *Repository        `xorm:"-"`
	CreatedUnix   timeutil.TimeStamp `xorm:"INDEX created"`
	UpdatedUnix   timeutil.TimeStamp `xorm:"INDEX updated"`
	User          *User              `xorm:"-"`
	Attachments   []*Attachment      `xorm:"-"`
}

type DatasetWithStar struct {
	Dataset
	IsStaring bool
}

func (d *Dataset) IsPrivate() bool {
	switch d.Status {
	case DatasetStatusPrivate:
		return true
	case DatasetStatusPublic:
		return false
	case DatasetStatusDeleted:
		return false
	default:
		return false
	}
}

type DatasetList []*Dataset
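
// loadAttributes fills the User and Repo fields of every dataset in the list,
// loading each referenced user and repository once.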
func (datasets DatasetList) loadAttributes(e Engine) error {
	if len(datasets) == 0 {
		return nil
	}

	set := make(map[int64]struct{})
	userIdSet := make(map[int64]struct{})
	datasetIDs := make([]int64, len(datasets))
	for i := range datasets {
		userIdSet[datasets[i].UserID] = struct{}{}
		set[datasets[i].RepoID] = struct{}{}
		datasetIDs[i] = datasets[i].ID
	}

	// Load owners.
	users := make(map[int64]*User, len(userIdSet))
	repos := make(map[int64]*Repository, len(set))
	if err := e.
		Where("id > 0").
		In("id", keysInt64(userIdSet)).
		Cols("id", "lower_name", "name", "full_name", "email").
		Find(&users); err != nil {
		return fmt.Errorf("find users: %v", err)
	}
	if err := e.
		Where("id > 0").
		In("id", keysInt64(set)).
		Cols("id", "owner_id", "owner_name", "lower_name", "name", "description", "alias", "lower_alias", "is_private").
		Find(&repos); err != nil {
		return fmt.Errorf("find repos: %v", err)
	}

	for i := range datasets {
		datasets[i].User = users[datasets[i].UserID]
		datasets[i].Repo = repos[datasets[i].RepoID]
	}
	return nil
}
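
// loadAttachmentAttributes attaches the attachments visible to opts.User to each
// dataset: public attachments are always added, private ones only when the user
// owns the repository, is a collaborator or belongs to one of its teams.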
func (datasets DatasetList) loadAttachmentAttributes(opts *SearchDatasetOptions) error {
	if len(datasets) == 0 {
		return nil
	}
	datasetIDs := make([]int64, len(datasets))
	for i := range datasets {
		datasetIDs[i] = datasets[i].ID
	}
	attachments, err := AttachmentsByDatasetOption(datasetIDs, opts)
	if err != nil {
		return fmt.Errorf("GetAttachmentsByDatasetIds failed error: %v", err)
	}

	permissionMap := make(map[int64]bool, len(datasets))
	for _, attachment := range attachments {
		for i := range datasets {
			if attachment.DatasetID == datasets[i].ID {
				if !attachment.IsPrivate {
					datasets[i].Attachments = append(datasets[i].Attachments, attachment)
				} else {
					permission, ok := permissionMap[datasets[i].ID]
					if !ok {
						permission = false
						datasets[i].Repo.GetOwner()
						if !permission {
							if datasets[i].Repo.OwnerID == opts.User.ID {
								permission = true
							} else {
								isCollaborator, _ := datasets[i].Repo.IsCollaborator(opts.User.ID)
								isInRepoTeam, _ := datasets[i].Repo.IsInRepoTeam(opts.User.ID)
								if isCollaborator || isInRepoTeam {
									permission = true
								}
							}
						}
						permissionMap[datasets[i].ID] = permission
					}
					if permission {
						datasets[i].Attachments = append(datasets[i].Attachments, attachment)
					}
				}
			}
		}
	}

	for i := range datasets {
		if datasets[i].Attachments == nil {
			datasets[i].Attachments = []*Attachment{}
		}
		datasets[i].Repo.Owner = nil
	}
	return nil
}
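
// SearchDatasetOptions collects all filters understood by SearchDataset and
// SearchDatasetCondition.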
type SearchDatasetOptions struct {
	Keyword              string
	OwnerID              int64
	User                 *User
	RepoID               int64
	IncludePublic        bool
	RecommendOnly        bool
	Category             string
	Task                 string
	License              string
	DatasetIDs           []int64
	ExcludeDatasetId     int64
	ListOptions
	SearchOrderBy
	IsOwner              bool
	StarByMe             bool
	CloudBrainType       int // 0: cloudbrain, 1: modelarts, -1: all
	PublicOnly           bool
	JustNeedZipFile      bool
	NeedAttachment       bool
	UploadAttachmentByMe bool
	QueryReference       bool
}
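
// CreateDataset inserts a new dataset; a repository can hold at most one dataset.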
func CreateDataset(dataset *Dataset) (err error) {
	sess := x.NewSession()
	defer sess.Close()
	if err := sess.Begin(); err != nil {
		return err
	}

	datasetByRepoId := &Dataset{RepoID: dataset.RepoID}
	has, err := sess.Get(datasetByRepoId)
	if err != nil {
		return err
	}
	if has {
		return fmt.Errorf("The dataset already exists.")
	}

	if _, err = sess.Insert(dataset); err != nil {
		return err
	}
	return sess.Commit()
}

func RecommendDataset(dataSetId int64, recommend bool) error {
	dataset := Dataset{Recommend: recommend}
	_, err := x.ID(dataSetId).Cols("recommend").Update(dataset)
	return err
}
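
// SearchDataset builds the condition for opts and runs the paged query.
// A minimal usage sketch (field values are illustrative only; ListOptions and
// the order-by string are assumptions based on how the query is built below):
//
//	datasets, total, err := SearchDataset(&SearchDatasetOptions{
//		Keyword:        "mnist",
//		IncludePublic:  true,
//		NeedAttachment: true,
//		CloudBrainType: -1, // do not filter attachments by type
//		ListOptions:    ListOptions{Page: 1, PageSize: 10},
//		SearchOrderBy:  "updated_unix DESC",
//	})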
func SearchDataset(opts *SearchDatasetOptions) (DatasetList, int64, error) {
	cond := SearchDatasetCondition(opts)
	return SearchDatasetByCondition(opts, cond)
}
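
// SearchDatasetCondition translates opts into a builder.Cond. Deleted datasets
// are always excluded; the remaining visibility rules depend on PublicOnly,
// IncludePublic, OwnerID and the star/upload flags.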
func SearchDatasetCondition(opts *SearchDatasetOptions) builder.Cond {
	var cond = builder.NewCond()
	cond = cond.And(builder.Neq{"dataset.status": DatasetStatusDeleted})
	cond = generateFilterCond(opts, cond)

	if opts.RepoID > 0 {
		cond = cond.And(builder.Eq{"dataset.repo_id": opts.RepoID})
	}
	if opts.ExcludeDatasetId > 0 {
		cond = cond.And(builder.Neq{"dataset.id": opts.ExcludeDatasetId})
	}

	if opts.PublicOnly {
		cond = cond.And(builder.Eq{"dataset.status": DatasetStatusPublic})
		cond = cond.And(builder.Eq{"attachment.is_private": false})
	} else if opts.IncludePublic {
		cond = cond.And(builder.Eq{"dataset.status": DatasetStatusPublic})
		cond = cond.And(builder.Eq{"attachment.is_private": false})
		if opts.OwnerID > 0 {
			subCon := builder.NewCond()
			subCon = subCon.And(builder.Eq{"repository.owner_id": opts.OwnerID})
			subCon = generateFilterCond(opts, subCon)
			cond = cond.Or(subCon)
		}
	} else if opts.OwnerID > 0 && !opts.StarByMe && !opts.UploadAttachmentByMe {
		cond = cond.And(builder.Eq{"repository.owner_id": opts.OwnerID})
		if !opts.IsOwner {
			cond = cond.And(builder.Eq{"dataset.status": DatasetStatusPublic})
			cond = cond.And(builder.Eq{"attachment.is_private": false})
		}
	}

	if len(opts.DatasetIDs) > 0 {
		if opts.StarByMe || (opts.RepoID == 0 && opts.QueryReference) {
			cond = cond.And(builder.In("dataset.id", opts.DatasetIDs))
		} else {
			subCon := builder.NewCond()
			subCon = subCon.And(builder.In("dataset.id", opts.DatasetIDs))
			subCon = generateFilterCond(opts, subCon)
			cond = cond.Or(subCon)
		}
	} else {
		if opts.StarByMe {
			cond = cond.And(builder.Eq{"dataset.id": -1})
		}
	}
	return cond
}
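
// generateFilterCond adds the keyword, category, task, license, recommend and
// attachment-level filters that several branches of the search condition share.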
func generateFilterCond(opts *SearchDatasetOptions, cond builder.Cond) builder.Cond {
	if len(opts.Keyword) > 0 {
		cond = cond.And(builder.Or(builder.Like{"LOWER(dataset.title)", strings.ToLower(opts.Keyword)}, builder.Like{"LOWER(dataset.description)", strings.ToLower(opts.Keyword)}))
	}
	if len(opts.Category) > 0 {
		cond = cond.And(builder.Eq{"dataset.category": opts.Category})
	}
	if len(opts.Task) > 0 {
		cond = cond.And(builder.Eq{"dataset.task": opts.Task})
	}
	if len(opts.License) > 0 {
		cond = cond.And(builder.Eq{"dataset.license": opts.License})
	}
	if opts.RecommendOnly {
		cond = cond.And(builder.Eq{"dataset.recommend": opts.RecommendOnly})
	}
	if opts.JustNeedZipFile {
		cond = cond.And(builder.Gt{"attachment.decompress_state": 0})
	}
	if opts.CloudBrainType >= 0 {
		cond = cond.And(builder.Eq{"attachment.type": opts.CloudBrainType})
	}
	if opts.UploadAttachmentByMe {
		cond = cond.And(builder.Eq{"attachment.uploader_id": opts.User.ID})
	}
	return cond
}
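
// SearchDatasetByCondition counts and pages the datasets matching cond, then
// loads their owners, repositories and, when requested, their attachments.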
func SearchDatasetByCondition(opts *SearchDatasetOptions, cond builder.Cond) (DatasetList, int64, error) {
	if opts.Page <= 0 {
		opts.Page = 1
	}

	var err error
	sess := x.NewSession()
	defer sess.Close()

	datasets := make(DatasetList, 0, opts.PageSize)
	selectColumnsSql := "distinct dataset.id,dataset.title, dataset.status, dataset.category, dataset.description, dataset.download_times, dataset.license, dataset.task, dataset.release_id, dataset.user_id, dataset.repo_id, dataset.created_unix,dataset.updated_unix,dataset.num_stars,dataset.recommend,dataset.use_count"

	count, err := sess.Distinct("dataset.id").Join("INNER", "repository", "repository.id = dataset.repo_id").
		Join("INNER", "attachment", "attachment.dataset_id=dataset.id").
		Where(cond).Count(new(Dataset))
	if err != nil {
		return nil, 0, fmt.Errorf("Count: %v", err)
	}

	builderQuery := builder.Dialect(setting.Database.Type).Select("id", "title", "status", "category", "description", "download_times", "license", "task", "release_id", "user_id", "repo_id", "created_unix", "updated_unix", "num_stars", "recommend", "use_count").From(builder.Dialect(setting.Database.Type).Select(selectColumnsSql).From("dataset").Join("INNER", "repository", "repository.id = dataset.repo_id").
		Join("INNER", "attachment", "attachment.dataset_id=dataset.id").
		Where(cond), "d").OrderBy(opts.SearchOrderBy.String())
	if opts.PageSize > 0 {
		builderQuery.Limit(opts.PageSize, (opts.Page-1)*opts.PageSize)
	}

	if err = sess.SQL(builderQuery).Find(&datasets); err != nil {
		return nil, 0, fmt.Errorf("Dataset: %v", err)
	}

	if err = datasets.loadAttributes(sess); err != nil {
		return nil, 0, fmt.Errorf("LoadAttributes: %v", err)
	}

	if opts.NeedAttachment {
		if err = datasets.loadAttachmentAttributes(opts); err != nil {
			return nil, 0, fmt.Errorf("LoadAttributes: %v", err)
		}
	}
	return datasets, count, nil
}
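
// datasetMetaSearch keeps dataset IDs and their datasets sorted in lockstep so
// that attachments fetched in ID order can be merge-joined back onto the datasets.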
type datasetMetaSearch struct {
	ID  []int64
	Rel []*Dataset
}

func (s datasetMetaSearch) Len() int {
	return len(s.ID)
}

func (s datasetMetaSearch) Swap(i, j int) {
	s.ID[i], s.ID[j] = s.ID[j], s.ID[i]
	s.Rel[i], s.Rel[j] = s.Rel[j], s.Rel[i]
}

func (s datasetMetaSearch) Less(i, j int) bool {
	return s.ID[i] < s.ID[j]
}
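
// GetDatasetAttachments loads the attachments of the given datasets, optionally
// filtered by cloud brain type.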
func GetDatasetAttachments(typeCloudBrain int, isSigned bool, user *User, rels ...*Dataset) (err error) {
	return getDatasetAttachments(x, typeCloudBrain, isSigned, user, rels...)
}

func getDatasetAttachments(e Engine, typeCloudBrain int, isSigned bool, user *User, rels ...*Dataset) (err error) {
	if len(rels) == 0 {
		return
	}

	// To keep this as efficient as possible, sort all datasets by ID,
	// select attachments by dataset ID,
	// then merge join them.

	// Sort
	var sortedRels = datasetMetaSearch{ID: make([]int64, len(rels)), Rel: make([]*Dataset, len(rels))}
	var attachments []*Attachment
	for index, element := range rels {
		element.Attachments = []*Attachment{}
		sortedRels.ID[index] = element.ID
		sortedRels.Rel[index] = element
	}
	sort.Sort(sortedRels)

	// Select attachments
	if typeCloudBrain == -1 {
		err = e.
			Asc("dataset_id").
			In("dataset_id", sortedRels.ID).
			Find(&attachments, Attachment{})
		if err != nil {
			return err
		}
	} else {
		err = e.
			Asc("dataset_id").
			In("dataset_id", sortedRels.ID).
			And("type = ?", typeCloudBrain).
			Find(&attachments, Attachment{})
		if err != nil {
			return err
		}
	}

	// Merge join
	var currentIndex = 0
	for _, attachment := range attachments {
		for sortedRels.ID[currentIndex] < attachment.DatasetID {
			currentIndex++
		}

		fileChunks := make([]*FileChunk, 0, 10)
		err = e.
			Where("uuid = ?", attachment.UUID).
			Find(&fileChunks)
		if err != nil {
			return err
		}
		if len(fileChunks) > 0 {
			attachment.Md5 = fileChunks[0].Md5
		} else {
			log.Error("has attachment record, but has no file_chunk record")
			attachment.Md5 = "no_record"
		}

		attachment.CanDel = CanDelAttachment(isSigned, user, attachment)
		sortedRels.Rel[currentIndex].Attachments = append(sortedRels.Rel[currentIndex].Attachments, attachment)
	}

	return
}

// AddDatasetAttachments binds the attachments with the given UUIDs to a dataset.
func AddDatasetAttachments(DatasetID int64, attachmentUUIDs []string) (err error) {
	// Check attachments
	attachments, err := GetAttachmentsByUUIDs(attachmentUUIDs)
	if err != nil {
		return fmt.Errorf("GetAttachmentsByUUIDs [uuids: %v]: %v", attachmentUUIDs, err)
	}

	for i := range attachments {
		attachments[i].DatasetID = DatasetID
		// Only the assigned (non-zero) fields should be written, so AllCols() is
		// deliberately not used here.
		if _, err = x.ID(attachments[i].ID).Update(attachments[i]); err != nil {
			return fmt.Errorf("update attachment [%d]: %v", attachments[i].ID, err)
		}
	}

	return
}
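
// UpdateDataset writes back all columns of the given dataset.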
func UpdateDataset(ctx DBContext, rel *Dataset) error {
	_, err := ctx.e.ID(rel.ID).AllCols().Update(rel)
	return err
}
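
// IncreaseDatasetUseCount bumps use_count for every dataset referenced by the
// given attachment UUIDs, which arrive as a single ";"-separated string, e.g.
// (UUIDs below are illustrative only):
//
//	IncreaseDatasetUseCount("0d7466a9-...;2b3c4d5e-...")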
func IncreaseDatasetUseCount(uuid string) {
	IncreaseAttachmentUseNumber(uuid)

	attachments, _ := GetAttachmentsByUUIDs(strings.Split(uuid, ";"))

	countMap := make(map[int64]int)
	for _, attachment := range attachments {
		value, ok := countMap[attachment.DatasetID]
		if ok {
			countMap[attachment.DatasetID] = value + 1
		} else {
			countMap[attachment.DatasetID] = 1
		}
	}

	for key, value := range countMap {
		x.Exec("UPDATE `dataset` SET use_count=use_count+? WHERE id=?", value, key)
	}
}

// GetDatasetByID returns Dataset with given ID.
func GetDatasetByID(id int64) (*Dataset, error) {
	rel := new(Dataset)
	has, err := x.
		ID(id).
		Get(rel)
	if err != nil {
		return nil, err
	} else if !has {
		return nil, ErrDatasetNotExist{id}
	}
	return rel, nil
}

func GetDatasetByRepo(repo *Repository) (*Dataset, error) {
	dataset := &Dataset{RepoID: repo.ID}
	has, err := x.Get(dataset)
	if err != nil {
		return nil, err
	}
	if has {
		return dataset, nil
	} else {
		return nil, ErrNotExist{repo.ID}
	}
}

func GetDatasetStarByUser(user *User) ([]*DatasetStar, error) {
	datasetStars := make([]*DatasetStar, 0)
	err := x.Cols("id", "uid", "dataset_id", "created_unix").Where("uid=?", user.ID).Find(&datasetStars)
	return datasetStars, err
}
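
// DeleteDataset removes a dataset, but only when it belongs to the given user.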
func DeleteDataset(datasetID int64, uid int64) error {
	var err error
	sess := x.NewSession()
	defer sess.Close()
	if err = sess.Begin(); err != nil {
		return err
	}

	dataset := &Dataset{ID: datasetID, UserID: uid}
	has, err := sess.Get(dataset)
	if err != nil {
		return err
	} else if !has {
		return errors.New("not found")
	}

	if cnt, err := sess.ID(datasetID).Delete(new(Dataset)); err != nil {
		return err
	} else if cnt != 1 {
		return errors.New("not found")
	}

	if err = sess.Commit(); err != nil {
		sess.Close()
		return fmt.Errorf("Commit: %v", err)
	}

	return nil
}
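
// GetOwnerDatasetByID returns the dataset when it is public or when the caller
// created it; otherwise it reports an error.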
func GetOwnerDatasetByID(id int64, user *User) (*Dataset, error) {
	dataset, err := GetDatasetByID(id)
	if err != nil {
		return nil, err
	}
	if !dataset.IsPrivate() {
		return dataset, nil
	}
	if dataset.IsPrivate() && user != nil && user.ID == dataset.UserID {
		return dataset, nil
	}
	return nil, errors.New("dataset not found")
}

func IncreaseDownloadCount(datasetID int64) error {
	// Update download count.
	if _, err := x.Exec("UPDATE `dataset` SET download_times=download_times+1 WHERE id=?", datasetID); err != nil {
		return fmt.Errorf("increase dataset count: %v", err)
	}

	return nil
}

func GetCollaboratorDatasetIdsByUserID(userID int64) []int64 {
	var datasets []int64
	_ = x.Table("dataset").Join("INNER", "collaboration", "dataset.repo_id = collaboration.repo_id and collaboration.mode>0 and collaboration.user_id=?", userID).
		Cols("dataset.id").Find(&datasets)
	return datasets
}

func GetTeamDatasetIdsByUserID(userID int64) []int64 {
	var datasets []int64
	_ = x.Table("dataset").Join("INNER", "team_repo", "dataset.repo_id = team_repo.repo_id").
		Join("INNER", "team_user", "team_repo.team_id=team_user.team_id and team_user.uid=?", userID).
		Cols("dataset.id").Find(&datasets)
	return datasets
}

func UpdateDatasetCreateUser(ID int64, user *User) error {
	_, err := x.Where("id = ?", ID).Cols("user_id").Update(&Dataset{
		UserID: user.ID,
	})
	if err != nil {
		return err
	}
	return nil
}
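
// QueryDatasetGroupByTask returns the seven most frequent non-empty task values
// together with the number of datasets using each of them.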
func QueryDatasetGroupByTask() ([]map[string]interface{}, error) {
	rows, err := x.QueryInterface("SELECT count(*) as total,task FROM public.dataset where task <>'' group by task order by total desc limit 7")
	if err != nil {
		return nil, err
	}
	return rows, nil
}