
Merge pull request 'Resolve the external network dependency of NPU training jobs on the intelligent computing network' (#3134) from npu-model-upload into V20221102

Reviewed-on: https://git.openi.org.cn/OpenI/aiforge/pulls/3134
Reviewed-by: ychao_1983 <ychao_1983@sina.com>
fix-mod
ychao_1983 · 2 years ago · commit 39f646f776
100 changed files with 1212 additions and 76 deletions
  1. +6 -6  go.mod
  2. +21 -19  go.sum
  3. +2 -0  models/cloudbrain.go
  4. +1 -0  models/models.go
  5. +70 -0  models/schedule_record.go
  6. +14 -0  modules/cron/tasks_basic.go
  7. +42 -6  modules/grampus/grampus.go
  8. +22 -1  modules/setting/setting.go
  9. +23 -27  modules/storage/obs.go
  10. +93 -0  modules/urfs_client/config/constants.go
  11. +66 -0  modules/urfs_client/config/dfstore.go
  12. +32 -0  modules/urfs_client/config/headers.go
  13. +307 -0  modules/urfs_client/dfstore/dfstore.go
  14. +5 -0  modules/urfs_client/objectstorage/mocks/objectstorage_mock.go
  15. +47 -0  modules/urfs_client/objectstorage/objectstorage.go
  16. +112 -0  modules/urfs_client/urchin/schedule.go
  17. +276 -0  modules/urfs_client/urchin/urchinfs.go
  18. +3 -0  routers/api/v1/repo/cloudbrain.go
  19. +37 -1  routers/api/v1/repo/modelarts.go
  20. +6 -0  routers/repo/cloudbrain.go
  21. +22 -11  routers/repo/grampus.go
  22. +1 -1  templates/repo/cloudbrain/inference/show.tmpl
  23. +1 -1  templates/repo/cloudbrain/trainjob/show.tmpl
  24. +1 -1  templates/repo/grampus/trainjob/show.tmpl
  25. +1 -1  templates/repo/modelarts/inferencejob/show.tmpl
  26. +1 -1  templates/repo/modelarts/trainjob/show.tmpl
  27. +0 -0  vendor/cloud.google.com/go/LICENSE
  28. +0 -0  vendor/cloud.google.com/go/compute/metadata/metadata.go
  29. +0 -0  vendor/cloud.google.com/go/iam/iam.go
  30. +0 -0  vendor/cloud.google.com/go/internal/optional/optional.go
  31. +0 -0  vendor/cloud.google.com/go/internal/version/update_version.sh
  32. +0 -0  vendor/cloud.google.com/go/internal/version/version.go
  33. +0 -0  vendor/cloud.google.com/go/pubsub/README.md
  34. +0 -0  vendor/cloud.google.com/go/pubsub/apiv1/README.md
  35. +0 -0  vendor/cloud.google.com/go/pubsub/apiv1/doc.go
  36. +0 -0  vendor/cloud.google.com/go/pubsub/apiv1/iam.go
  37. +0 -0  vendor/cloud.google.com/go/pubsub/apiv1/path_funcs.go
  38. +0 -0  vendor/cloud.google.com/go/pubsub/apiv1/publisher_client.go
  39. +0 -0  vendor/cloud.google.com/go/pubsub/apiv1/subscriber_client.go
  40. +0 -0  vendor/cloud.google.com/go/pubsub/debug.go
  41. +0 -0  vendor/cloud.google.com/go/pubsub/doc.go
  42. +0 -0  vendor/cloud.google.com/go/pubsub/flow_controller.go
  43. +0 -0  vendor/cloud.google.com/go/pubsub/internal/distribution/distribution.go
  44. +0 -0  vendor/cloud.google.com/go/pubsub/iterator.go
  45. +0 -0  vendor/cloud.google.com/go/pubsub/message.go
  46. +0 -0  vendor/cloud.google.com/go/pubsub/nodebug.go
  47. +0 -0  vendor/cloud.google.com/go/pubsub/pubsub.go
  48. +0 -0  vendor/cloud.google.com/go/pubsub/pullstream.go
  49. +0 -0  vendor/cloud.google.com/go/pubsub/service.go
  50. +0 -0  vendor/cloud.google.com/go/pubsub/snapshot.go
  51. +0 -0  vendor/cloud.google.com/go/pubsub/subscription.go
  52. +0 -0  vendor/cloud.google.com/go/pubsub/topic.go
  53. +0 -0  vendor/cloud.google.com/go/pubsub/trace.go
  54. +0 -0  vendor/gitea.com/jolheiser/gitea-vet/.gitignore
  55. +0 -0  vendor/gitea.com/jolheiser/gitea-vet/LICENSE
  56. +0 -0  vendor/gitea.com/jolheiser/gitea-vet/Makefile
  57. +0 -0  vendor/gitea.com/jolheiser/gitea-vet/README.md
  58. +0 -0  vendor/gitea.com/jolheiser/gitea-vet/checks/imports.go
  59. +0 -0  vendor/gitea.com/jolheiser/gitea-vet/checks/license.go
  60. +0 -0  vendor/gitea.com/jolheiser/gitea-vet/go.mod
  61. +0 -0  vendor/gitea.com/jolheiser/gitea-vet/go.sum
  62. +0 -0  vendor/gitea.com/jolheiser/gitea-vet/main.go
  63. +0 -0  vendor/gitea.com/lunny/levelqueue/.drone.yml
  64. +0 -0  vendor/gitea.com/lunny/levelqueue/.gitignore
  65. +0 -0  vendor/gitea.com/lunny/levelqueue/LICENSE
  66. +0 -0  vendor/gitea.com/lunny/levelqueue/README.md
  67. +0 -0  vendor/gitea.com/lunny/levelqueue/error.go
  68. +0 -0  vendor/gitea.com/lunny/levelqueue/go.mod
  69. +0 -0  vendor/gitea.com/lunny/levelqueue/go.sum
  70. +0 -0  vendor/gitea.com/lunny/levelqueue/queue.go
  71. +0 -0  vendor/gitea.com/lunny/levelqueue/set.go
  72. +0 -0  vendor/gitea.com/lunny/levelqueue/uniquequeue.go
  73. +0 -0  vendor/gitea.com/macaron/binding/.drone.yml
  74. +0 -0  vendor/gitea.com/macaron/binding/.gitignore
  75. +0 -0  vendor/gitea.com/macaron/binding/LICENSE
  76. +0 -0  vendor/gitea.com/macaron/binding/README.md
  77. +0 -0  vendor/gitea.com/macaron/binding/binding.go
  78. +0 -0  vendor/gitea.com/macaron/binding/errors.go
  79. +0 -0  vendor/gitea.com/macaron/binding/go.mod
  80. +0 -0  vendor/gitea.com/macaron/binding/go.sum
  81. +0 -0  vendor/gitea.com/macaron/cache/.drone.yml
  82. +0 -0  vendor/gitea.com/macaron/cache/.gitignore
  83. +0 -0  vendor/gitea.com/macaron/cache/LICENSE
  84. +0 -0  vendor/gitea.com/macaron/cache/README.md
  85. +0 -0  vendor/gitea.com/macaron/cache/cache.go
  86. +0 -0  vendor/gitea.com/macaron/cache/file.go
  87. +0 -0  vendor/gitea.com/macaron/cache/go.mod
  88. +0 -0  vendor/gitea.com/macaron/cache/go.sum
  89. +0 -0  vendor/gitea.com/macaron/cache/memcache/memcache.go
  90. +0 -0  vendor/gitea.com/macaron/cache/memcache/memcache.goconvey
  91. +0 -0  vendor/gitea.com/macaron/cache/memory.go
  92. +0 -0  vendor/gitea.com/macaron/cache/redis/redis.go
  93. +0 -0  vendor/gitea.com/macaron/cache/redis/redis.goconvey
  94. +0 -0  vendor/gitea.com/macaron/cache/utils.go
  95. +0 -0  vendor/gitea.com/macaron/captcha/.drone.yml
  96. +0 -0  vendor/gitea.com/macaron/captcha/LICENSE
  97. +0 -0  vendor/gitea.com/macaron/captcha/README.md
  98. +0 -0  vendor/gitea.com/macaron/captcha/captcha.go
  99. +0 -0  vendor/gitea.com/macaron/captcha/go.mod
  100. +0 -0  vendor/gitea.com/macaron/captcha/go.sum

go.mod  (+6, -6)

@@ -51,6 +51,7 @@ require (
github.com/go-enry/go-enry/v2 v2.3.0
github.com/go-git/go-billy/v5 v5.0.0
github.com/go-git/go-git/v5 v5.0.0
github.com/go-http-utils/headers v0.0.0-20181008091004-fed159eddc2a
github.com/go-ini/ini v1.56.0 // indirect
github.com/go-macaron/auth v0.0.0-20161228062157-884c0e6c9b92
github.com/go-openapi/jsonreference v0.19.3 // indirect
@@ -61,6 +62,7 @@ require (
github.com/gobwas/glob v0.2.3
github.com/gogs/chardet v0.0.0-20191104214054-4b6791f73a28
github.com/gogs/cron v0.0.0-20171120032916-9f6c956d3e14
github.com/golang/mock v1.6.0 // indirect
github.com/golang/protobuf v1.4.1 // indirect
github.com/gomodule/redigo v2.0.0+incompatible
github.com/google/go-github/v24 v24.0.1
@@ -105,7 +107,6 @@ require (
github.com/prometheus/procfs v0.0.4 // indirect
github.com/quasoft/websspi v1.0.0
github.com/remyoudompheng/bigfft v0.0.0-20190321074620-2f0d2b0e0001 // indirect
github.com/robfig/cron/v3 v3.0.1
github.com/satori/go.uuid v1.2.0
github.com/sergi/go-diff v1.1.0
github.com/shurcooL/httpfs v0.0.0-20190527155220-6a4d4a70508b // indirect
@@ -125,13 +126,12 @@ require (
github.com/yuin/goldmark-highlighting v0.0.0-20220208100518-594be1970594
github.com/yuin/goldmark-meta v1.1.0
golang.org/x/crypto v0.0.0-20200510223506-06a226fb4e37
golang.org/x/mod v0.3.0 // indirect
golang.org/x/net v0.0.0-20200513185701-a91f0712d120
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4
golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d
golang.org/x/sys v0.0.0-20200509044756-6aff5f38e54f
golang.org/x/text v0.3.2
golang.org/x/sys v0.0.0-20210510120138-977fb7262007
golang.org/x/text v0.3.3
golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1 // indirect
golang.org/x/tools v0.0.0-20200515220128-d3bf790afa53
golang.org/x/tools v0.1.1
google.golang.org/appengine v1.6.5 // indirect
gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc // indirect
gopkg.in/asn1-ber.v1 v1.0.0-20150924051756-4e86f4367175 // indirect


go.sum  (+21, -19)

@@ -262,6 +262,8 @@ github.com/go-git/go-git-fixtures/v4 v4.0.1 h1:q+IFMfLx200Q3scvt2hN79JsEzy4AmBTp
github.com/go-git/go-git-fixtures/v4 v4.0.1/go.mod h1:m+ICp2rF3jDhFgEZ/8yziagdT1C+ZpZcrJjappBCDSw=
github.com/go-git/go-git/v5 v5.0.0 h1:k5RWPm4iJwYtfWoxIJy4wJX9ON7ihPeZZYC1fLYDnpg=
github.com/go-git/go-git/v5 v5.0.0/go.mod h1:oYD8y9kWsGINPFJoLdaScGCN6dlKg23blmClfZwtUVA=
github.com/go-http-utils/headers v0.0.0-20181008091004-fed159eddc2a h1:v6zMvHuY9yue4+QkG/HQ/W67wvtQmWJ4SDo9aK/GIno=
github.com/go-http-utils/headers v0.0.0-20181008091004-fed159eddc2a/go.mod h1:I79BieaU4fxrw4LMXby6q5OS9XnoR9UIKLOzDFjUmuw=
github.com/go-ini/ini v1.56.0 h1:6HjxSjqdmgnujDPhlzR4a44lxK3w03WPN8te0SoUSeM=
github.com/go-ini/ini v1.56.0/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8=
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
@@ -358,7 +360,10 @@ github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4er
github.com/golang/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:tluoj9z5200jBnyusfRPU2LqT6J+DAorxEvtC7LHB+E=
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.3.1 h1:qGJ6qTW+x6xX/my+8YUVl4WNpX9B7+/l2tRsHGZ7f2s=
github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y=
github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc=
github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs=
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
@@ -404,8 +409,8 @@ github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5m
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gopherjs/gopherjs v0.0.0-20181103185306-d547d1d9531e/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gopherjs/gopherjs v0.0.0-20190430165422-3e4dfb77656c/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gopherjs/gopherjs v0.0.0-20190910122728-9d188e94fb99 h1:twflg0XRTjwKpxb/jFExr4HGq6on2dEOmnL6FV+fgPw=
github.com/gopherjs/gopherjs v0.0.0-20190910122728-9d188e94fb99/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gopherjs/gopherjs v0.0.0-20200217142428-fce0ec30dd00 h1:l5lAOZEym3oK3SQ2HBHWsJUfbNBiTXJDeW2QDxw9AQ0=
github.com/gopherjs/gopherjs v0.0.0-20200217142428-fce0ec30dd00/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gorilla/context v1.1.1 h1:AWwleXJkX/nhcU9bZSnZoi3h/qGYqQAGhq6zZe/aQW8=
github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg=
@@ -468,7 +473,6 @@ github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqx
github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo=
github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.9 h1:9yzud/Ht36ygwatGx56VwCZtlI/2AD15T1X2sjSuGns=
github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.10 h1:Kz6Cvnvv2wGdaG/V8yMvfkmNiXq9Ya2KUv4rouJJr68=
github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
@@ -662,8 +666,6 @@ github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqn
github.com/rcrowley/go-metrics v0.0.0-20190826022208-cac0b30c2563/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
github.com/remyoudompheng/bigfft v0.0.0-20190321074620-2f0d2b0e0001 h1:YDeskXpkNDhPdWN3REluVa46HQOVuVkjkd2sWnrABNQ=
github.com/remyoudompheng/bigfft v0.0.0-20190321074620-2f0d2b0e0001/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs=
github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro=
github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
github.com/russross/blackfriday v1.5.2 h1:HyvC0ARfnZBqnXwABFeSZHpKvJHJJfPz81GNueLj0oo=
github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
@@ -711,14 +713,12 @@ github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPx
github.com/sirupsen/logrus v1.5.0/go.mod h1:+F7Ogzej0PZc/94MaYx/nvG9jOFMD2osvC3s+Squfpo=
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
github.com/smartystreets/assertions v0.0.0-20190116191733-b6c0e53d7304/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
github.com/smartystreets/assertions v1.0.1 h1:voD4ITNjPL5jjBfgR/r8fPIIBrliWrWHeiJApdr3r4w=
github.com/smartystreets/assertions v1.0.1/go.mod h1:kHHU4qYBaI3q23Pp3VPrmWhuIUrLW/7eUrw0BU5VaoM=
github.com/smartystreets/assertions v1.1.0 h1:MkTeG1DMwsrdH7QtLXy5W+fUxWq+vmb6cLmyJ7aRtF0=
github.com/smartystreets/assertions v1.1.0/go.mod h1:tcbTF8ujkAEcZ8TElKY+i30BzYlVhC/LOxJk7iOWnoo=
github.com/smartystreets/go-aws-auth v0.0.0-20180515143844-0c1422d1fdb9/go.mod h1:SnhjPscd9TpLiy1LpzGSKh3bXCfxxXuqd9xmQJy3slM=
github.com/smartystreets/goconvey v0.0.0-20181108003508-044398e4856c/go.mod h1:XDJAKZRPZ1CvBcN2aX5YOUTYGHki24fSF0Iv48Ibg0s=
github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
github.com/smartystreets/goconvey v0.0.0-20190731233626-505e41936337 h1:WN9BUFbdyOsSH/XohnWpXOlq9NBD5sGAB2FciQMUEe8=
github.com/smartystreets/goconvey v0.0.0-20190731233626-505e41936337/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s=
github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
@@ -749,7 +749,6 @@ github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+
github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
@@ -804,20 +803,16 @@ github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q
github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q=
github.com/yohcop/openid-go v1.0.0 h1:EciJ7ZLETHR3wOtxBvKXx9RV6eyHZpCaSZ1inbBaUXE=
github.com/yohcop/openid-go v1.0.0/go.mod h1:/408xiwkeItSPJZSTPF7+VtZxPkPrRRpRNK2vjGh6yI=
github.com/yuin/goldmark v1.1.7/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.1.27 h1:nqDD4MMMQA0lmWq03Z2/myGPYLQoXtmi0rGVs95ntbo=
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.1.30 h1:j4d4Lw3zqZelDhBksEo3BnWg9xhXRQGJPPSL6OApZjI=
github.com/yuin/goldmark v1.1.30/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
github.com/yuin/goldmark v1.4.5/go.mod h1:rmuwmfZ0+bvzB24eSC//bk1R1Zp3hM0OXYv/G2LIilg=
github.com/yuin/goldmark v1.4.6/go.mod h1:rmuwmfZ0+bvzB24eSC//bk1R1Zp3hM0OXYv/G2LIilg=
github.com/yuin/goldmark v1.4.13 h1:fVcFKWvrslecOb/tg+Cc05dkeYx540o0FuFt3nUVDoE=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
github.com/yuin/goldmark-highlighting v0.0.0-20220208100518-594be1970594 h1:yHfZyN55+5dp1wG7wDKv8HQ044moxkyGq12KFFMFDxg=
github.com/yuin/goldmark-highlighting v0.0.0-20220208100518-594be1970594/go.mod h1:U9ihbh+1ZN7fR5Se3daSPoz1CGF9IYtSvWwVQtnzGHU=
github.com/yuin/goldmark-meta v0.0.0-20191126180153-f0638e958b60 h1:gZucqLjL1eDzVWrXj4uiWeMbAopJlBR2mKQAsTGdPwo=
github.com/yuin/goldmark-meta v0.0.0-20191126180153-f0638e958b60/go.mod h1:i9VhcIHN2PxXMbQrKqXNueok6QNONoPjNMoj9MygVL0=
github.com/yuin/goldmark-meta v1.1.0 h1:pWw+JLHGZe8Rk0EGsMVssiNb/AaPMHfSRszZeUeiOUc=
github.com/yuin/goldmark-meta v1.1.0/go.mod h1:U4spWENafuA7Zyg+Lj5RqK/MF+ovMYtBvXi1lBb2VP0=
github.com/ziutek/mymysql v1.5.4 h1:GB0qdRGsTwQSBVYuVShFBKaXSnSnYYC2d9knnE1LHFs=
@@ -859,14 +854,11 @@ golang.org/x/crypto v0.0.0-20190927123631-a832865fa7ad/go.mod h1:yigFU9vqHzYiE8U
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20191219195013-becbf705a915/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200429183012-4b2356b1ed79 h1:IaQbIIB2X/Mp/DKctl6ROxz1KyMlKp4uyvL6+kQ7C88=
golang.org/x/crypto v0.0.0-20200429183012-4b2356b1ed79/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200510223506-06a226fb4e37 h1:cg5LA/zNPRzIXIWSCxQW10Rvpy94aQh3LT/ShoCpkHw=
golang.org/x/crypto v0.0.0-20200510223506-06a226fb4e37/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
golang.org/x/image v0.0.0-20190910094157-69e4b8554b2a h1:gHevYm0pO4QUbwy8Dmdr01R5r1BuKtfYqRqF0h/Cbh0=
golang.org/x/image v0.0.0-20190910094157-69e4b8554b2a/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8 h1:hVwzHzIUGRjiF7EcUjqNxk3NCfkPxbDKRdnNE1Rpg0U=
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
@@ -882,6 +874,8 @@ golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzB
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.3.0 h1:RM4zey1++hCTbCVQfnWeKs9/IEsaBLA8vTkd0WVtmH4=
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.2 h1:Gz96sIWK3OalVv/I/qNygP42zyoKp3xptRVCWRFEBvo=
golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/net v0.0.0-20180218175443-cbe0f9307d01/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@@ -913,6 +907,7 @@ golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLL
golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20200513185701-a91f0712d120 h1:EZ3cVSzKOlJxAd8e8YAJ7no8nNypTxexh/YE/xW3ZEY=
golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
golang.org/x/oauth2 v0.0.0-20180620175406-ef147856a6dd/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20181017192945-9dcd33a902f4/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
@@ -929,10 +924,10 @@ golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e h1:vcxGaoTs7kV8m5Np9uUNQin4BrLOthgV7252N8V+FwY=
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a h1:WXEvlFVvvGxCJLG6REjsT03iWnKLEWinaScsxF2Vm2o=
golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20180824143301-4910a1d54f87/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@@ -967,10 +962,16 @@ golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200509044756-6aff5f38e54f h1:mOhmO9WsBaJCNmaZHPtHs9wOcdqdKCjF6OPJlmDM3KI=
golang.org/x/sys v0.0.0-20200509044756-6aff5f38e54f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210510120138-977fb7262007 h1:gG67DSER+11cZvqIMb8S8bt0vZtiN6xWYARwirrOSfE=
golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
@@ -1001,10 +1002,14 @@ golang.org/x/tools v0.0.0-20200325010219-a49f79bcc224/go.mod h1:Sl4aGygMT6LrqrWc
golang.org/x/tools v0.0.0-20200509030707-2212a7e161a5/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.0.0-20200515220128-d3bf790afa53 h1:vmsb6v0zUdmUlXfwKaYrHPPRCV0lHq/IwNIf0ASGjyQ=
golang.org/x/tools v0.0.0-20200515220128-d3bf790afa53/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
golang.org/x/tools v0.1.1 h1:wGiQel/hW0NnEkJUk8lbzkX2gFJU6PFxf1v5OlCfuOs=
golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/api v0.0.0-20180910000450-7ca32eb868bf/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0=
google.golang.org/api v0.0.0-20181030000543-1d582fd0359e/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0=
google.golang.org/api v0.0.0-20181220000619-583d854617af/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0=
@@ -1076,8 +1081,6 @@ gopkg.in/ini.v1 v1.42.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/ini.v1 v1.44.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/ini.v1 v1.44.2/go.mod h1:M3Cogqpuv0QCi3ExAY5V4uOt4qb/R3xZubo9m8lK5wg=
gopkg.in/ini.v1 v1.46.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/ini.v1 v1.52.0 h1:j+Lt/M1oPPejkniCg1TkWE2J3Eh1oZTsHSXzMTzUXn4=
gopkg.in/ini.v1 v1.52.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/ini.v1 v1.56.0 h1:DPMeDvGTM54DXbPkVIZsp19fp/I2K7zwA/itHYHKo8Y=
gopkg.in/ini.v1 v1.56.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/ldap.v3 v3.0.2 h1:R6RBtabK6e1GO0eQKtkyOFbAHO73QesLzI2w2DZ6b9w=
@@ -1098,7 +1101,6 @@ gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bl
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10=
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU=
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=


models/cloudbrain.go  (+2, -0)

@@ -116,6 +116,8 @@ const (
GrampusStatusStopped = "STOPPED"
GrampusStatusUnknown = "UNKNOWN"
GrampusStatusWaiting = "WAITING"

ModelSuffix = "models.zip"
)

const (


models/models.go  (+1, -0)

@@ -161,6 +161,7 @@ func init() {
new(CloudbrainSpec),
new(CloudbrainTemp),
new(DatasetReference),
new(ScheduleRecord),
new(BadgeCategory),
new(Badge),
new(BadgeUser),


models/schedule_record.go  (+70, -0)

@@ -0,0 +1,70 @@
package models

import (
"fmt"
"time"

"code.gitea.io/gitea/modules/timeutil"
)

const (
StorageScheduleSucceed int = iota
StorageScheduleProcessing
StorageScheduleFailed
StorageNoFile
StorageScheduleWaiting
)

type ScheduleRecord struct {
ID int64 `xorm:"pk autoincr"`
CloudbrainID int64 `xorm:"INDEX NOT NULL unique"`
EndPoint string `xorm:"INDEX NOT NULL"`
Bucket string `xorm:"INDEX NOT NULL"`
ObjectKey string `xorm:"INDEX NOT NULL"`
ProxyServer string `xorm:"INDEX NOT NULL"`
Status int `xorm:"INDEX NOT NULL DEFAULT 0"`
CreatedUnix timeutil.TimeStamp `xorm:"created"`
UpdatedUnix timeutil.TimeStamp `xorm:"updated"`
DeletedAt time.Time `xorm:"deleted"`
}

func updateScheduleCols(e Engine, record *ScheduleRecord, cols ...string) error {
_, err := e.ID(record.ID).Cols(cols...).Update(record)
return err
}

func UpdateScheduleCols(record *ScheduleRecord, cols ...string) error {
return updateScheduleCols(x, record, cols...)
}

func GetSchedulingRecord() ([]*ScheduleRecord, error) {
records := make([]*ScheduleRecord, 0, 10)
return records, x.
Where("status = ?", StorageScheduleProcessing).
Limit(100).
Find(&records)
}

func InsertScheduleRecord(record *ScheduleRecord) (_ *ScheduleRecord, err error) {

if _, err := x.Insert(record); err != nil {
return nil, err
}

return record, nil
}

func getScheduleRecordByPrID(e Engine, cloudbrainId int64) (*ScheduleRecord, error) {
record := new(ScheduleRecord)
has, err := e.Where("cloudbrain_id = ?", cloudbrainId).Get(record)
if err != nil {
return nil, err
} else if !has {
return nil, fmt.Errorf("get record by cloudbrain_id failed(%d)", cloudbrainId)
}
return record, nil
}

func GetScheduleRecordByCloudbrainID(cloudbrainId int64) (*ScheduleRecord, error) {
return getScheduleRecordByPrID(x, cloudbrainId)
}
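
A minimal usage sketch of the new model (values are placeholders; it assumes the models package import used elsewhere in this PR):

// Sketch: record a pending back-transfer for an NPU job, then flip it to succeeded later.
func exampleScheduleRecordUsage() error {
    record, err := models.InsertScheduleRecord(&models.ScheduleRecord{
        CloudbrainID: 12345, // placeholder cloudbrain job ID
        EndPoint:     "https://obs.example.com",
        Bucket:       "grampus",
        ObjectKey:    "prefix/jobname/output/models.zip",
        ProxyServer:  "192.0.2.10",
        Status:       models.StorageScheduleProcessing,
    })
    if err != nil {
        return err
    }

    // ...once the transfer is confirmed elsewhere (e.g. by the cron poller below):
    record.Status = models.StorageScheduleSucceed
    return models.UpdateScheduleCols(record, "status")
}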

modules/cron/tasks_basic.go  (+14, -0)

@@ -5,6 +5,7 @@
package cron

import (
"code.gitea.io/gitea/modules/urfs_client/urchin"
"context"
"time"

@@ -222,6 +223,17 @@ func registerSyncCloudbrainStatus() {
})
}

func registerHandleScheduleRecord() {
RegisterTaskFatal("handle_schedule_record", &BaseConfig{
Enabled: true,
RunAtStart: false,
Schedule: "@every 1m",
}, func(ctx context.Context, _ *models.User, _ Config) error {
urchin.HandleScheduleRecords()
return nil
})
}

func registerRewardPeriodTask() {
RegisterTaskFatal("reward_period_task", &BaseConfig{
Enabled: true,
@@ -304,5 +316,7 @@ func initBasicTasks() {
registerCloudbrainPointDeductTask()

registerHandleModelSafetyTask()

registerHandleScheduleRecord()
registerHandleCloudbrainDurationStatistic()
}

modules/grampus/grampus.go  (+42, -6)

@@ -1,16 +1,15 @@
package grampus

import (
"code.gitea.io/gitea/modules/cloudbrain"
"encoding/json"
"strings"

"code.gitea.io/gitea/modules/setting"

"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/cloudbrain"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/notification"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/timeutil"
)

@@ -20,10 +19,15 @@ const (
ProcessorTypeNPU = "npu.huawei.com/NPU"
ProcessorTypeGPU = "nvidia.com/gpu"

GpuWorkDir = "/tmp/"
NpuWorkDir = "/cache/"
GpuWorkDir = "/tmp/"
NpuWorkDir = "/cache/"
NpuLocalLogUrl = "/tmp/train.log"
CommandPrepareScriptNpu = ";mkdir -p output;mkdir -p code;mkdir -p dataset;mkdir -p pretrainmodel;"

CodeArchiveName = "master.zip"

BucketRemote = "grampus"
RemoteModelPath = "/output/" + models.ModelSuffix
)

var (
@@ -33,7 +37,7 @@ var (

SpecialPools *models.SpecialPools

CommandPrepareScript = ";mkdir -p output;mkdir -p code;mkdir -p dataset;mkdir -p pretrainmodel;echo \"start loading script\";wget -q https://git.openi.org.cn/OpenIOSSG/%s/archive/master.zip;" +
CommandPrepareScriptGpu = ";mkdir -p output;mkdir -p code;mkdir -p dataset;mkdir -p pretrainmodel;echo \"start loading script\";wget -q https://git.openi.org.cn/OpenIOSSG/%s/archive/master.zip;" +
"echo \"finish loading script\";unzip -q master.zip;cd %s;chmod 777 downloader_for_obs uploader_for_npu downloader_for_minio uploader_for_gpu;"
)

@@ -273,3 +277,35 @@ func InitSpecialPool() {
json.Unmarshal([]byte(setting.Grampus.SpecialPools), &SpecialPools)
}
}

func GetNpuModelRemoteObsUrl(jobName string) string {
return "s3:///" + BucketRemote + "/" + GetNpuModelObjectKey(jobName)
}

func GetNpuModelObjectKey(jobName string) string {
return setting.CodePathPrefix + jobName + RemoteModelPath
}

func GetRemoteEndPoint(aiCenterID string) string {
var endPoint string
for _, info := range setting.CenterInfos.Info {
if info.CenterID == aiCenterID {
endPoint = info.Endpoint
break
}
}

return endPoint
}

func GetCenterProxy(aiCenterID string) string {
var proxy string
for _, info := range setting.CenterInfos.Info {
if info.CenterID == aiCenterID {
proxy = info.StorageProxyServer
break
}
}

return proxy
}
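
A hedged sketch of how the new helpers fit together (job name and AI center ID are placeholders; it assumes the grampus package is imported by the caller):

// Sketch: resolve where a finished NPU job's model archive lives remotely and which
// storage proxy should pull it back, using the helpers added above.
func exampleRemoteModelLocation(jobName, aiCenterID string) (objectKey, obsURL, endpoint, proxy string) {
    objectKey = grampus.GetNpuModelObjectKey(jobName)  // <setting.CodePathPrefix><jobName>/output/models.zip
    obsURL = grampus.GetNpuModelRemoteObsUrl(jobName)  // "s3:///grampus/" + objectKey
    endpoint = grampus.GetRemoteEndPoint(aiCenterID)   // endpoint of the matching entry in AI_CENTER_INFO
    proxy = grampus.GetCenterProxy(aiCenterID)         // storage proxy server of that AI center
    return
}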

modules/setting/setting.go  (+22, -1)

@@ -76,6 +76,17 @@ type C2NetSqInfos struct {
C2NetSqInfo []*C2NetSequenceInfo `json:"sequence"`
}

type AiCenterInfo struct {
CenterID string `json:"center_id"`
Name string `json:"name"`
Endpoint string `json:"endpoint"`
StorageProxyServer string `json:"storage_proxy_server"`
}

type AiCenterInfos struct {
Info []*AiCenterInfo `json:"infos"`
}

type StFlavorInfos struct {
FlavorInfo []*FlavorInfo `json:"flavor_info"`
}
@@ -594,9 +605,12 @@ var (
SpecialPools string
C2NetSequence string
SyncScriptProject string
LocalCenterID string
AiCenterInfo string
}{}

C2NetInfos *C2NetSqInfos
C2NetInfos *C2NetSqInfos
CenterInfos *AiCenterInfos
C2NetMapInfo map[string]*C2NetSequenceInfo

//elk config
@@ -1647,6 +1661,13 @@ func getGrampusConfig() {
}
}
Grampus.SyncScriptProject = sec.Key("SYNC_SCRIPT_PROJECT").MustString("script_for_grampus")
Grampus.LocalCenterID = sec.Key("LOCAL_CENTER_ID").MustString("cloudbrain2")
Grampus.AiCenterInfo = sec.Key("AI_CENTER_INFO").MustString("")
if Grampus.AiCenterInfo != "" {
if err := json.Unmarshal([]byte(Grampus.AiCenterInfo), &CenterInfos); err != nil {
log.Error("Unmarshal(AiCenterInfo) failed:%v", err)
}
}

}
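
A hedged example of the JSON shape AI_CENTER_INFO is expected to carry, matching the json tags of AiCenterInfo above (the center ID, name and addresses are placeholders, not values from this PR):

// Sketch: parse a hypothetical AI_CENTER_INFO value from app.ini.
func exampleParseAiCenterInfo() {
    aiCenterInfo := `{"infos":[{
        "center_id": "center-npu-01",
        "name": "example NPU center",
        "endpoint": "https://obs.example.com",
        "storage_proxy_server": "192.0.2.10"
    }]}`

    var centers AiCenterInfos
    if err := json.Unmarshal([]byte(aiCenterInfo), &centers); err != nil {
        log.Error("Unmarshal(AiCenterInfo) failed:%v", err)
    }
}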



modules/storage/obs.go  (+23, -27)

@@ -470,47 +470,43 @@ func GetObsListObject(jobName, outPutPath, parentDir, versionName string) ([]Fil
input := &obs.ListObjectsInput{}
input.Bucket = setting.Bucket
input.Prefix = strings.TrimPrefix(path.Join(setting.TrainJobModelPath, jobName, outPutPath, versionName, parentDir), "/")
log.Info("bucket=" + input.Bucket + " Prefix=" + input.Prefix)
strPrefix := strings.Split(input.Prefix, "/")
if !strings.HasSuffix(input.Prefix, "/") {
input.Prefix += "/"
}
output, err := ObsCli.ListObjects(input)
fileInfos := make([]FileInfo, 0)
prefixLen := len(input.Prefix)
fileMap := make(map[string]bool, 0)
if err == nil {
for _, val := range output.Contents {
str1 := strings.Split(val.Key, "/")
log.Info("val key=" + val.Key)
var isDir bool
var fileName, nextParentDir string
if strings.HasSuffix(val.Key, "/") {
//dirs in next level dir
if len(str1)-len(strPrefix) > 2 {
continue
}
fileName = str1[len(str1)-2]
var fileName string
if val.Key == input.Prefix {
continue
}
fileName = val.Key[prefixLen:]
log.Info("fileName =" + fileName)
files := strings.Split(fileName, "/")
if fileMap[files[0]] {
continue
} else {
fileMap[files[0]] = true
}
ParenDir := parentDir
fileName = files[0]
if len(files) > 1 {
isDir = true
if parentDir == "" {
nextParentDir = fileName
} else {
nextParentDir = parentDir + "/" + fileName
}

if fileName == strPrefix[len(strPrefix)-1] || (fileName+"/") == outPutPath {
continue
}
ParenDir += fileName + "/"
} else {
//files in next level dir
if len(str1)-len(strPrefix) > 1 {
continue
}
fileName = str1[len(str1)-1]
isDir = false
nextParentDir = parentDir
}

fileInfo := FileInfo{
ModTime: val.LastModified.Local().Format("2006-01-02 15:04:05"),
FileName: fileName,
Size: val.Size,
IsDir: isDir,
ParenDir: nextParentDir,
ParenDir: ParenDir,
}
fileInfos = append(fileInfos, fileInfo)
}
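
In effect, the rewritten listing keeps only the first path segment below the request prefix and deduplicates it via fileMap; a rough illustration with made-up keys:

// Sketch of the new behaviour: for prefix "model/job1/V0001/", keys such as
//   model/job1/V0001/checkpoint/ckpt_0.ckpt
//   model/job1/V0001/train.log
// produce one directory entry "checkpoint" and one file entry "train.log";
// deeper keys under "checkpoint/" are skipped once the first segment is seen.
func exampleListingBehaviour() {
    prefix := "model/job1/V0001/"
    key := "model/job1/V0001/checkpoint/ckpt_0.ckpt"
    rel := key[len(prefix):]            // "checkpoint/ckpt_0.ckpt"
    first := strings.Split(rel, "/")[0] // "checkpoint"
    log.Info("entry=%s isDir=%v", first, len(strings.Split(rel, "/")) > 1)
}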


modules/urfs_client/config/constants.go  (+93, -0)

@@ -0,0 +1,93 @@
/*
* Copyright 2020 The Dragonfly Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package config

import (
"time"
)

// Reason of backing to source.
const (
BackSourceReasonNone = 0
BackSourceReasonRegisterFail = 1
BackSourceReasonMd5NotMatch = 2
BackSourceReasonDownloadError = 3
BackSourceReasonNoSpace = 4
BackSourceReasonInitError = 5
BackSourceReasonWriteError = 6
BackSourceReasonHostSysError = 7
BackSourceReasonNodeEmpty = 8
BackSourceReasonSourceError = 10
BackSourceReasonUserSpecified = 100
ForceNotBackSourceAddition = 1000
)

// Download pattern.
const (
PatternP2P = "p2p"
PatternSeedPeer = "seed-peer"
PatternSource = "source"
)

//// Download limit.
//const (
// DefaultPerPeerDownloadLimit = 20 * unit.MB
// DefaultTotalDownloadLimit = 100 * unit.MB
// DefaultUploadLimit = 100 * unit.MB
// DefaultMinRate = 20 * unit.MB
//)

// Others.
const (
DefaultTimestampFormat = "2006-01-02 15:04:05"
SchemaHTTP = "http"

DefaultTaskExpireTime = 6 * time.Hour
DefaultGCInterval = 1 * time.Minute
DefaultDaemonAliveTime = 5 * time.Minute
DefaultScheduleTimeout = 5 * time.Minute
DefaultDownloadTimeout = 5 * time.Minute

DefaultSchedulerSchema = "http"
DefaultSchedulerIP = "127.0.0.1"
DefaultSchedulerPort = 8002

DefaultPieceChanSize = 16
DefaultObjectMaxReplicas = 3
)

// Dfcache subcommand names.
const (
CmdStat = "stat"
CmdImport = "import"
CmdExport = "export"
CmdDelete = "delete"
)

// Service default port of listening.
const (
DefaultEndPort = 65535
DefaultPeerStartPort = 65000
DefaultUploadStartPort = 65002
DefaultObjectStorageStartPort = 65004
DefaultHealthyStartPort = 40901
)

var (
// DefaultCertValidityPeriod is default validity period of certificate.
DefaultCertValidityPeriod = 180 * 24 * time.Hour
)

modules/urfs_client/config/dfstore.go  (+66, -0)

@@ -0,0 +1,66 @@
/*
* Copyright 2022 The Dragonfly Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package config

import (
"errors"
"fmt"
"net/url"
)

type DfstoreConfig struct {
// Address of the object storage service.
Endpoint string `yaml:"endpoint,omitempty" mapstructure:"endpoint,omitempty"`

// Filter is used to generate a unique Task ID by
// filtering unnecessary query params in the URL,
// it is separated by & character.
Filter string `yaml:"filter,omitempty" mapstructure:"filter,omitempty"`

// Mode is the mode in which the backend is written,
// including WriteBack and AsyncWriteBack.
Mode int `yaml:"mode,omitempty" mapstructure:"mode,omitempty"`

// MaxReplicas is the maximum number of
// replicas of an object cache in seed peers.
MaxReplicas int `yaml:"maxReplicas,omitempty" mapstructure:"mode,maxReplicas"`
}

// New dfstore configuration.
func NewDfstore() *DfstoreConfig {
url := url.URL{
Scheme: "http",
Host: fmt.Sprintf("%s:%d", "127.0.0.1", DefaultObjectStorageStartPort),
}

return &DfstoreConfig{
Endpoint: url.String(),
MaxReplicas: DefaultObjectMaxReplicas,
}
}

func (cfg *DfstoreConfig) Validate() error {
if cfg.Endpoint == "" {
return errors.New("dfstore requires parameter endpoint")
}

if _, err := url.ParseRequestURI(cfg.Endpoint); err != nil {
return fmt.Errorf("invalid endpoint: %w", err)
}

return nil
}
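
A short usage sketch of this configuration (the overridden endpoint is a placeholder for a remote dfdaemon's object-storage port):

// Sketch: build and validate a dfstore config before creating a client.
func exampleDfstoreConfig() error {
    cfg := config.NewDfstore()               // defaults to http://127.0.0.1:65004
    cfg.Endpoint = "http://192.0.2.10:65004" // placeholder remote endpoint
    return cfg.Validate()
}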

modules/urfs_client/config/headers.go  (+32, -0)

@@ -0,0 +1,32 @@
/*
* Copyright 2020 The Dragonfly Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package config

const (
HeaderDragonflyFilter = "X-Dragonfly-Filter"
HeaderDragonflyPeer = "X-Dragonfly-Peer"
HeaderDragonflyTask = "X-Dragonfly-Task"
HeaderDragonflyRange = "X-Dragonfly-Range"
// HeaderDragonflyTag different HeaderDragonflyTag for the same url will be divided into different P2P overlay
HeaderDragonflyTag = "X-Dragonfly-Tag"
// HeaderDragonflyApplication is used for statistics and traffic control
HeaderDragonflyApplication = "X-Dragonfly-Application"
// HeaderDragonflyRegistry is used for dynamic registry mirrors.
HeaderDragonflyRegistry = "X-Dragonfly-Registry"
// HeaderDragonflyObjectMetaDigest is used for digest of object storage.
HeaderDragonflyObjectMetaDigest = "X-Dragonfly-Object-Meta-Digest"
)

modules/urfs_client/dfstore/dfstore.go  (+307, -0)

@@ -0,0 +1,307 @@
package dfstore

import (
"context"
"errors"
"fmt"
"github.com/go-http-utils/headers"
"io"
"net/http"
"net/url"
"path"
"strconv"

"code.gitea.io/gitea/modules/urfs_client/config"
pkgobjectstorage "code.gitea.io/gitea/modules/urfs_client/objectstorage"
)

// Dfstore is the interface used for object storage.
type Dfstore interface {

// GetUrfsMetadataRequestWithContext returns *http.Request of getting Urfs metadata.
GetUrfsMetadataRequestWithContext(ctx context.Context, input *GetUrfsMetadataInput) (*http.Request, error)

// GetUrfsMetadataWithContext returns metadata of Urfs.
GetUrfsMetadataWithContext(ctx context.Context, input *GetUrfsMetadataInput) (*pkgobjectstorage.ObjectMetadata, error)

// GetUrfsRequestWithContext returns *http.Request of getting Urfs.
GetUrfsRequestWithContext(ctx context.Context, input *GetUrfsInput) (*http.Request, error)

// GetUrfsWithContext returns data of Urfs.
GetUrfsWithContext(ctx context.Context, input *GetUrfsInput) (io.ReadCloser, error)

// GetUrfsStatusRequestWithContext returns *http.Request of getting Urfs status.
GetUrfsStatusRequestWithContext(ctx context.Context, input *GetUrfsInput) (*http.Request, error)

// GetUrfsStatusWithContext returns schedule status of Urfs.
GetUrfsStatusWithContext(ctx context.Context, input *GetUrfsInput) (io.ReadCloser, error)
}

// dfstore provides object storage function.
type dfstore struct {
endpoint string
httpClient *http.Client
}

// Option is a functional option for configuring the dfstore.
type Option func(dfs *dfstore)

// New dfstore instance.
func New(endpoint string, options ...Option) Dfstore {
dfs := &dfstore{
endpoint: endpoint,
httpClient: http.DefaultClient,
}

for _, opt := range options {
opt(dfs)
}

return dfs
}

// GetUrfsMetadataInput is used to construct request of getting object metadata.
type GetUrfsMetadataInput struct {

// Endpoint is endpoint name.
Endpoint string

// BucketName is bucket name.
BucketName string

// ObjectKey is object key.
ObjectKey string

// DstPeer is target peerHost.
DstPeer string
}

// Validate validates GetUrfsMetadataInput fields.
func (i *GetUrfsMetadataInput) Validate() error {

if i.Endpoint == "" {
return errors.New("invalid Endpoint")

}

if i.BucketName == "" {
return errors.New("invalid BucketName")

}

if i.ObjectKey == "" {
return errors.New("invalid ObjectKey")
}

return nil
}

// GetObjectMetadataRequestWithContext returns *http.Request of getting object metadata.
func (dfs *dfstore) GetUrfsMetadataRequestWithContext(ctx context.Context, input *GetUrfsMetadataInput) (*http.Request, error) {
if err := input.Validate(); err != nil {
return nil, err
}

dstUrl := url.URL{
Scheme: "http",
Host: fmt.Sprintf("%s:%d", input.DstPeer, config.DefaultObjectStorageStartPort),
}

u, err := url.Parse(dstUrl.String())
if err != nil {
return nil, err
}

u.Path = path.Join("buckets", input.BucketName+"."+input.Endpoint, "objects", input.ObjectKey)
req, err := http.NewRequestWithContext(ctx, http.MethodHead, u.String(), nil)
if err != nil {
return nil, err
}

return req, nil
}

// GetObjectMetadataWithContext returns metadata of object.
func (dfs *dfstore) GetUrfsMetadataWithContext(ctx context.Context, input *GetUrfsMetadataInput) (*pkgobjectstorage.ObjectMetadata, error) {
req, err := dfs.GetUrfsMetadataRequestWithContext(ctx, input)
if err != nil {
return nil, err
}

resp, err := dfs.httpClient.Do(req)
if err != nil {
return nil, err
}
defer resp.Body.Close()

if resp.StatusCode/100 != 2 {
return nil, fmt.Errorf("bad response status %s", resp.Status)
}

contentLength, err := strconv.ParseInt(resp.Header.Get(headers.ContentLength), 10, 64)
if err != nil {
return nil, err
}

return &pkgobjectstorage.ObjectMetadata{
ContentDisposition: resp.Header.Get(headers.ContentDisposition),
ContentEncoding: resp.Header.Get(headers.ContentEncoding),
ContentLanguage: resp.Header.Get(headers.ContentLanguage),
ContentLength: int64(contentLength),
ContentType: resp.Header.Get(headers.ContentType),
ETag: resp.Header.Get(headers.ContentType),
Digest: resp.Header.Get(config.HeaderDragonflyObjectMetaDigest),
}, nil
}

// GetUrfsInput is used to construct request of getting object.
type GetUrfsInput struct {

// Endpoint is endpoint name.
Endpoint string

// BucketName is bucket name.
BucketName string

// ObjectKey is object key.
ObjectKey string

// Filter is used to generate a unique Task ID by
// filtering unnecessary query params in the URL,
// it is separated by & character.
Filter string

// Range is the HTTP range header.
Range string

// DstPeer is target peerHost.
DstPeer string
}

// GetObjectWithContext returns data of object.
func (dfs *dfstore) GetUrfsWithContext(ctx context.Context, input *GetUrfsInput) (io.ReadCloser, error) {
req, err := dfs.GetUrfsRequestWithContext(ctx, input)
if err != nil {
return nil, err
}

resp, err := dfs.httpClient.Do(req)
if err != nil {
return nil, err
}

if resp.StatusCode/100 != 2 {
return nil, fmt.Errorf("bad response status %s", resp.Status)
}

return resp.Body, nil
}

// GetObjectRequestWithContext returns *http.Request of getting object.
func (dfs *dfstore) GetUrfsRequestWithContext(ctx context.Context, input *GetUrfsInput) (*http.Request, error) {
if err := input.Validate(); err != nil {
return nil, err
}

dstUrl := url.URL{
Scheme: "http",
Host: fmt.Sprintf("%s:%d", input.DstPeer, config.DefaultObjectStorageStartPort),
}

u, err := url.Parse(dstUrl.String())
if err != nil {
return nil, err
}

u.Path = path.Join("buckets", input.BucketName+"."+input.Endpoint, "cache_object", input.ObjectKey)

query := u.Query()
if input.Filter != "" {
query.Set("filter", input.Filter)
}
u.RawQuery = query.Encode()
req, err := http.NewRequestWithContext(ctx, http.MethodPost, u.String(), nil)
if err != nil {
return nil, err
}

if input.Range != "" {
req.Header.Set(headers.Range, input.Range)
}

return req, nil
}

// Validate validates GetUrfsInput fields.
func (i *GetUrfsInput) Validate() error {

if i.Endpoint == "" {
return errors.New("invalid Endpoint")

}

if i.BucketName == "" {
return errors.New("invalid BucketName")

}

if i.ObjectKey == "" {
return errors.New("invalid ObjectKey")
}

return nil
}

// GetUrfsStatusWithContext returns schedule task status.
func (dfs *dfstore) GetUrfsStatusWithContext(ctx context.Context, input *GetUrfsInput) (io.ReadCloser, error) {
req, err := dfs.GetUrfsStatusRequestWithContext(ctx, input)
if err != nil {
return nil, err
}

resp, err := dfs.httpClient.Do(req)
if err != nil {
return nil, err
}

if resp.StatusCode/100 != 2 {
return nil, fmt.Errorf("bad response status %s", resp.Status)
}

return resp.Body, nil
}

// GetObjectStatusRequestWithContext returns *http.Request of check schedule task status.
func (dfs *dfstore) GetUrfsStatusRequestWithContext(ctx context.Context, input *GetUrfsInput) (*http.Request, error) {
if err := input.Validate(); err != nil {
return nil, err
}

dstUrl := url.URL{
Scheme: "http",
Host: fmt.Sprintf("%s:%d", input.DstPeer, config.DefaultObjectStorageStartPort),
}

u, err := url.Parse(dstUrl.String())
if err != nil {
return nil, err
}

u.Path = path.Join("buckets", input.BucketName+"."+input.Endpoint, "check_object", input.ObjectKey)

query := u.Query()
if input.Filter != "" {
query.Set("filter", input.Filter)
}
u.RawQuery = query.Encode()
req, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), nil)
if err != nil {
return nil, err
}

if input.Range != "" {
req.Header.Set(headers.Range, input.Range)
}

return req, nil
}
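
A hedged sketch of how a caller might drive this client (endpoint, bucket, object key and peer host are placeholders; it assumes the dfstore and config packages from modules/urfs_client are imported):

// Sketch: ask a destination peer to cache one object and read back the scheduler's reply.
func exampleDfstoreUsage(ctx context.Context) error {
    dfs := dfstore.New(config.NewDfstore().Endpoint)

    body, err := dfs.GetUrfsWithContext(ctx, &dfstore.GetUrfsInput{
        Endpoint:   "obs.example.com", // placeholder object-storage endpoint
        BucketName: "grampus",
        ObjectKey:  "prefix/jobname/output/models.zip",
        DstPeer:    "192.0.2.10", // placeholder destination peer host
    })
    if err != nil {
        return err
    }
    defer body.Close()

    reply, err := ioutil.ReadAll(body)
    if err != nil {
        return err
    }
    log.Info("schedule reply:%s", string(reply))
    return nil
}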

modules/urfs_client/objectstorage/mocks/objectstorage_mock.go  (+5, -0)

@@ -0,0 +1,5 @@
// Code generated by MockGen. DO NOT EDIT.
// Source: objectstorage.go

// Package mocks is a generated GoMock package.
package mocks

modules/urfs_client/objectstorage/objectstorage.go  (+47, -0)

@@ -0,0 +1,47 @@
/*
* Copyright 2022 The Dragonfly Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

//go:generate mockgen -destination mocks/objectstorage_mock.go -source objectstorage.go -package mocks

package objectstorage

type ObjectMetadata struct {
// Key is object key.
Key string

// ContentDisposition is Content-Disposition header.
ContentDisposition string

// ContentEncoding is Content-Encoding header.
ContentEncoding string

// ContentLanguage is Content-Language header.
ContentLanguage string

// ContentLength is Content-Length header.
ContentLength int64

// ContentType is Content-Type header.
ContentType string

// ETag is ETag header.
ETag string

// Digest is object digest.
Digest string
}



modules/urfs_client/urchin/schedule.go  (+112, -0)

@@ -0,0 +1,112 @@
package urchin

import (
"encoding/json"
"fmt"
"strings"

"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/labelmsg"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
)

type DecompressReq struct {
SourceFile string `json:"source_file"`
DestPath string `json:"dest_path"`
}

var urfsClient Urchinfs

func getUrfsClient() {
if urfsClient != nil {
return
}

urfsClient = New()
}

func GetBackNpuModel(cloudbrainID int64, endpoint, bucket, objectKey, destPeerHost string) error {
getUrfsClient()
res, err := urfsClient.ScheduleDataToPeerByKey(endpoint, bucket, objectKey, destPeerHost)
if err != nil {
log.Error("ScheduleDataToPeerByKey failed:%v", err)
return err
}

_, err = models.InsertScheduleRecord(&models.ScheduleRecord{
CloudbrainID: cloudbrainID,
EndPoint: res.DataEndpoint,
Bucket: res.DataRoot,
ObjectKey: res.DataPath,
ProxyServer: destPeerHost,
Status: res.StatusCode,
})
if err != nil {
log.Error("InsertScheduleRecord failed:%v", err)
return err
}

switch res.StatusCode {
case models.StorageScheduleSucceed:
log.Info("ScheduleDataToPeerByKey succeed")
decompress(res.DataRoot+"/"+res.DataPath, setting.Bucket+"/"+strings.TrimSuffix(res.DataPath, models.ModelSuffix))
case models.StorageScheduleProcessing:
log.Info("ScheduleDataToPeerByKey processing")
case models.StorageScheduleFailed:
log.Error("ScheduleDataToPeerByKey failed:%s", res.StatusMsg)
return fmt.Errorf("GetBackNpuModel failed:%s", res.StatusMsg)
default:
log.Info("ScheduleDataToPeerByKey failed, unknown StatusCode:%d", res.StatusCode)
return fmt.Errorf("GetBackNpuModel failed, unknown StatusCode:%d", res.StatusCode)
}

return nil
}

func HandleScheduleRecords() error {
getUrfsClient()
records, err := models.GetSchedulingRecord()
if err != nil {
log.Error("GetSchedulingRecord failed:%v", err)
return err
}

for _, record := range records {
res, err := urfsClient.CheckScheduleTaskStatusByKey(record.EndPoint, record.Bucket, record.ObjectKey, record.ProxyServer)
if err != nil {
log.Error("CheckScheduleTaskStatusByKey(%d) failed:%v", record.ID, err)
continue
}

record.Status = res.StatusCode
models.UpdateScheduleCols(record, "status")

switch res.StatusCode {
case models.StorageScheduleSucceed:
log.Info("ScheduleDataToPeerByKey(%s) succeed", record.ObjectKey)
decompress(record.Bucket+"/"+record.ObjectKey, setting.Bucket+"/"+strings.TrimSuffix(record.ObjectKey, models.ModelSuffix))
case models.StorageScheduleProcessing:
log.Info("ScheduleDataToPeerByKey(%s) processing", record.ObjectKey)
case models.StorageScheduleFailed:
log.Error("ScheduleDataToPeerByKey(%s) failed:%s", record.ObjectKey, res.StatusMsg)

default:
log.Info("ScheduleDataToPeerByKey(%s) failed, unknown StatusCode:%d", record.ObjectKey, res.StatusCode)
}

}

return nil
}

func decompress(sourceFile, destPath string) {
req, _ := json.Marshal(DecompressReq{
SourceFile: sourceFile,
DestPath: destPath,
})
err := labelmsg.SendDecompressAttachToLabelOBS(string(req))
if err != nil {
log.Error("SendDecompressTask to labelsystem (%s) failed:%s", sourceFile, err.Error())
}
}
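
A rough sketch of the intended end-to-end flow (job ID, job name and AI center ID are placeholders): when an NPU training job finishes, the caller kicks off the back-transfer below, and the handle_schedule_record cron task added in tasks_basic.go polls the resulting record every minute via HandleScheduleRecords.

// Sketch: request that a finished NPU job's model archive be pulled back from the remote center.
func exampleGetBackModel(cloudbrainID int64, jobName, aiCenterID string) error {
    endpoint := grampus.GetRemoteEndPoint(aiCenterID)
    objectKey := grampus.GetNpuModelObjectKey(jobName)
    proxy := grampus.GetCenterProxy(aiCenterID)

    // Inserts a ScheduleRecord; the cron poller finishes (or fails) it asynchronously.
    return urchin.GetBackNpuModel(cloudbrainID, endpoint, grampus.BucketRemote, objectKey, proxy)
}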

modules/urfs_client/urchin/urchinfs.go  (+276, -0)

@@ -0,0 +1,276 @@
package urchin

import (
"context"
"encoding/json"
"errors"
"io/ioutil"
"net/url"
"strconv"
"strings"

"code.gitea.io/gitea/modules/urfs_client/config"
urfs "code.gitea.io/gitea/modules/urfs_client/dfstore"
)

type Urchinfs interface {

//// schedule source dataset to target peer
//ScheduleDataToPeer(sourceUrl, destPeerHost string) (*PeerResult, error)
//
//// check schedule data to peer task status
//CheckScheduleTaskStatus(sourceUrl, destPeerHost string) (*PeerResult, error)

ScheduleDataToPeerByKey(endpoint, bucketName, objectKey, destPeerHost string) (*PeerResult, error)

CheckScheduleTaskStatusByKey(endpoint, bucketName, objectKey, destPeerHost string) (*PeerResult, error)
}

type urchinfs struct {
// Initialize default urfs config.
cfg *config.DfstoreConfig
}

// New urchinfs instance.
func New() Urchinfs {

urfs := &urchinfs{
cfg: config.NewDfstore(),
}
return urfs
}

const (
// UrfsScheme is the scheme of object storage.
UrfsScheme = "urfs"
)

/*
func (urfs *urchinfs) ScheduleDataToPeer(sourceUrl, destPeerHost string) (*PeerResult, error) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()

if err := urfs.cfg.Validate(); err != nil {
return nil, err
}

if err := validateSchedulelArgs(sourceUrl, destPeerHost); err != nil {
return nil, err
}

// Copy object storage to local file.
endpoint, bucketName, objectKey, err := parseUrfsURL(sourceUrl)
if err != nil {
return nil, err
}
peerResult, err := processScheduleDataToPeer(ctx, urfs.cfg, endpoint, bucketName, objectKey, destPeerHost)
if err != nil {
return nil, err
}

return peerResult, err
}

*/

func (urfs *urchinfs) ScheduleDataToPeerByKey(endpoint, bucketName, objectKey, destPeerHost string) (*PeerResult, error) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()

peerResult, err := processScheduleDataToPeer(ctx, urfs.cfg, endpoint, bucketName, objectKey, destPeerHost)
if err != nil {
return nil, err
}

return peerResult, err
}

/*
func (urfs *urchinfs) CheckScheduleTaskStatus(sourceUrl, destPeerHost string) (*PeerResult, error) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()

if err := urfs.cfg.Validate(); err != nil {
return nil, err
}

if err := validateSchedulelArgs(sourceUrl, destPeerHost); err != nil {
return nil, err
}

// Copy object storage to local file.
endpoint, bucketName, objectKey, err := parseUrfsURL(sourceUrl)
if err != nil {
return nil, err
}
peerResult, err := processCheckScheduleTaskStatus(ctx, urfs.cfg, endpoint, bucketName, objectKey, destPeerHost)
if err != nil {
return nil, err
}

return peerResult, err
}

*/

func (urfs *urchinfs) CheckScheduleTaskStatusByKey(endpoint, bucketName, objectKey, destPeerHost string) (*PeerResult, error) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()

peerResult, err := processCheckScheduleTaskStatus(ctx, urfs.cfg, endpoint, bucketName, objectKey, destPeerHost)
if err != nil {
return nil, err
}

return peerResult, err
}

// isUrfsURL determines whether the raw url is urfs url.
func isUrfsURL(rawURL string) bool {
u, err := url.ParseRequestURI(rawURL)
if err != nil {
return false
}

if u.Scheme != UrfsScheme || u.Host == "" || u.Path == "" {
return false
}

return true
}

// Validate copy arguments.
func validateSchedulelArgs(sourceUrl, destPeer string) error {
if !isUrfsURL(sourceUrl) {
return errors.New("source url should be urfs:// protocol")
}

return nil
}

/*
// Parse object storage url. eg: urfs://<source endpoint>/<source bucket>/<source filepath>
func parseUrfsURL(rawURL string) (string, string, string, error) {
u, err := url.ParseRequestURI(rawURL)
if err != nil {
return "", "", "", err
}

if u.Scheme != UrfsScheme {
return "", "", "", fmt.Errorf("invalid scheme, e.g. %s://endpoint/bucket_name/object_key", UrfsScheme)
}

if u.Host == "" {
return "", "", "", errors.New("empty endpoint name")
}

if u.Path == "" {
return "", "", "", errors.New("empty object path")
}

bucket, key, found := strings.Cut(strings.Trim(u.Path, "/"), "/")
if found == false {
return "", "", "", errors.New("invalid bucket and object key " + u.Path)
}

return u.Host, bucket, key, nil
}

*/

// Schedule object storage to peer.
func processScheduleDataToPeer(ctx context.Context, cfg *config.DfstoreConfig, endpoint, bucketName, objectKey, dstPeer string) (*PeerResult, error) {
dfs := urfs.New(cfg.Endpoint)
meta, err := dfs.GetUrfsMetadataWithContext(ctx, &urfs.GetUrfsMetadataInput{
Endpoint: endpoint,
BucketName: bucketName,
ObjectKey: objectKey,
DstPeer: dstPeer,
})
if err != nil {
return nil, err
}

reader, err := dfs.GetUrfsWithContext(ctx, &urfs.GetUrfsInput{
Endpoint: endpoint,
BucketName: bucketName,
ObjectKey: objectKey,
DstPeer: dstPeer,
})
if err != nil {
return nil, err
}
defer reader.Close()

body, err := ioutil.ReadAll(reader)
if err != nil {
return nil, err
}

var peerResult PeerResult
if err = json.Unmarshal(body, &peerResult); err != nil {
return nil, err
}
peerResult.SignedUrl = strings.ReplaceAll(peerResult.SignedUrl, "\\u0026", "&")

fileContentLength, err := strconv.ParseInt(peerResult.ContentLength, 10, 64)
if err != nil {
return nil, err
}
if fileContentLength != meta.ContentLength {
return nil, errors.New("content length inconsistent with meta")
}

return &peerResult, nil
}

// processCheckScheduleTaskStatus checks the status of a schedule task.
func processCheckScheduleTaskStatus(ctx context.Context, cfg *config.DfstoreConfig, endpoint, bucketName, objectKey, dstPeer string) (*PeerResult, error) {
dfs := urfs.New(cfg.Endpoint)
meta, err := dfs.GetUrfsMetadataWithContext(ctx, &urfs.GetUrfsMetadataInput{
Endpoint: endpoint,
BucketName: bucketName,
ObjectKey: objectKey,
DstPeer: dstPeer,
})
if err != nil {
return nil, err
}

reader, err := dfs.GetUrfsStatusWithContext(ctx, &urfs.GetUrfsInput{
Endpoint: endpoint,
BucketName: bucketName,
ObjectKey: objectKey,
DstPeer: dstPeer,
})
if err != nil {
return nil, err
}
defer reader.Close()

body, err := ioutil.ReadAll(reader)
if err != nil {
return nil, err
}

var peerResult PeerResult
if err = json.Unmarshal(body, &peerResult); err != nil {
return nil, err
}
peerResult.SignedUrl = strings.ReplaceAll(peerResult.SignedUrl, "\\u0026", "&")

fileContentLength, err := strconv.ParseInt(peerResult.ContentLength, 10, 64)
if err != nil {
return nil, err
}
if fileContentLength != meta.ContentLength {
return nil, errors.New("content length inconsistent with meta")
}
return &peerResult, nil
}

type PeerResult struct {
ContentType string `json:"Content-Type"`
ContentLength string `json:"Content-Length"`
SignedUrl string
DataRoot string
DataPath string
DataEndpoint string
StatusCode int
StatusMsg string
TaskID string
}
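
A minimal usage sketch of this client follows. The endpoint, bucket, object key and peer host below are placeholders rather than values from this change, and the sketch assumes the dfstore scheduler endpoint is already configured through config.NewDfstore().

package main

import (
    "fmt"
    "log"

    "code.gitea.io/gitea/modules/urfs_client/urchin"
)

func main() {
    fs := urchin.New()

    // Ask the scheduler to pull the object to the destination peer.
    res, err := fs.ScheduleDataToPeerByKey("obs.example.com", "my-bucket", "output/model.ckpt", "peer.example.com:65004")
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println("task:", res.TaskID, "status:", res.StatusCode, res.StatusMsg)

    // Poll the same key later to see whether the transfer has finished.
    res, err = fs.CheckScheduleTaskStatusByKey("obs.example.com", "my-bucket", "output/model.ckpt", "peer.example.com:65004")
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println("signed url:", res.SignedUrl)
}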

+3 -0  routers/api/v1/repo/cloudbrain.go

@@ -172,6 +172,7 @@ func GetCloudBrainInferenceJob(ctx *context.APIContext) {
"JobID": jobID,
"JobStatus": job.Status,
"JobDuration": job.TrainJobDuration,
"StartTime": job.StartTime,
})

}
@@ -441,6 +442,7 @@ func ModelSafetyGetLog(ctx *context.APIContext) {
"Content": result.Content,
"Lines": result.Lines,
"CanLogDownload": isCanDownloadLog(ctx, job),
"StartTime": job.StartTime,
})
}
}
@@ -601,6 +603,7 @@ func CloudbrainGetLog(ctx *context.APIContext) {
"Content": content,
"Lines": result["Lines"],
"CanLogDownload": result["FileName"] != "",
"StartTime": job.StartTime,
}
//result := CloudbrainGetLogByJobId(job.JobID, job.JobName)
ctx.JSON(http.StatusOK, re)


+37 -1  routers/api/v1/repo/modelarts.go

@@ -12,6 +12,8 @@ import (
"strconv"
"strings"

"code.gitea.io/gitea/modules/urfs_client/urchin"

"code.gitea.io/gitea/modules/notification"

"code.gitea.io/gitea/modules/grampus"
@@ -49,6 +51,7 @@ func GetModelArtsNotebook2(ctx *context.APIContext) {
"JobName": job.JobName,
"JobStatus": job.Status,
"JobDuration": job.TrainJobDuration,
"StartTime": job.StartTime,
})

}
@@ -180,6 +183,11 @@ func GetModelArtsTrainJobVersion(ctx *context.APIContext) {
}
if oldStatus != job.Status {
notification.NotifyChangeCloudbrainStatus(job, oldStatus)
if models.IsTrainJobTerminal(job.Status) {
if len(result.JobInfo.Tasks[0].CenterID) == 1 {
urchin.GetBackNpuModel(job.ID, grampus.GetRemoteEndPoint(result.JobInfo.Tasks[0].CenterID[0]), grampus.BucketRemote, grampus.GetNpuModelObjectKey(job.JobName), grampus.GetCenterProxy(setting.Grampus.LocalCenterID))
}
}
}
err = models.UpdateTrainJobVersion(job)
if err != nil {
@@ -192,6 +200,7 @@ func GetModelArtsTrainJobVersion(ctx *context.APIContext) {
"JobStatus": job.Status,
"JobDuration": job.TrainJobDuration,
"AiCenter": aiCenterName,
"StartTime": job.StartTime,
})

}
@@ -319,6 +328,7 @@ func TrainJobGetLog(ctx *context.APIContext) {
"Content": result.Content,
"Lines": result.Lines,
"CanLogDownload": canLogDownload,
"StartTime": task.StartTime,
})
}

@@ -458,6 +468,7 @@ func ModelList(ctx *context.APIContext) {
return
}

status := models.StorageScheduleSucceed
var fileInfos []storage.FileInfo
if task.ComputeResource == models.NPUResource {
fileInfos, err = storage.GetObsListObject(task.JobName, "output/", parentDir, versionName)
@@ -466,6 +477,30 @@ func ModelList(ctx *context.APIContext) {
ctx.ServerError("GetObsListObject:", err)
return
}

if task.Type == models.TypeC2Net {
if len(fileInfos) > 0 {
status = models.StorageScheduleSucceed
} else {
if models.IsTrainJobTerminal(task.Status) {
if task.Status == models.GrampusStatusStopped {
status = models.StorageNoFile
} else {
record, _ := models.GetScheduleRecordByCloudbrainID(task.ID)
if record != nil {
status = record.Status
if status == models.StorageScheduleSucceed {
status = models.StorageNoFile
}
} else {
status = models.StorageScheduleProcessing
}
}
} else {
status = models.StorageScheduleWaiting
}
}
}
} else if task.ComputeResource == models.GPUResource {
files, err := routerRepo.GetModelDirs(task.JobName, parentDir)
if err != nil {
@@ -485,7 +520,7 @@ func ModelList(ctx *context.APIContext) {
ctx.JSON(http.StatusOK, map[string]interface{}{
"JobID": jobID,
"VersionName": versionName,
"StatusOK": 0,
"StatusOK": status,
"Path": dirArray,
"Dirs": fileInfos,
"task": task,
@@ -514,6 +549,7 @@ func GetModelArtsInferenceJob(ctx *context.APIContext) {
"JobID": jobID,
"JobStatus": job.Status,
"JobDuration": job.TrainJobDuration,
"StartTime": job.StartTime,
})

}
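
The nested checks added to ModelList above reduce to a single decision per NPU job. The sketch below restates that order as a hypothetical helper (resolveNpuModelListStatus is not part of this change); it assumes task is the *models.Cloudbrain record loaded earlier in ModelList and that the models.Storage* schedule-status constants share an integer type, as the previous literal 0 for StatusOK suggests.

// resolveNpuModelListStatus mirrors the status logic added to ModelList for
// C2Net NPU jobs when deciding what to report about output files.
func resolveNpuModelListStatus(task *models.Cloudbrain, fileCount int) int {
    if fileCount > 0 {
        return models.StorageScheduleSucceed // output already copied back
    }
    if !models.IsTrainJobTerminal(task.Status) {
        return models.StorageScheduleWaiting // job still running
    }
    if task.Status == models.GrampusStatusStopped {
        return models.StorageNoFile // a stopped job leaves no output
    }
    record, _ := models.GetScheduleRecordByCloudbrainID(task.ID)
    if record == nil {
        return models.StorageScheduleProcessing // transfer not yet recorded
    }
    if record.Status == models.StorageScheduleSucceed {
        return models.StorageNoFile // transfer finished but found nothing
    }
    return record.Status // processing / failed states pass through
}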


+6 -0  routers/repo/cloudbrain.go

@@ -2,6 +2,7 @@ package repo

import (
"bufio"
"code.gitea.io/gitea/modules/urfs_client/urchin"
"encoding/json"
"errors"
"fmt"
@@ -1943,6 +1944,11 @@ func SyncCloudbrainStatus() {
task.CorrectCreateUnix()
if oldStatus != task.Status {
notification.NotifyChangeCloudbrainStatus(task, oldStatus)
if models.IsTrainJobTerminal(task.Status) {
if len(result.JobInfo.Tasks[0].CenterID) == 1 {
urchin.GetBackNpuModel(task.ID, grampus.GetRemoteEndPoint(result.JobInfo.Tasks[0].CenterID[0]), grampus.BucketRemote, grampus.GetNpuModelObjectKey(task.JobName), grampus.GetCenterProxy(setting.Grampus.LocalCenterID))
}
}
}
err = models.UpdateJob(task)
if err != nil {


+22 -11  routers/repo/grampus.go

@@ -1,6 +1,7 @@
package repo

import (
"code.gitea.io/gitea/modules/urfs_client/urchin"
"encoding/json"
"errors"
"fmt"
@@ -431,7 +432,7 @@ func grampusTrainJobGpuCreate(ctx *context.Context, form auth.CreateGrampusTrain
//prepare command
preTrainModelPath := getPreTrainModelPath(form.PreTrainModelUrl, form.CkptName)

command, err := generateCommand(repo.Name, grampus.ProcessorTypeGPU, codeMinioPath+cloudbrain.DefaultBranchName+".zip", datasetRemotePath, bootFile, params, setting.CBCodePathPrefix+jobName+cloudbrain.ModelMountPath+"/", allFileName, preTrainModelPath, form.CkptName)
command, err := generateCommand(repo.Name, grampus.ProcessorTypeGPU, codeMinioPath+cloudbrain.DefaultBranchName+".zip", datasetRemotePath, bootFile, params, setting.CBCodePathPrefix+jobName+cloudbrain.ModelMountPath+"/", allFileName, preTrainModelPath, form.CkptName, "")
if err != nil {
log.Error("Failed to generateCommand: %s (%v)", displayJobName, err, ctx.Data["MsgID"])
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU)
@@ -688,7 +689,7 @@ func grampusTrainJobNpuCreate(ctx *context.Context, form auth.CreateGrampusTrain

//prepare command
preTrainModelPath := getPreTrainModelPath(form.PreTrainModelUrl, form.CkptName)
command, err := generateCommand(repo.Name, grampus.ProcessorTypeNPU, codeObsPath+cloudbrain.DefaultBranchName+".zip", datasetRemotePath, bootFile, params, setting.CodePathPrefix+jobName+modelarts.OutputPath, allFileName, preTrainModelPath, form.CkptName)
command, err := generateCommand(repo.Name, grampus.ProcessorTypeNPU, codeObsPath+cloudbrain.DefaultBranchName+".zip", datasetRemotePath, bootFile, params, setting.CodePathPrefix+jobName+modelarts.OutputPath, allFileName, preTrainModelPath, form.CkptName, grampus.GetNpuModelRemoteObsUrl(jobName))
if err != nil {
log.Error("Failed to generateCommand: %s (%v)", displayJobName, err, ctx.Data["MsgID"])
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeNPU)
@@ -862,7 +863,7 @@ func GrampusTrainJobShow(ctx *context.Context) {
}
oldStatus := task.Status
task.Status = grampus.TransTrainJobStatus(result.JobInfo.Status)
if task.Status != result.JobInfo.Status || result.JobInfo.Status == models.GrampusStatusRunning {
if task.Status != oldStatus || task.Status == models.GrampusStatusRunning {
task.Duration = result.JobInfo.RunSec
if task.Duration < 0 {
task.Duration = 0
@@ -878,6 +879,11 @@ func GrampusTrainJobShow(ctx *context.Context) {
task.CorrectCreateUnix()
if oldStatus != task.Status {
notification.NotifyChangeCloudbrainStatus(task, oldStatus)
if models.IsTrainJobTerminal(task.Status) {
if len(result.JobInfo.Tasks[0].CenterID) == 1 {
urchin.GetBackNpuModel(task.ID, grampus.GetRemoteEndPoint(result.JobInfo.Tasks[0].CenterID[0]), grampus.BucketRemote, grampus.GetNpuModelObjectKey(task.JobName), grampus.GetCenterProxy(setting.Grampus.LocalCenterID))
}
}
}
err = models.UpdateJob(task)
if err != nil {
@@ -916,7 +922,7 @@ func GrampusTrainJobShow(ctx *context.Context) {
ctx.Data["canDownload"] = cloudbrain.CanModifyJob(ctx, task)
ctx.Data["displayJobName"] = task.DisplayJobName

ctx.Data["ai_center"] = cloudbrainService.GetAiCenterShow(task.AiCenter,ctx)
ctx.Data["ai_center"] = cloudbrainService.GetAiCenterShow(task.AiCenter, ctx)

ctx.HTML(http.StatusOK, tplGrampusTrainJobShow)
}
@@ -972,15 +978,18 @@ func GrampusGetLog(ctx *context.Context) {
return
}

func generateCommand(repoName, processorType, codeRemotePath, dataRemotePath, bootFile, paramSrc, outputRemotePath, datasetName, pretrainModelPath, pretrainModelFileName string) (string, error) {
func generateCommand(repoName, processorType, codeRemotePath, dataRemotePath, bootFile, paramSrc, outputRemotePath, datasetName, pretrainModelPath, pretrainModelFileName, modelRemoteObsUrl string) (string, error) {
var command string

//prepare
workDir := grampus.NpuWorkDir
if processorType == grampus.ProcessorTypeGPU {
if processorType == grampus.ProcessorTypeNPU {
command += "pwd;cd " + workDir + grampus.CommandPrepareScriptNpu
} else if processorType == grampus.ProcessorTypeGPU {
workDir = grampus.GpuWorkDir
command += "pwd;cd " + workDir + fmt.Sprintf(grampus.CommandPrepareScriptGpu, setting.Grampus.SyncScriptProject, setting.Grampus.SyncScriptProject)
}

command += "pwd;cd " + workDir + fmt.Sprintf(grampus.CommandPrepareScript, setting.Grampus.SyncScriptProject, setting.Grampus.SyncScriptProject)
//download code & dataset
if processorType == grampus.ProcessorTypeNPU {
//no need to download code & dataset by internet
@@ -995,7 +1004,7 @@ func generateCommand(repoName, processorType, codeRemotePath, dataRemotePath, bo
//no need to process
} else if processorType == grampus.ProcessorTypeGPU {
unZipDatasetCommand := generateDatasetUnzipCommand(datasetName)
commandUnzip := "cd " + workDir + "code;unzip -q master.zip;echo \"start to unzip dataset\";cd " + workDir + "dataset;" + unZipDatasetCommand
commandUnzip := "cd " + workDir + "code;unzip -q master.zip;rm -f master.zip;echo \"start to unzip dataset\";cd " + workDir + "dataset;" + unZipDatasetCommand
command += commandUnzip
}

@@ -1029,7 +1038,8 @@ func generateCommand(repoName, processorType, codeRemotePath, dataRemotePath, bo

var commandCode string
if processorType == grampus.ProcessorTypeNPU {
commandCode = "/bin/bash /home/work/run_train_for_openi.sh /home/work/openi.py /tmp/log/train.log" + paramCode + ";"
paramCode += " --model_url=" + modelRemoteObsUrl
commandCode = "/bin/bash /home/work/run_train_for_openi.sh /home/work/openi.py " + grampus.NpuLocalLogUrl + paramCode + ";"
} else if processorType == grampus.ProcessorTypeGPU {
if pretrainModelFileName != "" {
paramCode += " --ckpt_url" + "=" + workDir + "pretrainmodel/" + pretrainModelFileName
@@ -1045,8 +1055,7 @@ func generateCommand(repoName, processorType, codeRemotePath, dataRemotePath, bo

//upload models
if processorType == grampus.ProcessorTypeNPU {
commandUpload := "cd " + workDir + setting.Grampus.SyncScriptProject + "/;./uploader_for_npu " + setting.Bucket + " " + outputRemotePath + " " + workDir + "output/;"
command += commandUpload
// no need to upload
} else if processorType == grampus.ProcessorTypeGPU {
commandUpload := "cd " + workDir + setting.Grampus.SyncScriptProject + "/;./uploader_for_gpu " + setting.Grampus.Env + " " + outputRemotePath + " " + workDir + "output/;"
command += commandUpload
@@ -1077,6 +1086,7 @@ func generateDatasetUnzipCommand(datasetName string) string {
if strings.HasSuffix(datasetNameArray[0], ".tar.gz") {
unZipDatasetCommand = "tar --strip-components=1 -zxvf '" + datasetName + "';"
}
unZipDatasetCommand += "rm -f '" + datasetName + "';"

} else { // multiple datasets
for _, datasetNameTemp := range datasetNameArray {
@@ -1085,6 +1095,7 @@ func generateDatasetUnzipCommand(datasetName string) string {
} else {
unZipDatasetCommand = unZipDatasetCommand + "unzip -q '" + datasetNameTemp + "' -d './" + strings.TrimSuffix(datasetNameTemp, ".zip") + "';"
}
unZipDatasetCommand += "rm -f '" + datasetNameTemp + "';"
}

}
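
For orientation, the NPU branch of generateCommand now assembles a command of roughly the shape sketched below. buildNpuCommandSketch is illustrative only: the literal values behind grampus.CommandPrepareScriptNpu and grampus.NpuLocalLogUrl are not visible in this diff, so they are taken as parameters, and the intermediate parameter assembly is compressed.

// buildNpuCommandSketch shows how the prepare step, the run_train_for_openi.sh
// invocation and the new --model_url argument fit together for NPU jobs.
func buildNpuCommandSketch(workDir, prepareScriptNpu, localLogURL, params, modelRemoteObsUrl string) string {
    // Prepare step: change into the work dir and run the NPU prepare script.
    cmd := "pwd;cd " + workDir + prepareScriptNpu
    // New in this change: pass the remote OBS location for the trained model.
    params += " --model_url=" + modelRemoteObsUrl
    // Launch training through the platform wrapper, logging to the local log path.
    cmd += "/bin/bash /home/work/run_train_for_openi.sh /home/work/openi.py " + localLogURL + params + ";"
    // The explicit uploader_for_npu step is gone; once the job terminates, the model
    // is fetched back via urchin.GetBackNpuModel (see the GrampusTrainJobShow and
    // SyncCloudbrainStatus changes above).
    return cmd
}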


+1 -1  templates/repo/cloudbrain/inference/show.tmpl

@@ -315,7 +315,7 @@

<td class="ti-text-form-content">
<div class="text-span text-span-w">
<span style="font-size: 12px;" class="">
<span style="font-size: 12px;" id="{{.VersionName}}-startTime">
{{if not (eq .StartTime 0)}}
{{TimeSinceUnix1 .StartTime}}
{{else}}


+1 -1  templates/repo/cloudbrain/trainjob/show.tmpl

@@ -331,7 +331,7 @@

<td class="ti-text-form-content">
<div class="text-span text-span-w">
<span style="font-size: 12px;">
<span style="font-size: 12px;" id="{{.VersionName}}-startTime">
{{if not (eq .StartTime 0)}}
{{TimeSinceUnix1 .StartTime}}
{{else}}


+1 -1  templates/repo/grampus/trainjob/show.tmpl

@@ -330,7 +330,7 @@

<td class="ti-text-form-content">
<div class="text-span text-span-w">
<span style="font-size: 12px;" class="">
<span style="font-size: 12px;" id="{{.VersionName}}-startTime">
{{if not (eq .StartTime 0)}}
{{TimeSinceUnix1 .StartTime}}
{{else}}


+1 -1  templates/repo/modelarts/inferencejob/show.tmpl

@@ -263,7 +263,7 @@ td, th {

<td class="ti-text-form-content">
<div class="text-span text-span-w">
<span style="font-size: 12px;" class="">
<span style="font-size: 12px;" id="{{.VersionName}}-startTime">
{{if not (eq .StartTime 0)}}
{{TimeSinceUnix1 .StartTime}}
{{else}}


+1 -1  templates/repo/modelarts/trainjob/show.tmpl

@@ -370,7 +370,7 @@

<td class="ti-text-form-content">
<div class="text-span text-span-w">
<span style="font-size: 12px;" class="">
<span style="font-size: 12px;" id="{{.VersionName}}-startTime">
{{if not (eq .Cloudbrain.StartTime 0)}}
{{TimeSinceUnix1 .Cloudbrain.StartTime}}
{{else}}


Some files were not shown because too many files changed in this diff
