@@ -51,6 +51,7 @@ require ( | |||
github.com/go-enry/go-enry/v2 v2.3.0 | |||
github.com/go-git/go-billy/v5 v5.0.0 | |||
github.com/go-git/go-git/v5 v5.0.0 | |||
github.com/go-http-utils/headers v0.0.0-20181008091004-fed159eddc2a | |||
github.com/go-ini/ini v1.56.0 // indirect | |||
github.com/go-macaron/auth v0.0.0-20161228062157-884c0e6c9b92 | |||
github.com/go-openapi/jsonreference v0.19.3 // indirect | |||
@@ -61,6 +62,7 @@ require ( | |||
github.com/gobwas/glob v0.2.3 | |||
github.com/gogs/chardet v0.0.0-20191104214054-4b6791f73a28 | |||
github.com/gogs/cron v0.0.0-20171120032916-9f6c956d3e14 | |||
github.com/golang/mock v1.6.0 // indirect | |||
github.com/golang/protobuf v1.4.1 // indirect | |||
github.com/gomodule/redigo v2.0.0+incompatible | |||
github.com/google/go-github/v24 v24.0.1 | |||
@@ -105,7 +107,6 @@ require ( | |||
github.com/prometheus/procfs v0.0.4 // indirect | |||
github.com/quasoft/websspi v1.0.0 | |||
github.com/remyoudompheng/bigfft v0.0.0-20190321074620-2f0d2b0e0001 // indirect | |||
github.com/robfig/cron/v3 v3.0.1 | |||
github.com/satori/go.uuid v1.2.0 | |||
github.com/sergi/go-diff v1.1.0 | |||
github.com/shurcooL/httpfs v0.0.0-20190527155220-6a4d4a70508b // indirect | |||
@@ -125,13 +126,12 @@ require ( | |||
github.com/yuin/goldmark-highlighting v0.0.0-20220208100518-594be1970594 | |||
github.com/yuin/goldmark-meta v1.1.0 | |||
golang.org/x/crypto v0.0.0-20200510223506-06a226fb4e37 | |||
golang.org/x/mod v0.3.0 // indirect | |||
golang.org/x/net v0.0.0-20200513185701-a91f0712d120 | |||
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4 | |||
golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d | |||
golang.org/x/sys v0.0.0-20200509044756-6aff5f38e54f | |||
golang.org/x/text v0.3.2 | |||
golang.org/x/sys v0.0.0-20210510120138-977fb7262007 | |||
golang.org/x/text v0.3.3 | |||
golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1 // indirect | |||
golang.org/x/tools v0.0.0-20200515220128-d3bf790afa53 | |||
golang.org/x/tools v0.1.1 | |||
google.golang.org/appengine v1.6.5 // indirect | |||
gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc // indirect | |||
gopkg.in/asn1-ber.v1 v1.0.0-20150924051756-4e86f4367175 // indirect | |||
@@ -262,6 +262,8 @@ github.com/go-git/go-git-fixtures/v4 v4.0.1 h1:q+IFMfLx200Q3scvt2hN79JsEzy4AmBTp | |||
github.com/go-git/go-git-fixtures/v4 v4.0.1/go.mod h1:m+ICp2rF3jDhFgEZ/8yziagdT1C+ZpZcrJjappBCDSw= | |||
github.com/go-git/go-git/v5 v5.0.0 h1:k5RWPm4iJwYtfWoxIJy4wJX9ON7ihPeZZYC1fLYDnpg= | |||
github.com/go-git/go-git/v5 v5.0.0/go.mod h1:oYD8y9kWsGINPFJoLdaScGCN6dlKg23blmClfZwtUVA= | |||
github.com/go-http-utils/headers v0.0.0-20181008091004-fed159eddc2a h1:v6zMvHuY9yue4+QkG/HQ/W67wvtQmWJ4SDo9aK/GIno= | |||
github.com/go-http-utils/headers v0.0.0-20181008091004-fed159eddc2a/go.mod h1:I79BieaU4fxrw4LMXby6q5OS9XnoR9UIKLOzDFjUmuw= | |||
github.com/go-ini/ini v1.56.0 h1:6HjxSjqdmgnujDPhlzR4a44lxK3w03WPN8te0SoUSeM= | |||
github.com/go-ini/ini v1.56.0/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8= | |||
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= | |||
@@ -358,7 +360,10 @@ github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4er | |||
github.com/golang/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:tluoj9z5200jBnyusfRPU2LqT6J+DAorxEvtC7LHB+E= | |||
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= | |||
github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= | |||
github.com/golang/mock v1.3.1 h1:qGJ6qTW+x6xX/my+8YUVl4WNpX9B7+/l2tRsHGZ7f2s= | |||
github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= | |||
github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc= | |||
github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= | |||
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= | |||
github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= | |||
github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= | |||
@@ -404,8 +409,8 @@ github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5m | |||
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= | |||
github.com/gopherjs/gopherjs v0.0.0-20181103185306-d547d1d9531e/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= | |||
github.com/gopherjs/gopherjs v0.0.0-20190430165422-3e4dfb77656c/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= | |||
github.com/gopherjs/gopherjs v0.0.0-20190910122728-9d188e94fb99 h1:twflg0XRTjwKpxb/jFExr4HGq6on2dEOmnL6FV+fgPw= | |||
github.com/gopherjs/gopherjs v0.0.0-20190910122728-9d188e94fb99/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= | |||
github.com/gopherjs/gopherjs v0.0.0-20200217142428-fce0ec30dd00 h1:l5lAOZEym3oK3SQ2HBHWsJUfbNBiTXJDeW2QDxw9AQ0= | |||
github.com/gopherjs/gopherjs v0.0.0-20200217142428-fce0ec30dd00/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= | |||
github.com/gorilla/context v1.1.1 h1:AWwleXJkX/nhcU9bZSnZoi3h/qGYqQAGhq6zZe/aQW8= | |||
github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg= | |||
@@ -468,7 +473,6 @@ github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqx | |||
github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= | |||
github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= | |||
github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= | |||
github.com/json-iterator/go v1.1.9 h1:9yzud/Ht36ygwatGx56VwCZtlI/2AD15T1X2sjSuGns= | |||
github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= | |||
github.com/json-iterator/go v1.1.10 h1:Kz6Cvnvv2wGdaG/V8yMvfkmNiXq9Ya2KUv4rouJJr68= | |||
github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= | |||
@@ -662,8 +666,6 @@ github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqn | |||
github.com/rcrowley/go-metrics v0.0.0-20190826022208-cac0b30c2563/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= | |||
github.com/remyoudompheng/bigfft v0.0.0-20190321074620-2f0d2b0e0001 h1:YDeskXpkNDhPdWN3REluVa46HQOVuVkjkd2sWnrABNQ= | |||
github.com/remyoudompheng/bigfft v0.0.0-20190321074620-2f0d2b0e0001/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= | |||
github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs= | |||
github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro= | |||
github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= | |||
github.com/russross/blackfriday v1.5.2 h1:HyvC0ARfnZBqnXwABFeSZHpKvJHJJfPz81GNueLj0oo= | |||
github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= | |||
@@ -711,14 +713,12 @@ github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPx | |||
github.com/sirupsen/logrus v1.5.0/go.mod h1:+F7Ogzej0PZc/94MaYx/nvG9jOFMD2osvC3s+Squfpo= | |||
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= | |||
github.com/smartystreets/assertions v0.0.0-20190116191733-b6c0e53d7304/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= | |||
github.com/smartystreets/assertions v1.0.1 h1:voD4ITNjPL5jjBfgR/r8fPIIBrliWrWHeiJApdr3r4w= | |||
github.com/smartystreets/assertions v1.0.1/go.mod h1:kHHU4qYBaI3q23Pp3VPrmWhuIUrLW/7eUrw0BU5VaoM= | |||
github.com/smartystreets/assertions v1.1.0 h1:MkTeG1DMwsrdH7QtLXy5W+fUxWq+vmb6cLmyJ7aRtF0= | |||
github.com/smartystreets/assertions v1.1.0/go.mod h1:tcbTF8ujkAEcZ8TElKY+i30BzYlVhC/LOxJk7iOWnoo= | |||
github.com/smartystreets/go-aws-auth v0.0.0-20180515143844-0c1422d1fdb9/go.mod h1:SnhjPscd9TpLiy1LpzGSKh3bXCfxxXuqd9xmQJy3slM= | |||
github.com/smartystreets/goconvey v0.0.0-20181108003508-044398e4856c/go.mod h1:XDJAKZRPZ1CvBcN2aX5YOUTYGHki24fSF0Iv48Ibg0s= | |||
github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= | |||
github.com/smartystreets/goconvey v0.0.0-20190731233626-505e41936337 h1:WN9BUFbdyOsSH/XohnWpXOlq9NBD5sGAB2FciQMUEe8= | |||
github.com/smartystreets/goconvey v0.0.0-20190731233626-505e41936337/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= | |||
github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s= | |||
github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= | |||
@@ -749,7 +749,6 @@ github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+ | |||
github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= | |||
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= | |||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= | |||
github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk= | |||
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= | |||
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= | |||
github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= | |||
@@ -804,20 +803,16 @@ github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q | |||
github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= | |||
github.com/yohcop/openid-go v1.0.0 h1:EciJ7ZLETHR3wOtxBvKXx9RV6eyHZpCaSZ1inbBaUXE= | |||
github.com/yohcop/openid-go v1.0.0/go.mod h1:/408xiwkeItSPJZSTPF7+VtZxPkPrRRpRNK2vjGh6yI= | |||
github.com/yuin/goldmark v1.1.7/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= | |||
github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= | |||
github.com/yuin/goldmark v1.1.27 h1:nqDD4MMMQA0lmWq03Z2/myGPYLQoXtmi0rGVs95ntbo= | |||
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= | |||
github.com/yuin/goldmark v1.1.30 h1:j4d4Lw3zqZelDhBksEo3BnWg9xhXRQGJPPSL6OApZjI= | |||
github.com/yuin/goldmark v1.1.30/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= | |||
github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= | |||
github.com/yuin/goldmark v1.4.5/go.mod h1:rmuwmfZ0+bvzB24eSC//bk1R1Zp3hM0OXYv/G2LIilg= | |||
github.com/yuin/goldmark v1.4.6/go.mod h1:rmuwmfZ0+bvzB24eSC//bk1R1Zp3hM0OXYv/G2LIilg= | |||
github.com/yuin/goldmark v1.4.13 h1:fVcFKWvrslecOb/tg+Cc05dkeYx540o0FuFt3nUVDoE= | |||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= | |||
github.com/yuin/goldmark-highlighting v0.0.0-20220208100518-594be1970594 h1:yHfZyN55+5dp1wG7wDKv8HQ044moxkyGq12KFFMFDxg= | |||
github.com/yuin/goldmark-highlighting v0.0.0-20220208100518-594be1970594/go.mod h1:U9ihbh+1ZN7fR5Se3daSPoz1CGF9IYtSvWwVQtnzGHU= | |||
github.com/yuin/goldmark-meta v0.0.0-20191126180153-f0638e958b60 h1:gZucqLjL1eDzVWrXj4uiWeMbAopJlBR2mKQAsTGdPwo= | |||
github.com/yuin/goldmark-meta v0.0.0-20191126180153-f0638e958b60/go.mod h1:i9VhcIHN2PxXMbQrKqXNueok6QNONoPjNMoj9MygVL0= | |||
github.com/yuin/goldmark-meta v1.1.0 h1:pWw+JLHGZe8Rk0EGsMVssiNb/AaPMHfSRszZeUeiOUc= | |||
github.com/yuin/goldmark-meta v1.1.0/go.mod h1:U4spWENafuA7Zyg+Lj5RqK/MF+ovMYtBvXi1lBb2VP0= | |||
github.com/ziutek/mymysql v1.5.4 h1:GB0qdRGsTwQSBVYuVShFBKaXSnSnYYC2d9knnE1LHFs= | |||
@@ -859,14 +854,11 @@ golang.org/x/crypto v0.0.0-20190927123631-a832865fa7ad/go.mod h1:yigFU9vqHzYiE8U | |||
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= | |||
golang.org/x/crypto v0.0.0-20191219195013-becbf705a915/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= | |||
golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= | |||
golang.org/x/crypto v0.0.0-20200429183012-4b2356b1ed79 h1:IaQbIIB2X/Mp/DKctl6ROxz1KyMlKp4uyvL6+kQ7C88= | |||
golang.org/x/crypto v0.0.0-20200429183012-4b2356b1ed79/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= | |||
golang.org/x/crypto v0.0.0-20200510223506-06a226fb4e37 h1:cg5LA/zNPRzIXIWSCxQW10Rvpy94aQh3LT/ShoCpkHw= | |||
golang.org/x/crypto v0.0.0-20200510223506-06a226fb4e37/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= | |||
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= | |||
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= | |||
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= | |||
golang.org/x/image v0.0.0-20190910094157-69e4b8554b2a h1:gHevYm0pO4QUbwy8Dmdr01R5r1BuKtfYqRqF0h/Cbh0= | |||
golang.org/x/image v0.0.0-20190910094157-69e4b8554b2a/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= | |||
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8 h1:hVwzHzIUGRjiF7EcUjqNxk3NCfkPxbDKRdnNE1Rpg0U= | |||
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= | |||
@@ -882,6 +874,8 @@ golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzB | |||
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= | |||
golang.org/x/mod v0.3.0 h1:RM4zey1++hCTbCVQfnWeKs9/IEsaBLA8vTkd0WVtmH4= | |||
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= | |||
golang.org/x/mod v0.4.2 h1:Gz96sIWK3OalVv/I/qNygP42zyoKp3xptRVCWRFEBvo= | |||
golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= | |||
golang.org/x/net v0.0.0-20180218175443-cbe0f9307d01/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= | |||
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= | |||
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= | |||
@@ -913,6 +907,8 @@ golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLL | |||
golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= | |||
golang.org/x/net v0.0.0-20200513185701-a91f0712d120 h1:EZ3cVSzKOlJxAd8e8YAJ7no8nNypTxexh/YE/xW3ZEY= | |||
golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= | |||
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4 h1:4nGaVu0QrbjT/AK2PRLuQfQuh6DJve+pELhqTdAj3x0= | |||
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= | |||
golang.org/x/oauth2 v0.0.0-20180620175406-ef147856a6dd/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= | |||
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= | |||
golang.org/x/oauth2 v0.0.0-20181017192945-9dcd33a902f4/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= | |||
@@ -929,10 +925,11 @@ golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJ | |||
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | |||
golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | |||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | |||
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e h1:vcxGaoTs7kV8m5Np9uUNQin4BrLOthgV7252N8V+FwY= | |||
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | |||
golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a h1:WXEvlFVvvGxCJLG6REjsT03iWnKLEWinaScsxF2Vm2o= | |||
golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | |||
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ= | |||
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= | |||
golang.org/x/sys v0.0.0-20180824143301-4910a1d54f87/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= | |||
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= | |||
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= | |||
@@ -967,10 +964,17 @@ golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7w | |||
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | |||
golang.org/x/sys v0.0.0-20200509044756-6aff5f38e54f h1:mOhmO9WsBaJCNmaZHPtHs9wOcdqdKCjF6OPJlmDM3KI= | |||
golang.org/x/sys v0.0.0-20200509044756-6aff5f38e54f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | |||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | |||
golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= | |||
golang.org/x/sys v0.0.0-20210510120138-977fb7262007 h1:gG67DSER+11cZvqIMb8S8bt0vZtiN6xWYARwirrOSfE= | |||
golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= | |||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= | |||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= | |||
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= | |||
golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs= | |||
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= | |||
golang.org/x/text v0.3.3 h1:cokOdA+Jmi5PJGXLlLllQSgYigAEfHXJAERHVMaCc2k= | |||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= | |||
golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= | |||
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= | |||
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= | |||
@@ -1001,10 +1005,14 @@ golang.org/x/tools v0.0.0-20200325010219-a49f79bcc224/go.mod h1:Sl4aGygMT6LrqrWc | |||
golang.org/x/tools v0.0.0-20200509030707-2212a7e161a5/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= | |||
golang.org/x/tools v0.0.0-20200515220128-d3bf790afa53 h1:vmsb6v0zUdmUlXfwKaYrHPPRCV0lHq/IwNIf0ASGjyQ= | |||
golang.org/x/tools v0.0.0-20200515220128-d3bf790afa53/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= | |||
golang.org/x/tools v0.1.1 h1:wGiQel/hW0NnEkJUk8lbzkX2gFJU6PFxf1v5OlCfuOs= | |||
golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= | |||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= | |||
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= | |||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= | |||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= | |||
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= | |||
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= | |||
google.golang.org/api v0.0.0-20180910000450-7ca32eb868bf/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0= | |||
google.golang.org/api v0.0.0-20181030000543-1d582fd0359e/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0= | |||
google.golang.org/api v0.0.0-20181220000619-583d854617af/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0= | |||
@@ -1076,8 +1084,6 @@ gopkg.in/ini.v1 v1.42.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= | |||
gopkg.in/ini.v1 v1.44.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= | |||
gopkg.in/ini.v1 v1.44.2/go.mod h1:M3Cogqpuv0QCi3ExAY5V4uOt4qb/R3xZubo9m8lK5wg= | |||
gopkg.in/ini.v1 v1.46.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= | |||
gopkg.in/ini.v1 v1.52.0 h1:j+Lt/M1oPPejkniCg1TkWE2J3Eh1oZTsHSXzMTzUXn4= | |||
gopkg.in/ini.v1 v1.52.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= | |||
gopkg.in/ini.v1 v1.56.0 h1:DPMeDvGTM54DXbPkVIZsp19fp/I2K7zwA/itHYHKo8Y= | |||
gopkg.in/ini.v1 v1.56.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= | |||
gopkg.in/ldap.v3 v3.0.2 h1:R6RBtabK6e1GO0eQKtkyOFbAHO73QesLzI2w2DZ6b9w= | |||
@@ -1098,7 +1104,6 @@ gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bl | |||
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= | |||
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= | |||
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= | |||
gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10= | |||
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= | |||
gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU= | |||
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= | |||
@@ -116,6 +116,8 @@ const ( | |||
GrampusStatusStopped = "STOPPED" | |||
GrampusStatusUnknown = "UNKNOWN" | |||
GrampusStatusWaiting = "WAITING" | |||
ModelSuffix = "models.zip" | |||
) | |||
const ( | |||
@@ -161,6 +161,7 @@ func init() { | |||
new(CloudbrainSpec), | |||
new(CloudbrainTemp), | |||
new(DatasetReference), | |||
new(ScheduleRecord), | |||
new(BadgeCategory), | |||
new(Badge), | |||
new(BadgeUser), | |||
@@ -454,6 +454,7 @@ func (repo *Repository) innerAPIFormat(e Engine, mode AccessMode, isParent bool) | |||
AllowRebaseMerge: allowRebaseMerge, | |||
AllowSquash: allowSquash, | |||
AvatarURL: repo.avatarLink(e), | |||
Status: int(repo.Status), | |||
} | |||
} | |||
@@ -249,22 +249,23 @@ type AdminRewardOperateReq struct { | |||
} | |||
type RewardOperateRecordShow struct { | |||
SerialNo string | |||
Status string | |||
OperateType string | |||
SourceId string | |||
Amount int64 | |||
LossAmount int64 | |||
BalanceAfter int64 | |||
Remark string | |||
SourceType string | |||
UserName string | |||
LastOperateDate timeutil.TimeStamp | |||
UnitPrice int64 | |||
SuccessCount int | |||
Action *ActionShow | |||
Cloudbrain *CloudbrainShow | |||
AdminLog *RewardAdminLogShow | |||
SerialNo string | |||
Status string | |||
OperateType string | |||
SourceId string | |||
Amount int64 | |||
LossAmount int64 | |||
BalanceAfter int64 | |||
Remark string | |||
SourceType string | |||
SourceTemplateId string | |||
UserName string | |||
LastOperateDate timeutil.TimeStamp | |||
UnitPrice int64 | |||
SuccessCount int | |||
Action *ActionShow | |||
Cloudbrain *CloudbrainShow | |||
AdminLog *RewardAdminLogShow | |||
} | |||
func getPointOperateRecord(tl *RewardOperateRecord) (*RewardOperateRecord, error) { | |||
@@ -419,7 +420,7 @@ func GetRewardRecordShowList(opts *RewardRecordListOpts) (RewardRecordShowList, | |||
r := make([]*RewardOperateRecordShow, 0) | |||
err = x.Table("reward_operate_record").Cols("reward_operate_record.source_id", "reward_operate_record.serial_no", | |||
"reward_operate_record.status", "reward_operate_record.operate_type", "reward_operate_record.amount", | |||
"reward_operate_record.loss_amount", "reward_operate_record.remark", "reward_operate_record.source_type", | |||
"reward_operate_record.loss_amount", "reward_operate_record.remark", "reward_operate_record.source_type", "reward_operate_record.source_template_id", | |||
"reward_operate_record.last_operate_unix as last_operate_date"). | |||
Where(cond).Limit(opts.PageSize, (opts.Page-1)*opts.PageSize).OrderBy(string(opts.OrderBy)).Find(&r) | |||
@@ -441,7 +442,7 @@ func GetAdminRewardRecordShowList(opts *RewardRecordListOpts) (RewardRecordShowL | |||
case OperateTypeIncrease: | |||
err = x.Table("reward_operate_record").Cols("reward_operate_record.source_id", "reward_operate_record.serial_no", | |||
"reward_operate_record.status", "reward_operate_record.operate_type", "reward_operate_record.amount", | |||
"reward_operate_record.loss_amount", "reward_operate_record.remark", "reward_operate_record.source_type", | |||
"reward_operate_record.loss_amount", "reward_operate_record.remark", "reward_operate_record.source_type", "reward_operate_record.source_template_id", | |||
"reward_operate_record.last_operate_unix as last_operate_date", "public.user.name as user_name", | |||
"point_account_log.balance_after"). | |||
Join("LEFT", "public.user", "reward_operate_record.user_id = public.user.id"). | |||
@@ -450,7 +451,7 @@ func GetAdminRewardRecordShowList(opts *RewardRecordListOpts) (RewardRecordShowL | |||
case OperateTypeDecrease: | |||
err = x.Table("reward_operate_record").Cols("reward_operate_record.source_id", "reward_operate_record.serial_no", | |||
"reward_operate_record.status", "reward_operate_record.operate_type", "reward_operate_record.amount", | |||
"reward_operate_record.loss_amount", "reward_operate_record.remark", "reward_operate_record.source_type", | |||
"reward_operate_record.loss_amount", "reward_operate_record.remark", "reward_operate_record.source_type", "reward_operate_record.source_template_id", | |||
"reward_operate_record.last_operate_unix as last_operate_date", "public.user.name as user_name", | |||
"reward_periodic_task.amount as unit_price", "reward_periodic_task.success_count"). | |||
Join("LEFT", "public.user", "reward_operate_record.user_id = public.user.id"). | |||
@@ -0,0 +1,70 @@ | |||
package models | |||
import ( | |||
"fmt" | |||
"time" | |||
"code.gitea.io/gitea/modules/timeutil" | |||
) | |||
// Status values for ScheduleRecord.Status. They describe the lifecycle of a
// model-output storage schedule task (driven by urchin.HandleScheduleRecords;
// see GetSchedulingRecord, which polls the Processing state).
// NOTE: these are persisted as integers — do not reorder.
const (
	StorageScheduleSucceed    int = iota // transfer completed successfully
	StorageScheduleProcessing            // transfer currently in progress
	StorageScheduleFailed                // transfer failed
	StorageNoFile                        // no file was found to transfer
	StorageScheduleWaiting               // queued, not yet started
)
// ScheduleRecord tracks one model-output transfer task per cloudbrain job:
// where the data lives (endpoint/bucket/object key), the proxy fronting that
// storage, and the current transfer status (see StorageSchedule* constants).
type ScheduleRecord struct {
	ID           int64              `xorm:"pk autoincr"`
	CloudbrainID int64              `xorm:"INDEX NOT NULL unique"` // one record per cloudbrain job
	EndPoint     string             `xorm:"INDEX NOT NULL"`        // object-storage endpoint — presumably the source AI center's (see grampus.GetRemoteEndPoint); confirm
	Bucket       string             `xorm:"INDEX NOT NULL"`
	ObjectKey    string             `xorm:"INDEX NOT NULL"`
	ProxyServer  string             `xorm:"INDEX NOT NULL"` // storage proxy server of the center (see grampus.GetCenterProxy)
	Status       int                `xorm:"INDEX NOT NULL DEFAULT 0"` // defaults to StorageScheduleSucceed (0)
	CreatedUnix  timeutil.TimeStamp `xorm:"created"`
	UpdatedUnix  timeutil.TimeStamp `xorm:"updated"`
	DeletedAt    time.Time          `xorm:"deleted"` // soft delete
}
func updateScheduleCols(e Engine, record *ScheduleRecord, cols ...string) error { | |||
_, err := e.ID(record.ID).Cols(cols...).Update(record) | |||
return err | |||
} | |||
// UpdateScheduleCols updates only the named columns of record (matched by
// record.ID) on the default engine.
func UpdateScheduleCols(record *ScheduleRecord, cols ...string) error {
	return updateScheduleCols(x, record, cols...)
}
func GetSchedulingRecord() ([]*ScheduleRecord, error) { | |||
records := make([]*ScheduleRecord, 0, 10) | |||
return records, x. | |||
Where("status = ?", StorageScheduleProcessing). | |||
Limit(100). | |||
Find(&records) | |||
} | |||
func InsertScheduleRecord(record *ScheduleRecord) (_ *ScheduleRecord, err error) { | |||
if _, err := x.Insert(record); err != nil { | |||
return nil, err | |||
} | |||
return record, nil | |||
} | |||
func getScheduleRecordByPrID(e Engine, cloudbrainId int64) (*ScheduleRecord, error) { | |||
record := new(ScheduleRecord) | |||
has, err := e.Where("cloudbrain_id = ?", cloudbrainId).Get(record) | |||
if err != nil { | |||
return nil, err | |||
} else if !has { | |||
return nil, fmt.Errorf("get record by cloudbrain_id failed(%d)", cloudbrainId) | |||
} | |||
return record, nil | |||
} | |||
// GetScheduleRecordByCloudbrainID returns the ScheduleRecord for the given
// cloudbrain job on the default engine, or an error if none exists.
func GetScheduleRecordByCloudbrainID(cloudbrainId int64) (*ScheduleRecord, error) {
	return getScheduleRecordByPrID(x, cloudbrainId)
}
@@ -311,6 +311,7 @@ func ToOrganization(org *models.User) *api.Organization { | |||
Location: org.Location, | |||
Visibility: org.Visibility.String(), | |||
RepoAdminChangeTeamAccess: org.RepoAdminChangeTeamAccess, | |||
NumRepos: org.NumRepos, | |||
} | |||
} | |||
@@ -5,6 +5,7 @@ | |||
package cron | |||
import ( | |||
"code.gitea.io/gitea/modules/urfs_client/urchin" | |||
"context" | |||
"time" | |||
@@ -222,6 +223,17 @@ func registerSyncCloudbrainStatus() { | |||
}) | |||
} | |||
func registerHandleScheduleRecord() { | |||
RegisterTaskFatal("handle_schedule_record", &BaseConfig{ | |||
Enabled: true, | |||
RunAtStart: false, | |||
Schedule: "@every 1m", | |||
}, func(ctx context.Context, _ *models.User, _ Config) error { | |||
urchin.HandleScheduleRecords() | |||
return nil | |||
}) | |||
} | |||
func registerRewardPeriodTask() { | |||
RegisterTaskFatal("reward_period_task", &BaseConfig{ | |||
Enabled: true, | |||
@@ -304,5 +316,7 @@ func initBasicTasks() { | |||
registerCloudbrainPointDeductTask() | |||
registerHandleModelSafetyTask() | |||
registerHandleScheduleRecord() | |||
registerHandleCloudbrainDurationStatistic() | |||
} |
@@ -1,16 +1,15 @@ | |||
package grampus | |||
import ( | |||
"code.gitea.io/gitea/modules/cloudbrain" | |||
"encoding/json" | |||
"strings" | |||
"code.gitea.io/gitea/modules/setting" | |||
"code.gitea.io/gitea/models" | |||
"code.gitea.io/gitea/modules/cloudbrain" | |||
"code.gitea.io/gitea/modules/context" | |||
"code.gitea.io/gitea/modules/log" | |||
"code.gitea.io/gitea/modules/notification" | |||
"code.gitea.io/gitea/modules/setting" | |||
"code.gitea.io/gitea/modules/timeutil" | |||
) | |||
@@ -20,10 +19,15 @@ const ( | |||
ProcessorTypeNPU = "npu.huawei.com/NPU" | |||
ProcessorTypeGPU = "nvidia.com/gpu" | |||
GpuWorkDir = "/tmp/" | |||
NpuWorkDir = "/cache/" | |||
GpuWorkDir = "/tmp/" | |||
NpuWorkDir = "/cache/" | |||
NpuLocalLogUrl = "/tmp/train.log" | |||
CommandPrepareScriptNpu = ";mkdir -p output;mkdir -p code;mkdir -p dataset;mkdir -p pretrainmodel;" | |||
CodeArchiveName = "master.zip" | |||
BucketRemote = "grampus" | |||
RemoteModelPath = "/output/" + models.ModelSuffix | |||
) | |||
var ( | |||
@@ -33,7 +37,7 @@ var ( | |||
SpecialPools *models.SpecialPools | |||
CommandPrepareScript = ";mkdir -p output;mkdir -p code;mkdir -p dataset;mkdir -p pretrainmodel;echo \"start loading script\";wget -q https://git.openi.org.cn/OpenIOSSG/%s/archive/master.zip;" + | |||
CommandPrepareScriptGpu = ";mkdir -p output;mkdir -p code;mkdir -p dataset;mkdir -p pretrainmodel;echo \"start loading script\";wget -q https://git.openi.org.cn/OpenIOSSG/%s/archive/master.zip;" + | |||
"echo \"finish loading script\";unzip -q master.zip;cd %s;chmod 777 downloader_for_obs uploader_for_npu downloader_for_minio uploader_for_gpu;" | |||
) | |||
@@ -273,3 +277,35 @@ func InitSpecialPool() { | |||
json.Unmarshal([]byte(setting.Grampus.SpecialPools), &SpecialPools) | |||
} | |||
} | |||
func GetNpuModelRemoteObsUrl(jobName string) string { | |||
return "s3:///" + BucketRemote + "/" + GetNpuModelObjectKey(jobName) | |||
} | |||
func GetNpuModelObjectKey(jobName string) string { | |||
return setting.CodePathPrefix + jobName + RemoteModelPath | |||
} | |||
func GetRemoteEndPoint(aiCenterID string) string { | |||
var endPoint string | |||
for _, info := range setting.CenterInfos.Info { | |||
if info.CenterID == aiCenterID { | |||
endPoint = info.Endpoint | |||
break | |||
} | |||
} | |||
return endPoint | |||
} | |||
func GetCenterProxy(aiCenterID string) string { | |||
var proxy string | |||
for _, info := range setting.CenterInfos.Info { | |||
if info.CenterID == aiCenterID { | |||
proxy = info.StorageProxyServer | |||
break | |||
} | |||
} | |||
return proxy | |||
} |
@@ -76,6 +76,17 @@ type C2NetSqInfos struct { | |||
C2NetSqInfo []*C2NetSequenceInfo `json:"sequence"` | |||
} | |||
// AiCenterInfo describes one AI computing center and how to reach its storage.
type AiCenterInfo struct {
	CenterID           string `json:"center_id"`            // unique identifier of the center
	Name               string `json:"name"`                 // human-readable center name
	Endpoint           string `json:"endpoint"`             // storage service endpoint of the center
	StorageProxyServer string `json:"storage_proxy_server"` // proxy host used when scheduling storage
}

// AiCenterInfos is the JSON container parsed from the AI_CENTER_INFO setting.
type AiCenterInfos struct {
	Info []*AiCenterInfo `json:"infos"`
}
// StFlavorInfos is the JSON container of configured resource flavors.
type StFlavorInfos struct {
	FlavorInfo []*FlavorInfo `json:"flavor_info"`
}
@@ -594,9 +605,12 @@ var ( | |||
SpecialPools string | |||
C2NetSequence string | |||
SyncScriptProject string | |||
LocalCenterID string | |||
AiCenterInfo string | |||
}{} | |||
C2NetInfos *C2NetSqInfos | |||
C2NetInfos *C2NetSqInfos | |||
CenterInfos *AiCenterInfos | |||
C2NetMapInfo map[string]*C2NetSequenceInfo | |||
//elk config | |||
@@ -1647,6 +1661,13 @@ func getGrampusConfig() { | |||
} | |||
} | |||
Grampus.SyncScriptProject = sec.Key("SYNC_SCRIPT_PROJECT").MustString("script_for_grampus") | |||
Grampus.LocalCenterID = sec.Key("LOCAL_CENTER_ID").MustString("cloudbrain2") | |||
Grampus.AiCenterInfo = sec.Key("AI_CENTER_INFO").MustString("") | |||
if Grampus.AiCenterInfo != "" { | |||
if err := json.Unmarshal([]byte(Grampus.AiCenterInfo), &CenterInfos); err != nil { | |||
log.Error("Unmarshal(AiCenterInfo) failed:%v", err) | |||
} | |||
} | |||
} | |||
@@ -470,47 +470,43 @@ func GetObsListObject(jobName, outPutPath, parentDir, versionName string) ([]Fil | |||
input := &obs.ListObjectsInput{} | |||
input.Bucket = setting.Bucket | |||
input.Prefix = strings.TrimPrefix(path.Join(setting.TrainJobModelPath, jobName, outPutPath, versionName, parentDir), "/") | |||
log.Info("bucket=" + input.Bucket + " Prefix=" + input.Prefix) | |||
strPrefix := strings.Split(input.Prefix, "/") | |||
if !strings.HasSuffix(input.Prefix, "/") { | |||
input.Prefix += "/" | |||
} | |||
output, err := ObsCli.ListObjects(input) | |||
fileInfos := make([]FileInfo, 0) | |||
prefixLen := len(input.Prefix) | |||
fileMap := make(map[string]bool, 0) | |||
if err == nil { | |||
for _, val := range output.Contents { | |||
str1 := strings.Split(val.Key, "/") | |||
log.Info("val key=" + val.Key) | |||
var isDir bool | |||
var fileName, nextParentDir string | |||
if strings.HasSuffix(val.Key, "/") { | |||
//dirs in next level dir | |||
if len(str1)-len(strPrefix) > 2 { | |||
continue | |||
} | |||
fileName = str1[len(str1)-2] | |||
var fileName string | |||
if val.Key == input.Prefix { | |||
continue | |||
} | |||
fileName = val.Key[prefixLen:] | |||
log.Info("fileName =" + fileName) | |||
files := strings.Split(fileName, "/") | |||
if fileMap[files[0]] { | |||
continue | |||
} else { | |||
fileMap[files[0]] = true | |||
} | |||
ParenDir := parentDir | |||
fileName = files[0] | |||
if len(files) > 1 { | |||
isDir = true | |||
if parentDir == "" { | |||
nextParentDir = fileName | |||
} else { | |||
nextParentDir = parentDir + "/" + fileName | |||
} | |||
if fileName == strPrefix[len(strPrefix)-1] || (fileName+"/") == outPutPath { | |||
continue | |||
} | |||
ParenDir += fileName + "/" | |||
} else { | |||
//files in next level dir | |||
if len(str1)-len(strPrefix) > 1 { | |||
continue | |||
} | |||
fileName = str1[len(str1)-1] | |||
isDir = false | |||
nextParentDir = parentDir | |||
} | |||
fileInfo := FileInfo{ | |||
ModTime: val.LastModified.Local().Format("2006-01-02 15:04:05"), | |||
FileName: fileName, | |||
Size: val.Size, | |||
IsDir: isDir, | |||
ParenDir: nextParentDir, | |||
ParenDir: ParenDir, | |||
} | |||
fileInfos = append(fileInfos, fileInfo) | |||
} | |||
@@ -15,6 +15,7 @@ type Organization struct { | |||
Location string `json:"location"` | |||
Visibility string `json:"visibility"` | |||
RepoAdminChangeTeamAccess bool `json:"repo_admin_change_team_access"` | |||
NumRepos int `json:"num_repos"` | |||
} | |||
// CreateOrgOption options for creating an organization | |||
@@ -90,6 +90,7 @@ type Repository struct { | |||
AllowRebaseMerge bool `json:"allow_rebase_explicit"` | |||
AllowSquash bool `json:"allow_squash_merge"` | |||
AvatarURL string `json:"avatar_url"` | |||
Status int `json:"status"` | |||
} | |||
// CreateRepoOption options when creating repository | |||
@@ -0,0 +1,93 @@ | |||
/* | |||
* Copyright 2020 The Dragonfly Authors | |||
* | |||
* Licensed under the Apache License, Version 2.0 (the "License"); | |||
* you may not use this file except in compliance with the License. | |||
* You may obtain a copy of the License at | |||
* | |||
* http://www.apache.org/licenses/LICENSE-2.0 | |||
* | |||
* Unless required by applicable law or agreed to in writing, software | |||
* distributed under the License is distributed on an "AS IS" BASIS, | |||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
* See the License for the specific language governing permissions and | |||
* limitations under the License. | |||
*/ | |||
package config | |||
import ( | |||
"time" | |||
) | |||
// Reason of backing to source.
const (
	BackSourceReasonNone          = 0
	BackSourceReasonRegisterFail  = 1
	BackSourceReasonMd5NotMatch   = 2
	BackSourceReasonDownloadError = 3
	BackSourceReasonNoSpace       = 4
	BackSourceReasonInitError     = 5
	BackSourceReasonWriteError    = 6
	BackSourceReasonHostSysError  = 7
	BackSourceReasonNodeEmpty     = 8
	BackSourceReasonSourceError   = 10
	BackSourceReasonUserSpecified = 100
	// NOTE(review): presumably an additive marker combined with the reason
	// codes above to forbid back-to-source — confirm against the scheduler.
	ForceNotBackSourceAddition = 1000
)
// Download pattern.
const (
	PatternP2P      = "p2p"       // download through the P2P network
	PatternSeedPeer = "seed-peer" // download through seed peers
	PatternSource   = "source"    // download directly from the origin
)
//// Download limit. | |||
//const ( | |||
// DefaultPerPeerDownloadLimit = 20 * unit.MB | |||
// DefaultTotalDownloadLimit = 100 * unit.MB | |||
// DefaultUploadLimit = 100 * unit.MB | |||
// DefaultMinRate = 20 * unit.MB | |||
//) | |||
// Others.
const (
	// DefaultTimestampFormat is the Go reference-time layout used for timestamps.
	DefaultTimestampFormat = "2006-01-02 15:04:05"
	SchemaHTTP             = "http"
	DefaultTaskExpireTime  = 6 * time.Hour
	DefaultGCInterval      = 1 * time.Minute
	DefaultDaemonAliveTime = 5 * time.Minute
	DefaultScheduleTimeout = 5 * time.Minute
	DefaultDownloadTimeout = 5 * time.Minute
	DefaultSchedulerSchema = "http"
	DefaultSchedulerIP     = "127.0.0.1"
	DefaultSchedulerPort   = 8002
	DefaultPieceChanSize   = 16
	// DefaultObjectMaxReplicas is the default number of object-cache replicas
	// kept in seed peers (see DfstoreConfig.MaxReplicas).
	DefaultObjectMaxReplicas = 3
)
// Dfcache subcommand names.
const (
	CmdStat   = "stat"
	CmdImport = "import"
	CmdExport = "export"
	CmdDelete = "delete"
)
// Service default ports of listening.
const (
	DefaultEndPort                = 65535
	DefaultPeerStartPort          = 65000
	DefaultUploadStartPort        = 65002
	DefaultObjectStorageStartPort = 65004
	DefaultHealthyStartPort       = 40901
)
var (
	// DefaultCertValidityPeriod is default validity period of certificate (180 days).
	DefaultCertValidityPeriod = 180 * 24 * time.Hour
)
@@ -0,0 +1,66 @@ | |||
/* | |||
* Copyright 2022 The Dragonfly Authors | |||
* | |||
* Licensed under the Apache License, Version 2.0 (the "License"); | |||
* you may not use this file except in compliance with the License. | |||
* You may obtain a copy of the License at | |||
* | |||
* http://www.apache.org/licenses/LICENSE-2.0 | |||
* | |||
* Unless required by applicable law or agreed to in writing, software | |||
* distributed under the License is distributed on an "AS IS" BASIS, | |||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
* See the License for the specific language governing permissions and | |||
* limitations under the License. | |||
*/ | |||
package config | |||
import ( | |||
"errors" | |||
"fmt" | |||
"net/url" | |||
) | |||
// DfstoreConfig is the configuration of the dfstore object-storage client.
type DfstoreConfig struct {
	// Endpoint is the address of the object storage service.
	Endpoint string `yaml:"endpoint,omitempty" mapstructure:"endpoint,omitempty"`

	// Filter is used to generate a unique Task ID by
	// filtering unnecessary query params in the URL,
	// it is separated by & character.
	Filter string `yaml:"filter,omitempty" mapstructure:"filter,omitempty"`

	// Mode is the mode in which the backend is written,
	// including WriteBack and AsyncWriteBack.
	Mode int `yaml:"mode,omitempty" mapstructure:"mode,omitempty"`

	// MaxReplicas is the maximum number of
	// replicas of an object cache in seed peers.
	// BUG FIX: the tag was `mapstructure:"mode,maxReplicas"`, which made
	// this field decode from the "mode" key (colliding with Mode) instead
	// of "maxReplicas".
	MaxReplicas int `yaml:"maxReplicas,omitempty" mapstructure:"maxReplicas,omitempty"`
}
// New dfstore configuration. | |||
func NewDfstore() *DfstoreConfig { | |||
url := url.URL{ | |||
Scheme: "http", | |||
Host: fmt.Sprintf("%s:%d", "127.0.0.1", DefaultObjectStorageStartPort), | |||
} | |||
return &DfstoreConfig{ | |||
Endpoint: url.String(), | |||
MaxReplicas: DefaultObjectMaxReplicas, | |||
} | |||
} | |||
func (cfg *DfstoreConfig) Validate() error { | |||
if cfg.Endpoint == "" { | |||
return errors.New("dfstore requires parameter endpoint") | |||
} | |||
if _, err := url.ParseRequestURI(cfg.Endpoint); err != nil { | |||
return fmt.Errorf("invalid endpoint: %w", err) | |||
} | |||
return nil | |||
} |
@@ -0,0 +1,32 @@ | |||
/* | |||
* Copyright 2020 The Dragonfly Authors | |||
* | |||
* Licensed under the Apache License, Version 2.0 (the "License"); | |||
* you may not use this file except in compliance with the License. | |||
* You may obtain a copy of the License at | |||
* | |||
* http://www.apache.org/licenses/LICENSE-2.0 | |||
* | |||
* Unless required by applicable law or agreed to in writing, software | |||
* distributed under the License is distributed on an "AS IS" BASIS, | |||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
* See the License for the specific language governing permissions and | |||
* limitations under the License. | |||
*/ | |||
package config | |||
// HTTP header names understood by Dragonfly components.
const (
	// HeaderDragonflyFilter carries the query-param filter used to build a task ID.
	HeaderDragonflyFilter = "X-Dragonfly-Filter"
	HeaderDragonflyPeer   = "X-Dragonfly-Peer"
	HeaderDragonflyTask   = "X-Dragonfly-Task"
	HeaderDragonflyRange  = "X-Dragonfly-Range"
	// HeaderDragonflyTag different HeaderDragonflyTag for the same url will be divided into different P2P overlay
	HeaderDragonflyTag = "X-Dragonfly-Tag"
	// HeaderDragonflyApplication is used for statistics and traffic control
	HeaderDragonflyApplication = "X-Dragonfly-Application"
	// HeaderDragonflyRegistry is used for dynamic registry mirrors.
	HeaderDragonflyRegistry = "X-Dragonfly-Registry"
	// HeaderDragonflyObjectMetaDigest is used for digest of object storage.
	HeaderDragonflyObjectMetaDigest = "X-Dragonfly-Object-Meta-Digest"
)
@@ -0,0 +1,307 @@ | |||
package dfstore | |||
import ( | |||
"context" | |||
"errors" | |||
"fmt" | |||
"github.com/go-http-utils/headers" | |||
"io" | |||
"net/http" | |||
"net/url" | |||
"path" | |||
"strconv" | |||
"code.gitea.io/gitea/modules/urfs_client/config" | |||
pkgobjectstorage "code.gitea.io/gitea/modules/urfs_client/objectstorage" | |||
) | |||
// Dfstore is the interface used for object storage.
type Dfstore interface {
	// GetUrfsMetadataRequestWithContext returns *http.Request of getting Urfs metadata.
	GetUrfsMetadataRequestWithContext(ctx context.Context, input *GetUrfsMetadataInput) (*http.Request, error)
	// GetUrfsMetadataWithContext returns metadata of Urfs.
	GetUrfsMetadataWithContext(ctx context.Context, input *GetUrfsMetadataInput) (*pkgobjectstorage.ObjectMetadata, error)
	// GetUrfsRequestWithContext returns *http.Request of getting Urfs.
	GetUrfsRequestWithContext(ctx context.Context, input *GetUrfsInput) (*http.Request, error)
	// GetUrfsWithContext returns data of Urfs.
	GetUrfsWithContext(ctx context.Context, input *GetUrfsInput) (io.ReadCloser, error)
	// GetUrfsStatusRequestWithContext returns *http.Request of getting Urfs status.
	GetUrfsStatusRequestWithContext(ctx context.Context, input *GetUrfsInput) (*http.Request, error)
	// GetUrfsStatusWithContext returns schedule status of Urfs.
	GetUrfsStatusWithContext(ctx context.Context, input *GetUrfsInput) (io.ReadCloser, error)
}
// dfstore provides object storage function.
type dfstore struct {
	endpoint   string       // address of the dfstore daemon
	httpClient *http.Client // client used for all requests
}

// Option is a functional option for configuring the dfstore.
type Option func(dfs *dfstore)
// New dfstore instance. | |||
func New(endpoint string, options ...Option) Dfstore { | |||
dfs := &dfstore{ | |||
endpoint: endpoint, | |||
httpClient: http.DefaultClient, | |||
} | |||
for _, opt := range options { | |||
opt(dfs) | |||
} | |||
return dfs | |||
} | |||
// GetUrfsMetadataInput is used to construct request of getting object metadata.
type GetUrfsMetadataInput struct {
	// Endpoint is endpoint name.
	Endpoint string

	// BucketName is bucket name.
	BucketName string

	// ObjectKey is object key.
	ObjectKey string

	// DstPeer is target peerHost.
	DstPeer string
}

// Validate checks that every required field of the input is present.
func (i *GetUrfsMetadataInput) Validate() error {
	switch {
	case i.Endpoint == "":
		return errors.New("invalid Endpoint")
	case i.BucketName == "":
		return errors.New("invalid BucketName")
	case i.ObjectKey == "":
		return errors.New("invalid ObjectKey")
	default:
		return nil
	}
}
// GetObjectMetadataRequestWithContext returns *http.Request of getting object metadata. | |||
func (dfs *dfstore) GetUrfsMetadataRequestWithContext(ctx context.Context, input *GetUrfsMetadataInput) (*http.Request, error) { | |||
if err := input.Validate(); err != nil { | |||
return nil, err | |||
} | |||
dstUrl := url.URL{ | |||
Scheme: "http", | |||
Host: fmt.Sprintf("%s:%d", input.DstPeer, config.DefaultObjectStorageStartPort), | |||
} | |||
u, err := url.Parse(dstUrl.String()) | |||
if err != nil { | |||
return nil, err | |||
} | |||
u.Path = path.Join("buckets", input.BucketName+"."+input.Endpoint, "objects", input.ObjectKey) | |||
req, err := http.NewRequestWithContext(ctx, http.MethodHead, u.String(), nil) | |||
if err != nil { | |||
return nil, err | |||
} | |||
return req, nil | |||
} | |||
// GetObjectMetadataWithContext returns metadata of object. | |||
func (dfs *dfstore) GetUrfsMetadataWithContext(ctx context.Context, input *GetUrfsMetadataInput) (*pkgobjectstorage.ObjectMetadata, error) { | |||
req, err := dfs.GetUrfsMetadataRequestWithContext(ctx, input) | |||
if err != nil { | |||
return nil, err | |||
} | |||
resp, err := dfs.httpClient.Do(req) | |||
if err != nil { | |||
return nil, err | |||
} | |||
defer resp.Body.Close() | |||
if resp.StatusCode/100 != 2 { | |||
return nil, fmt.Errorf("bad response status %s", resp.Status) | |||
} | |||
contentLength, err := strconv.ParseInt(resp.Header.Get(headers.ContentLength), 10, 64) | |||
if err != nil { | |||
return nil, err | |||
} | |||
return &pkgobjectstorage.ObjectMetadata{ | |||
ContentDisposition: resp.Header.Get(headers.ContentDisposition), | |||
ContentEncoding: resp.Header.Get(headers.ContentEncoding), | |||
ContentLanguage: resp.Header.Get(headers.ContentLanguage), | |||
ContentLength: int64(contentLength), | |||
ContentType: resp.Header.Get(headers.ContentType), | |||
ETag: resp.Header.Get(headers.ContentType), | |||
Digest: resp.Header.Get(config.HeaderDragonflyObjectMetaDigest), | |||
}, nil | |||
} | |||
// GetUrfsInput is used to construct request of getting object.
type GetUrfsInput struct {
	// Endpoint is endpoint name.
	Endpoint string

	// BucketName is bucket name.
	BucketName string

	// ObjectKey is object key.
	ObjectKey string

	// Filter is used to generate a unique Task ID by
	// filtering unnecessary query params in the URL,
	// it is separated by & character.
	Filter string

	// Range is the HTTP range header, forwarded verbatim on the request.
	Range string

	// DstPeer is target peerHost; the request is sent to
	// DstPeer:<object-storage port>.
	DstPeer string
}
// GetObjectWithContext returns data of object. | |||
func (dfs *dfstore) GetUrfsWithContext(ctx context.Context, input *GetUrfsInput) (io.ReadCloser, error) { | |||
req, err := dfs.GetUrfsRequestWithContext(ctx, input) | |||
if err != nil { | |||
return nil, err | |||
} | |||
resp, err := dfs.httpClient.Do(req) | |||
if err != nil { | |||
return nil, err | |||
} | |||
if resp.StatusCode/100 != 2 { | |||
return nil, fmt.Errorf("bad response status %s", resp.Status) | |||
} | |||
return resp.Body, nil | |||
} | |||
// GetObjectRequestWithContext returns *http.Request of getting object. | |||
func (dfs *dfstore) GetUrfsRequestWithContext(ctx context.Context, input *GetUrfsInput) (*http.Request, error) { | |||
if err := input.Validate(); err != nil { | |||
return nil, err | |||
} | |||
dstUrl := url.URL{ | |||
Scheme: "http", | |||
Host: fmt.Sprintf("%s:%d", input.DstPeer, config.DefaultObjectStorageStartPort), | |||
} | |||
u, err := url.Parse(dstUrl.String()) | |||
if err != nil { | |||
return nil, err | |||
} | |||
u.Path = path.Join("buckets", input.BucketName+"."+input.Endpoint, "cache_object", input.ObjectKey) | |||
query := u.Query() | |||
if input.Filter != "" { | |||
query.Set("filter", input.Filter) | |||
} | |||
u.RawQuery = query.Encode() | |||
req, err := http.NewRequestWithContext(ctx, http.MethodPost, u.String(), nil) | |||
if err != nil { | |||
return nil, err | |||
} | |||
if input.Range != "" { | |||
req.Header.Set(headers.Range, input.Range) | |||
} | |||
return req, nil | |||
} | |||
// Validate validates GetUrfsInput fields. | |||
func (i *GetUrfsInput) Validate() error { | |||
if i.Endpoint == "" { | |||
return errors.New("invalid Endpoint") | |||
} | |||
if i.BucketName == "" { | |||
return errors.New("invalid BucketName") | |||
} | |||
if i.ObjectKey == "" { | |||
return errors.New("invalid ObjectKey") | |||
} | |||
return nil | |||
} | |||
// GetUrfsStatusWithContext returns schedule task status. | |||
func (dfs *dfstore) GetUrfsStatusWithContext(ctx context.Context, input *GetUrfsInput) (io.ReadCloser, error) { | |||
req, err := dfs.GetUrfsStatusRequestWithContext(ctx, input) | |||
if err != nil { | |||
return nil, err | |||
} | |||
resp, err := dfs.httpClient.Do(req) | |||
if err != nil { | |||
return nil, err | |||
} | |||
if resp.StatusCode/100 != 2 { | |||
return nil, fmt.Errorf("bad response status %s", resp.Status) | |||
} | |||
return resp.Body, nil | |||
} | |||
// GetObjectStatusRequestWithContext returns *http.Request of check schedule task status. | |||
func (dfs *dfstore) GetUrfsStatusRequestWithContext(ctx context.Context, input *GetUrfsInput) (*http.Request, error) { | |||
if err := input.Validate(); err != nil { | |||
return nil, err | |||
} | |||
dstUrl := url.URL{ | |||
Scheme: "http", | |||
Host: fmt.Sprintf("%s:%d", input.DstPeer, config.DefaultObjectStorageStartPort), | |||
} | |||
u, err := url.Parse(dstUrl.String()) | |||
if err != nil { | |||
return nil, err | |||
} | |||
u.Path = path.Join("buckets", input.BucketName+"."+input.Endpoint, "check_object", input.ObjectKey) | |||
query := u.Query() | |||
if input.Filter != "" { | |||
query.Set("filter", input.Filter) | |||
} | |||
u.RawQuery = query.Encode() | |||
req, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), nil) | |||
if err != nil { | |||
return nil, err | |||
} | |||
if input.Range != "" { | |||
req.Header.Set(headers.Range, input.Range) | |||
} | |||
return req, nil | |||
} |
@@ -0,0 +1,5 @@ | |||
// Code generated by MockGen. DO NOT EDIT. | |||
// Source: objectstorage.go | |||
// Package mocks is a generated GoMock package. | |||
package mocks |
@@ -0,0 +1,47 @@ | |||
/* | |||
* Copyright 2022 The Dragonfly Authors | |||
* | |||
* Licensed under the Apache License, Version 2.0 (the "License"); | |||
* you may not use this file except in compliance with the License. | |||
* You may obtain a copy of the License at | |||
* | |||
* http://www.apache.org/licenses/LICENSE-2.0 | |||
* | |||
* Unless required by applicable law or agreed to in writing, software | |||
* distributed under the License is distributed on an "AS IS" BASIS, | |||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
* See the License for the specific language governing permissions and | |||
* limitations under the License. | |||
*/ | |||
//go:generate mockgen -destination mocks/objectstorage_mock.go -source objectstorage.go -package mocks | |||
package objectstorage | |||
// ObjectMetadata mirrors the standard HTTP entity headers of a stored object,
// plus its Dragonfly digest.
type ObjectMetadata struct {
	// Key is object key.
	Key string

	// ContentDisposition is Content-Disposition header.
	ContentDisposition string

	// ContentEncoding is Content-Encoding header.
	ContentEncoding string

	// ContentLanguage is Content-Language header.
	ContentLanguage string

	// ContentLength is Content-Length header.
	ContentLength int64

	// ContentType is Content-Type header.
	ContentType string

	// ETag is ETag header.
	ETag string

	// Digest is object digest.
	Digest string
}
@@ -0,0 +1,112 @@ | |||
package urchin | |||
import (
	"encoding/json"
	"fmt"
	"strings"
	"sync"

	"code.gitea.io/gitea/models"
	"code.gitea.io/gitea/modules/labelmsg"
	"code.gitea.io/gitea/modules/log"
	"code.gitea.io/gitea/modules/setting"
)
// DecompressReq is the JSON payload sent to the label system asking it to
// decompress an archive stored in OBS.
type DecompressReq struct {
	SourceFile string `json:"source_file"` // archive object to decompress
	DestPath   string `json:"dest_path"`   // destination prefix for the extracted files
}
var urfsClient Urchinfs | |||
func getUrfsClient() { | |||
if urfsClient != nil { | |||
return | |||
} | |||
urfsClient = New() | |||
} | |||
func GetBackNpuModel(cloudbrainID int64, endpoint, bucket, objectKey, destPeerHost string) error { | |||
getUrfsClient() | |||
res, err := urfsClient.ScheduleDataToPeerByKey(endpoint, bucket, objectKey, destPeerHost) | |||
if err != nil { | |||
log.Error("ScheduleDataToPeerByKey failed:%v", err) | |||
return err | |||
} | |||
_, err = models.InsertScheduleRecord(&models.ScheduleRecord{ | |||
CloudbrainID: cloudbrainID, | |||
EndPoint: res.DataEndpoint, | |||
Bucket: res.DataRoot, | |||
ObjectKey: res.DataPath, | |||
ProxyServer: destPeerHost, | |||
Status: res.StatusCode, | |||
}) | |||
if err != nil { | |||
log.Error("InsertScheduleRecord failed:%v", err) | |||
return err | |||
} | |||
switch res.StatusCode { | |||
case models.StorageScheduleSucceed: | |||
log.Info("ScheduleDataToPeerByKey succeed") | |||
decompress(res.DataRoot+"/"+res.DataPath, setting.Bucket+"/"+strings.TrimSuffix(res.DataPath, models.ModelSuffix)) | |||
case models.StorageScheduleProcessing: | |||
log.Info("ScheduleDataToPeerByKey processing") | |||
case models.StorageScheduleFailed: | |||
log.Error("ScheduleDataToPeerByKey failed:%s", res.StatusMsg) | |||
return fmt.Errorf("GetBackNpuModel failed:%s", res.StatusMsg) | |||
default: | |||
log.Info("ScheduleDataToPeerByKey failed, unknown StatusCode:%d", res.StatusCode) | |||
return fmt.Errorf("GetBackNpuModel failed, unknow StatusCode:%d", res.StatusCode) | |||
} | |||
return nil | |||
} | |||
// HandleScheduleRecords polls every schedule record that is still in
// progress, refreshes its status from the urfs scheduler, and triggers
// decompression for transfers that have completed.
func HandleScheduleRecords() error {
	getUrfsClient()
	records, err := models.GetSchedulingRecord()
	if err != nil {
		log.Error("GetSchedulingRecord failed:%v", err)
		return err
	}
	for _, record := range records {
		res, err := urfsClient.CheckScheduleTaskStatusByKey(record.EndPoint, record.Bucket, record.ObjectKey, record.ProxyServer)
		if err != nil {
			// Keep going: one failed status check must not block the others.
			log.Error("CheckScheduleTaskStatusByKey(%d) failed:%v", record.ID, err)
			continue
		}
		// Persist the refreshed status before acting on it.
		record.Status = res.StatusCode
		models.UpdateScheduleCols(record, "status")
		switch res.StatusCode {
		case models.StorageScheduleSucceed:
			log.Info("ScheduleDataToPeerByKey(%s) succeed", record.ObjectKey)
			// The transferred archive is decompressed next to its object key
			// (model suffix stripped) in the local bucket.
			decompress(record.Bucket+"/"+record.ObjectKey, setting.Bucket+"/"+strings.TrimSuffix(record.ObjectKey, models.ModelSuffix))
		case models.StorageScheduleProcessing:
			log.Info("ScheduleDataToPeerByKey(%s) processing", record.ObjectKey)
		case models.StorageScheduleFailed:
			log.Error("ScheduleDataToPeerByKey(%s) failed:%s", record.ObjectKey, res.StatusMsg)
		default:
			log.Info("ScheduleDataToPeerByKey(%s) failed, unknown StatusCode:%d", record.ObjectKey, res.StatusCode)
		}
	}
	return nil
}
func decompress(sourceFile, destPath string) { | |||
req, _ := json.Marshal(DecompressReq{ | |||
SourceFile: sourceFile, | |||
DestPath: destPath, | |||
}) | |||
err := labelmsg.SendDecompressAttachToLabelOBS(string(req)) | |||
if err != nil { | |||
log.Error("SendDecompressTask to labelsystem (%s) failed:%s", sourceFile, err.Error()) | |||
} | |||
} |
@@ -0,0 +1,276 @@ | |||
package urchin | |||
import ( | |||
"context" | |||
"encoding/json" | |||
"errors" | |||
"io/ioutil" | |||
"net/url" | |||
"strconv" | |||
"strings" | |||
"code.gitea.io/gitea/modules/urfs_client/config" | |||
urfs "code.gitea.io/gitea/modules/urfs_client/dfstore" | |||
) | |||
// Urchinfs schedules objects between storage centers through the urfs
// (dfstore-based) object storage service.
type Urchinfs interface {
	//// schedule source dataset to target peer
	//ScheduleDataToPeer(sourceUrl, destPeerHost string) (*PeerResult, error)
	//
	//// check schedule data to peer task status
	//CheckScheduleTaskStatus(sourceUrl, destPeerHost string) (*PeerResult, error)

	// ScheduleDataToPeerByKey schedules the object identified by
	// endpoint/bucketName/objectKey to the destination peer host.
	ScheduleDataToPeerByKey(endpoint, bucketName, objectKey, destPeerHost string) (*PeerResult, error)

	// CheckScheduleTaskStatusByKey checks the status of a previously
	// scheduled transfer.
	CheckScheduleTaskStatusByKey(endpoint, bucketName, objectKey, destPeerHost string) (*PeerResult, error)
}
// urchinfs is the default Urchinfs implementation, backed by a dfstore
// configuration.
type urchinfs struct {
	// Initialize default urfs config.
	cfg *config.DfstoreConfig
}
// New urchinfs instance. | |||
func New() Urchinfs { | |||
urfs := &urchinfs{ | |||
cfg: config.NewDfstore(), | |||
} | |||
return urfs | |||
} | |||
const (
	// UrfsScheme is the scheme of object storage URLs.
	UrfsScheme = "urfs"
)
/* | |||
func (urfs *urchinfs) ScheduleDataToPeer(sourceUrl, destPeerHost string) (*PeerResult, error) { | |||
ctx, cancel := context.WithCancel(context.Background()) | |||
defer cancel() | |||
if err := urfs.cfg.Validate(); err != nil { | |||
return nil, err | |||
} | |||
if err := validateSchedulelArgs(sourceUrl, destPeerHost); err != nil { | |||
return nil, err | |||
} | |||
// Copy object storage to local file. | |||
endpoint, bucketName, objectKey, err := parseUrfsURL(sourceUrl) | |||
if err != nil { | |||
return nil, err | |||
} | |||
peerResult, err := processScheduleDataToPeer(ctx, urfs.cfg, endpoint, bucketName, objectKey, destPeerHost) | |||
if err != nil { | |||
return nil, err | |||
} | |||
return peerResult, err | |||
} | |||
*/ | |||
func (urfs *urchinfs) ScheduleDataToPeerByKey(endpoint, bucketName, objectKey, destPeerHost string) (*PeerResult, error) { | |||
ctx, cancel := context.WithCancel(context.Background()) | |||
defer cancel() | |||
peerResult, err := processScheduleDataToPeer(ctx, urfs.cfg, endpoint, bucketName, objectKey, destPeerHost) | |||
if err != nil { | |||
return nil, err | |||
} | |||
return peerResult, err | |||
} | |||
/* | |||
func (urfs *urchinfs) CheckScheduleTaskStatus(sourceUrl, destPeerHost string) (*PeerResult, error) { | |||
ctx, cancel := context.WithCancel(context.Background()) | |||
defer cancel() | |||
if err := urfs.cfg.Validate(); err != nil { | |||
return nil, err | |||
} | |||
if err := validateSchedulelArgs(sourceUrl, destPeerHost); err != nil { | |||
return nil, err | |||
} | |||
// Copy object storage to local file. | |||
endpoint, bucketName, objectKey, err := parseUrfsURL(sourceUrl) | |||
if err != nil { | |||
return nil, err | |||
} | |||
peerResult, err := processCheckScheduleTaskStatus(ctx, urfs.cfg, endpoint, bucketName, objectKey, destPeerHost) | |||
if err != nil { | |||
return nil, err | |||
} | |||
return peerResult, err | |||
} | |||
*/ | |||
func (urfs *urchinfs) CheckScheduleTaskStatusByKey(endpoint, bucketName, objectKey, destPeerHost string) (*PeerResult, error) { | |||
ctx, cancel := context.WithCancel(context.Background()) | |||
defer cancel() | |||
peerResult, err := processCheckScheduleTaskStatus(ctx, urfs.cfg, endpoint, bucketName, objectKey, destPeerHost) | |||
if err != nil { | |||
return nil, err | |||
} | |||
return peerResult, err | |||
} | |||
// isUrfsURL determines whether the raw url is urfs url. | |||
func isUrfsURL(rawURL string) bool { | |||
u, err := url.ParseRequestURI(rawURL) | |||
if err != nil { | |||
return false | |||
} | |||
if u.Scheme != UrfsScheme || u.Host == "" || u.Path == "" { | |||
return false | |||
} | |||
return true | |||
} | |||
// Validate copy arguments. | |||
func validateSchedulelArgs(sourceUrl, destPeer string) error { | |||
if !isUrfsURL(sourceUrl) { | |||
return errors.New("source url should be urfs:// protocol") | |||
} | |||
return nil | |||
} | |||
/* | |||
// Parse object storage url. eg: urfs://源数据$endpoint/源数据$bucket/源数据filepath | |||
func parseUrfsURL(rawURL string) (string, string, string, error) { | |||
u, err := url.ParseRequestURI(rawURL) | |||
if err != nil { | |||
return "", "", "", err | |||
} | |||
if u.Scheme != UrfsScheme { | |||
return "", "", "", fmt.Errorf("invalid scheme, e.g. %s://endpoint/bucket_name/object_key", UrfsScheme) | |||
} | |||
if u.Host == "" { | |||
return "", "", "", errors.New("empty endpoint name") | |||
} | |||
if u.Path == "" { | |||
return "", "", "", errors.New("empty object path") | |||
} | |||
bucket, key, found := strings.Cut(strings.Trim(u.Path, "/"), "/") | |||
if found == false { | |||
return "", "", "", errors.New("invalid bucket and object key " + u.Path) | |||
} | |||
return u.Host, bucket, key, nil | |||
} | |||
*/ | |||
// Schedule object storage to peer. | |||
func processScheduleDataToPeer(ctx context.Context, cfg *config.DfstoreConfig, endpoint, bucketName, objectKey, dstPeer string) (*PeerResult, error) { | |||
dfs := urfs.New(cfg.Endpoint) | |||
meta, err := dfs.GetUrfsMetadataWithContext(ctx, &urfs.GetUrfsMetadataInput{ | |||
Endpoint: endpoint, | |||
BucketName: bucketName, | |||
ObjectKey: objectKey, | |||
DstPeer: dstPeer, | |||
}) | |||
if err != nil { | |||
return nil, err | |||
} | |||
reader, err := dfs.GetUrfsWithContext(ctx, &urfs.GetUrfsInput{ | |||
Endpoint: endpoint, | |||
BucketName: bucketName, | |||
ObjectKey: objectKey, | |||
DstPeer: dstPeer, | |||
}) | |||
if err != nil { | |||
return nil, err | |||
} | |||
defer reader.Close() | |||
body, err := ioutil.ReadAll(reader) | |||
var peerResult PeerResult | |||
if err == nil { | |||
err = json.Unmarshal((body), &peerResult) | |||
} | |||
peerResult.SignedUrl = strings.ReplaceAll(peerResult.SignedUrl, "\\u0026", "&") | |||
fileContentLength, err := strconv.ParseInt(peerResult.ContentLength, 10, 64) | |||
if err != nil { | |||
return nil, err | |||
} | |||
if fileContentLength != meta.ContentLength { | |||
return nil, errors.New("content length inconsistent with meta") | |||
} | |||
return &peerResult, err | |||
} | |||
// check schedule task status. | |||
func processCheckScheduleTaskStatus(ctx context.Context, cfg *config.DfstoreConfig, endpoint, bucketName, objectKey, dstPeer string) (*PeerResult, error) { | |||
dfs := urfs.New(cfg.Endpoint) | |||
meta, err := dfs.GetUrfsMetadataWithContext(ctx, &urfs.GetUrfsMetadataInput{ | |||
Endpoint: endpoint, | |||
BucketName: bucketName, | |||
ObjectKey: objectKey, | |||
DstPeer: dstPeer, | |||
}) | |||
if err != nil { | |||
return nil, err | |||
} | |||
reader, err := dfs.GetUrfsStatusWithContext(ctx, &urfs.GetUrfsInput{ | |||
Endpoint: endpoint, | |||
BucketName: bucketName, | |||
ObjectKey: objectKey, | |||
DstPeer: dstPeer, | |||
}) | |||
if err != nil { | |||
return nil, err | |||
} | |||
defer reader.Close() | |||
body, err := ioutil.ReadAll(reader) | |||
var peerResult PeerResult | |||
if err == nil { | |||
err = json.Unmarshal((body), &peerResult) | |||
} | |||
peerResult.SignedUrl = strings.ReplaceAll(peerResult.SignedUrl, "\\u0026", "&") | |||
fileContentLength, err := strconv.ParseInt(peerResult.ContentLength, 10, 64) | |||
if err != nil { | |||
return nil, err | |||
} | |||
if fileContentLength != meta.ContentLength { | |||
return nil, err | |||
} | |||
return &peerResult, err | |||
} | |||
// PeerResult is the JSON payload a destination peer returns for a schedule
// request or a schedule-status query. The SignedUrl is post-processed to
// un-escape "\u0026" back to '&' before the struct is returned to callers.
type PeerResult struct {
	ContentType   string `json:"Content-Type"`   // MIME type reported by the peer
	ContentLength string `json:"Content-Length"` // object size in bytes, as a decimal string (parsed with strconv.ParseInt)
	SignedUrl     string                         // pre-signed download URL for the scheduled object
	DataRoot      string                         // root directory of the data on the peer
	DataPath      string                         // path of the data relative to DataRoot
	DataEndpoint  string                         // endpoint serving the data
	StatusCode    int                            // peer-reported status code
	StatusMsg     string                         // human-readable status message
	TaskID        string                         // identifier of the schedule task on the peer
}
@@ -266,7 +266,7 @@ page_dev_yunlao_desc4=Developers can freely select the corresponding computing r | |||
page_dev_yunlao_desc5=If your model requires more computing resources, you can also apply for it separately. | |||
page_dev_yunlao_apply=Apply Separately | |||
c2net_title=China Computing NET(C²NET) | |||
c2net_desc=Extensive access to intelligent computing centers and supercomputing centers across the country to provide users with free computing resources. | |||
c2net_desc=Extensive access to intelligent computing centers, supercomputing centers and big data centers across the country to provide users with free computing resources. | |||
c2net_center=Center | |||
search=Search | |||
search_repo=Repository | |||
@@ -1218,8 +1218,8 @@ cloudbrain.benchmark.evaluate_train=Train Script | |||
cloudbrain.benchmark.evaluate_test=Test Script | |||
cloudbrain.benchmark.types={"type":[{"id":1,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=detection","first":"Target detection","second":[{"id":1,"value":"None","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"yangzhx","repo_name":"detection_benchmark_script"}]},{"id":2,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=reid","first":"Target re-identification","second":[{"id":1,"value":"Vehicle re-identification","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"},{"id":2,"value":"Image-based person re-identification","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"JiahongXu","repo_name":"benchmark_reID_script"}]},{"id":3,"rank_link":"https://git.openi.org.cn/benchmark/?username=admin&algType=tracking","first":"Multi-target tracking","second":[{"id":1,"value":"None","attachment":"84cf39c4-d8bc-41aa-aaa3-182ce289b105","owner":"lix07","repo_name":"MOT_benchmark_script"}]}]} | |||
cloudbrain.morethanonejob=You already have a running or waiting task, create it after that task is over. | |||
cloudbrain.morethanonejob1=You have created a <span style="color:rgba(242, 113, 28, 1);"> similar task </span> that is waiting or running, please wait for the task to finish before creating it. | |||
cloudbrain.morethanonejob2=You can view all your Cloud Brain tasks in <a href="/cloudbrains" target="_blank"> Personal Center > Cloud Brain Tasks </a>. | |||
cloudbrain.morethanonejob1=You have created an <span style="color:rgba(242, 113, 28, 1);"> equivalent task </span> that is waiting or running, please wait for the task to finish before creating it. | |||
cloudbrain.morethanonejob2=You can view all your Cloud Brain tasks in <a href="/cloudbrains" target="_blank"> Home > Cloudbrain Task </a>. | |||
modelarts.infer_job_model = Model | |||
modelarts.infer_job_model_file = Model File | |||
@@ -268,7 +268,7 @@ page_dev_yunlao_desc4=开发者可以根据使用需求,自由选择相应计 | |||
page_dev_yunlao_desc5=如果您的模型需要更多的计算资源,也可以单独申请。 | |||
page_dev_yunlao_apply=单独申请 | |||
c2net_title=中国算力网(C²NET) | |||
c2net_desc=广泛接入全国各地智算中心、超算中心,为用户提供免费算力资源 | |||
c2net_desc=广泛接入全国各地智算中心、超算中心与大数据中心等,为用户提供免费算力资源 | |||
c2net_center=中心 | |||
search=搜索 | |||
search_repo=项目 | |||
@@ -70,7 +70,7 @@ func CloudBrains(ctx *context.Context) { | |||
keyword := strings.Trim(ctx.Query("q"), " ") | |||
ciTasks, count, err := models.Cloudbrains(&models.CloudbrainsOptions{ | |||
ciTasks, _, err := models.Cloudbrains(&models.CloudbrainsOptions{ | |||
ListOptions: models.ListOptions{ | |||
Page: page, | |||
PageSize: setting.UI.IssuePagingNum, | |||
@@ -84,7 +84,6 @@ func CloudBrains(ctx *context.Context) { | |||
IsLatestVersion: modelarts.IsLatestVersion, | |||
ComputeResource: listType, | |||
Type: models.TypeCloudBrainAll, | |||
AiCenter: aiCenter, | |||
Cluster: cluster, | |||
}) | |||
if err != nil { | |||
@@ -93,19 +92,24 @@ func CloudBrains(ctx *context.Context) { | |||
} | |||
models.LoadSpecs4CloudbrainInfo(ciTasks) | |||
for i, task := range ciTasks { | |||
ciTasks[i].CanDebug = true | |||
ciTasks[i].CanDel = true | |||
ciTasks[i].Cloudbrain.ComputeResource = task.ComputeResource | |||
tasks := []*models.CloudbrainInfo{} | |||
for _, task := range ciTasks { | |||
if aiCenter == "" || aiCenter == task.Cloudbrain.Spec.AiCenterCode { | |||
task.CanDebug = true | |||
task.CanDel = true | |||
task.Cloudbrain.ComputeResource = task.ComputeResource | |||
tasks = append(tasks, task) | |||
} | |||
} | |||
count := int64(len(tasks)) | |||
pager := context.NewPagination(int(count), setting.UI.IssuePagingNum, page, getTotalPage(count, setting.UI.IssuePagingNum)) | |||
pager.SetDefaultParams(ctx) | |||
pager.AddParam(ctx, "listType", "ListType") | |||
ctx.Data["Page"] = pager | |||
ctx.Data["PageIsCloudBrain"] = true | |||
ctx.Data["Tasks"] = ciTasks | |||
ctx.Data["Tasks"] = tasks | |||
ctx.Data["CanCreate"] = true | |||
ctx.Data["Keyword"] = keyword | |||
@@ -707,6 +707,7 @@ func RegisterRoutes(m *macaron.Macaron) { | |||
m.Get("/issues/search", repo.SearchIssues) | |||
m.Post("/migrate", reqToken(), bind(auth.MigrateRepoForm{}), repo.Migrate) | |||
m.Post("/migrate/submit", reqToken(), bind(auth.MigrateRepoForm{}), repo.MigrateSubmit) | |||
m.Group("/:username/:reponame", func() { | |||
m.Combo("").Get(reqAnyRepoReader(), repo.Get). | |||
@@ -172,6 +172,7 @@ func GetCloudBrainInferenceJob(ctx *context.APIContext) { | |||
"JobID": jobID, | |||
"JobStatus": job.Status, | |||
"JobDuration": job.TrainJobDuration, | |||
"StartTime": job.StartTime, | |||
}) | |||
} | |||
@@ -441,6 +442,7 @@ func ModelSafetyGetLog(ctx *context.APIContext) { | |||
"Content": result.Content, | |||
"Lines": result.Lines, | |||
"CanLogDownload": isCanDownloadLog(ctx, job), | |||
"StartTime": job.StartTime, | |||
}) | |||
} | |||
} | |||
@@ -601,6 +603,7 @@ func CloudbrainGetLog(ctx *context.APIContext) { | |||
"Content": content, | |||
"Lines": result["Lines"], | |||
"CanLogDownload": result["FileName"] != "", | |||
"StartTime": job.StartTime, | |||
} | |||
//result := CloudbrainGetLogByJobId(job.JobID, job.JobName) | |||
ctx.JSON(http.StatusOK, re) | |||
@@ -732,7 +732,7 @@ func GetCloudbrainsDetailData(ctx *context.Context) { | |||
keyword := strings.Trim(ctx.Query("q"), " ") | |||
ciTasks, count, err := models.CloudbrainAll(&models.CloudbrainsOptions{ | |||
ciTasks, _, err := models.CloudbrainAll(&models.CloudbrainsOptions{ | |||
ListOptions: models.ListOptions{ | |||
Page: page, | |||
PageSize: pageSize, | |||
@@ -747,8 +747,8 @@ func GetCloudbrainsDetailData(ctx *context.Context) { | |||
NeedRepoInfo: true, | |||
BeginTimeUnix: int64(recordBeginTime), | |||
EndTimeUnix: endTime.Unix(), | |||
AiCenter: aiCenter, | |||
NeedDeleteInfo: needDeleteInfo, | |||
// AiCenter: aiCenter, | |||
NeedDeleteInfo: needDeleteInfo, | |||
}) | |||
if err != nil { | |||
ctx.ServerError("Get job failed:", err) | |||
@@ -758,43 +758,45 @@ func GetCloudbrainsDetailData(ctx *context.Context) { | |||
nilTime := time.Time{} | |||
tasks := []models.TaskDetail{} | |||
for i, task := range ciTasks { | |||
ciTasks[i].Cloudbrain.ComputeResource = task.ComputeResource | |||
var taskDetail models.TaskDetail | |||
taskDetail.ID = ciTasks[i].Cloudbrain.ID | |||
taskDetail.JobID = ciTasks[i].Cloudbrain.JobID | |||
taskDetail.JobName = ciTasks[i].JobName | |||
taskDetail.DisplayJobName = ciTasks[i].DisplayJobName | |||
taskDetail.Status = ciTasks[i].Status | |||
taskDetail.JobType = ciTasks[i].JobType | |||
taskDetail.CreatedUnix = ciTasks[i].Cloudbrain.CreatedUnix | |||
taskDetail.RunTime = ciTasks[i].Cloudbrain.TrainJobDuration | |||
taskDetail.StartTime = ciTasks[i].StartTime | |||
taskDetail.EndTime = ciTasks[i].EndTime | |||
taskDetail.ComputeResource = ciTasks[i].ComputeResource | |||
taskDetail.Type = ciTasks[i].Cloudbrain.Type | |||
taskDetail.UserName = ciTasks[i].User.Name | |||
taskDetail.RepoID = ciTasks[i].RepoID | |||
if ciTasks[i].Repo != nil { | |||
taskDetail.RepoName = ciTasks[i].Repo.OwnerName + "/" + ciTasks[i].Repo.Name | |||
taskDetail.RepoAlias = ciTasks[i].Repo.OwnerName + "/" + ciTasks[i].Repo.Alias | |||
} | |||
if ciTasks[i].Cloudbrain.WorkServerNumber >= 1 { | |||
taskDetail.WorkServerNum = int64(ciTasks[i].Cloudbrain.WorkServerNumber) | |||
} else { | |||
taskDetail.WorkServerNum = 1 | |||
} | |||
taskDetail.CardDuration = repo.GetCloudbrainCardDuration(ciTasks[i].Cloudbrain) | |||
taskDetail.WaitTime = repo.GetCloudbrainWaitTime(ciTasks[i].Cloudbrain) | |||
if aiCenter == "" || aiCenter == task.Cloudbrain.Spec.AiCenterCode { | |||
ciTasks[i].Cloudbrain.ComputeResource = task.ComputeResource | |||
var taskDetail models.TaskDetail | |||
taskDetail.ID = ciTasks[i].Cloudbrain.ID | |||
taskDetail.JobID = ciTasks[i].Cloudbrain.JobID | |||
taskDetail.JobName = ciTasks[i].JobName | |||
taskDetail.DisplayJobName = ciTasks[i].DisplayJobName | |||
taskDetail.Status = ciTasks[i].Status | |||
taskDetail.JobType = ciTasks[i].JobType | |||
taskDetail.CreatedUnix = ciTasks[i].Cloudbrain.CreatedUnix | |||
taskDetail.RunTime = ciTasks[i].Cloudbrain.TrainJobDuration | |||
taskDetail.StartTime = ciTasks[i].StartTime | |||
taskDetail.EndTime = ciTasks[i].EndTime | |||
taskDetail.ComputeResource = ciTasks[i].ComputeResource | |||
taskDetail.Type = ciTasks[i].Cloudbrain.Type | |||
taskDetail.UserName = ciTasks[i].User.Name | |||
taskDetail.RepoID = ciTasks[i].RepoID | |||
if ciTasks[i].Repo != nil { | |||
taskDetail.RepoName = ciTasks[i].Repo.OwnerName + "/" + ciTasks[i].Repo.Name | |||
taskDetail.RepoAlias = ciTasks[i].Repo.OwnerName + "/" + ciTasks[i].Repo.Alias | |||
} | |||
if ciTasks[i].Cloudbrain.WorkServerNumber >= 1 { | |||
taskDetail.WorkServerNum = int64(ciTasks[i].Cloudbrain.WorkServerNumber) | |||
} else { | |||
taskDetail.WorkServerNum = 1 | |||
} | |||
taskDetail.CardDuration = repo.GetCloudbrainCardDuration(ciTasks[i].Cloudbrain) | |||
taskDetail.WaitTime = repo.GetCloudbrainWaitTime(ciTasks[i].Cloudbrain) | |||
if ciTasks[i].Cloudbrain.DeletedAt != nilTime || ciTasks[i].Repo == nil { | |||
taskDetail.IsDelete = true | |||
} else { | |||
taskDetail.IsDelete = false | |||
if ciTasks[i].Cloudbrain.DeletedAt != nilTime || ciTasks[i].Repo == nil { | |||
taskDetail.IsDelete = true | |||
} else { | |||
taskDetail.IsDelete = false | |||
} | |||
taskDetail.Spec = ciTasks[i].Spec | |||
tasks = append(tasks, taskDetail) | |||
} | |||
taskDetail.Spec = ciTasks[i].Spec | |||
tasks = append(tasks, taskDetail) | |||
} | |||
count := int64(len(tasks)) | |||
pager := context.NewPagination(int(count), pageSize, page, getTotalPage(count, pageSize)) | |||
pager.SetDefaultParams(ctx) | |||
pager.AddParam(ctx, "listType", "ListType") | |||
@@ -6,6 +6,8 @@ package repo | |||
import ( | |||
"bytes" | |||
"code.gitea.io/gitea/modules/task" | |||
"code.gitea.io/gitea/routers/response" | |||
"errors" | |||
"fmt" | |||
"net/http" | |||
@@ -216,3 +218,146 @@ func handleMigrateError(ctx *context.APIContext, repoOwner *models.User, remoteA | |||
} | |||
} | |||
} | |||
// MigrateSubmit handles an API request to migrate an external repository into
// the platform. It validates the target owner and clone address, builds the
// migration options, and enqueues the migration as a background task.
// All outcomes — including failures — are reported with HTTP 200 and a
// business error/success payload in the body.
func MigrateSubmit(ctx *context.APIContext, form auth.MigrateRepoForm) {
	log.Info("receive MigrateSubmit request")
	// Resolve the owner the repository will belong to (current user or an
	// organization the user may create repositories in).
	ctxUser, bizErr := checkContextUser(ctx, form.UID)
	if bizErr != nil {
		ctx.JSON(http.StatusOK, response.ResponseError(bizErr))
		return
	}
	// Validate and normalize the remote clone address; map each specific
	// address error to its business error code.
	remoteAddr, err := form.ParseRemoteAddr(ctx.User)
	if err != nil {
		if models.IsErrInvalidCloneAddr(err) {
			addrErr := err.(models.ErrInvalidCloneAddr)
			switch {
			case addrErr.IsURLError:
				ctx.JSON(http.StatusOK, response.PARAM_ERROR)
			case addrErr.IsPermissionDenied:
				ctx.JSON(http.StatusOK, response.INSUFFICIENT_PERMISSION)
			case addrErr.IsInvalidPath:
				ctx.JSON(http.StatusOK, response.PARAM_ERROR)
			default:
				ctx.JSON(http.StatusOK, response.SYSTEM_ERROR)
			}
		} else {
			ctx.JSON(http.StatusOK, response.SYSTEM_ERROR)
		}
		return
	}
	// Treat github.com sources as the GitHub service (richer migration),
	// anything else as a plain git remote.
	var gitServiceType = api.PlainGitService
	u, err := url.Parse(form.CloneAddr)
	if err == nil && strings.EqualFold(u.Host, "github.com") {
		gitServiceType = api.GithubService
	}
	var opts = migrations.MigrateOptions{
		OriginalURL:    form.CloneAddr,
		GitServiceType: gitServiceType,
		CloneAddr:      remoteAddr,
		RepoName:       form.RepoName,
		Alias:          form.Alias,
		Description:    form.Description,
		Private:        form.Private || setting.Repository.ForcePrivate,
		Mirror:         form.Mirror,
		AuthUsername:   form.AuthUsername,
		AuthPassword:   form.AuthPassword,
		Wiki:           form.Wiki,
		Issues:         form.Issues,
		Milestones:     form.Milestones,
		Labels:         form.Labels,
		Comments:       true,
		PullRequests:   form.PullRequests,
		Releases:       form.Releases,
	}
	// Mirrors only track the git data; non-git items are not migrated.
	if opts.Mirror {
		opts.Issues = false
		opts.Milestones = false
		opts.Labels = false
		opts.Comments = false
		opts.PullRequests = false
		opts.Releases = false
	}
	// Pre-flight check (name availability, quota, reserved names) before
	// enqueuing the actual migration.
	err = models.CheckCreateRepository(ctx.User, ctxUser, opts.RepoName, opts.Alias)
	if err != nil {
		handleMigrateError4Api(ctx, ctxUser, remoteAddr, err)
		return
	}
	// Enqueue the migration as an asynchronous task; on success, return the
	// URLs of both the new local repository and the origin.
	err = task.MigrateRepository(ctx.User, ctxUser, opts)
	if err == nil {
		r := make(map[string]string)
		r["OpenIUrl"] = strings.TrimSuffix(setting.AppURL, "/") + "/" + ctxUser.Name + "/" + opts.RepoName
		r["OriginUrl"] = form.CloneAddr
		ctx.JSON(http.StatusOK, response.SuccessWithData(r))
		return
	}
	handleMigrateError4Api(ctx, ctxUser, remoteAddr, err)
}
func checkContextUser(ctx *context.APIContext, uid int64) (*models.User, *response.BizError) { | |||
if uid == ctx.User.ID || uid == 0 { | |||
return ctx.User, nil | |||
} | |||
org, err := models.GetUserByID(uid) | |||
if models.IsErrUserNotExist(err) { | |||
return ctx.User, nil | |||
} | |||
if err != nil { | |||
return nil, response.SYSTEM_ERROR | |||
} | |||
// Check ownership of organization. | |||
if !org.IsOrganization() { | |||
return nil, nil | |||
} | |||
if !ctx.User.IsAdmin { | |||
canCreate, err := org.CanCreateOrgRepo(ctx.User.ID) | |||
if err != nil { | |||
return nil, response.NewBizError(err) | |||
} else if !canCreate { | |||
return nil, response.INSUFFICIENT_PERMISSION | |||
} | |||
} | |||
return org, nil | |||
} | |||
func handleMigrateError4Api(ctx *context.APIContext, repoOwner *models.User, remoteAddr string, err error) { | |||
switch { | |||
case models.IsErrRepoAlreadyExist(err): | |||
ctx.JSON(http.StatusOK, response.Error(3, "The repository with the same name already exists.")) | |||
case migrations.IsRateLimitError(err): | |||
ctx.JSON(http.StatusOK, response.ServerError("Remote visit addressed rate limitation.")) | |||
case migrations.IsTwoFactorAuthError(err): | |||
ctx.JSON(http.StatusOK, response.ServerError("Remote visit required two factors authentication.")) | |||
case models.IsErrReachLimitOfRepo(err): | |||
ctx.JSON(http.StatusOK, response.ServerError(fmt.Sprintf("You have already reached your limit of %d repositories.", repoOwner.MaxCreationLimit()))) | |||
case models.IsErrNameReserved(err): | |||
ctx.JSON(http.StatusOK, response.ServerError(fmt.Sprintf("The username '%s' is reserved.", err.(models.ErrNameReserved).Name))) | |||
case models.IsErrNameCharsNotAllowed(err): | |||
ctx.JSON(http.StatusOK, response.ServerError(fmt.Sprintf("The username '%s' contains invalid characters.", err.(models.ErrNameCharsNotAllowed).Name))) | |||
case models.IsErrNamePatternNotAllowed(err): | |||
ctx.JSON(http.StatusOK, response.ServerError(fmt.Sprintf("The pattern '%s' is not allowed in a username.", err.(models.ErrNamePatternNotAllowed).Pattern))) | |||
default: | |||
err = util.URLSanitizedError(err, remoteAddr) | |||
if strings.Contains(err.Error(), "Authentication failed") || | |||
strings.Contains(err.Error(), "Bad credentials") || | |||
strings.Contains(err.Error(), "could not read Username") { | |||
ctx.JSON(http.StatusOK, response.ServerError(fmt.Sprintf("Authentication failed: %v.", err))) | |||
} else if strings.Contains(err.Error(), "fatal:") { | |||
ctx.JSON(http.StatusOK, response.ServerError(fmt.Sprintf("Migration failed: %v.", err))) | |||
} else { | |||
ctx.JSON(http.StatusOK, response.ServerError(err.Error())) | |||
} | |||
} | |||
} | |||
// QueryRepoSatus is an empty placeholder handler; it currently does nothing
// and writes no response.
// NOTE(review): the name looks misspelled ("Satus" vs "Status") — confirm
// with route registrations before renaming, since the identifier is exported.
func QueryRepoSatus(ctx *context.APIContext, form auth.MigrateRepoForm) {
}
@@ -12,6 +12,8 @@ import ( | |||
"strconv" | |||
"strings" | |||
"code.gitea.io/gitea/modules/urfs_client/urchin" | |||
"code.gitea.io/gitea/modules/notification" | |||
"code.gitea.io/gitea/modules/grampus" | |||
@@ -25,6 +27,7 @@ import ( | |||
"code.gitea.io/gitea/modules/storage" | |||
"code.gitea.io/gitea/modules/timeutil" | |||
routerRepo "code.gitea.io/gitea/routers/repo" | |||
cloudbrainService "code.gitea.io/gitea/services/cloudbrain" | |||
) | |||
func GetModelArtsNotebook2(ctx *context.APIContext) { | |||
@@ -49,6 +52,7 @@ func GetModelArtsNotebook2(ctx *context.APIContext) { | |||
"JobName": job.JobName, | |||
"JobStatus": job.Status, | |||
"JobDuration": job.TrainJobDuration, | |||
"StartTime": job.StartTime, | |||
}) | |||
} | |||
@@ -169,17 +173,20 @@ func GetModelArtsTrainJobVersion(ctx *context.APIContext) { | |||
if len(result.JobInfo.Tasks) > 0 { | |||
if len(result.JobInfo.Tasks[0].CenterID) > 0 && len(result.JobInfo.Tasks[0].CenterName) > 0 { | |||
job.AiCenter = result.JobInfo.Tasks[0].CenterID[0] + "+" + result.JobInfo.Tasks[0].CenterName[0] | |||
aiCenterName = result.JobInfo.Tasks[0].CenterName[0] | |||
// aiCenterName = result.JobInfo.Tasks[0].CenterName[0] | |||
aiCenterName = cloudbrainService.GetAiCenterShow(job.AiCenter, ctx.Context) | |||
} | |||
} | |||
} else { | |||
temp := strings.Split(job.AiCenter, "+") | |||
if len(temp) > 1 { | |||
aiCenterName = temp[1] | |||
} | |||
aiCenterName = cloudbrainService.GetAiCenterShow(job.AiCenter, ctx.Context) | |||
} | |||
if oldStatus != job.Status { | |||
notification.NotifyChangeCloudbrainStatus(job, oldStatus) | |||
if models.IsTrainJobTerminal(job.Status) { | |||
if len(result.JobInfo.Tasks[0].CenterID) == 1 { | |||
urchin.GetBackNpuModel(job.ID, grampus.GetRemoteEndPoint(result.JobInfo.Tasks[0].CenterID[0]), grampus.BucketRemote, grampus.GetNpuModelObjectKey(job.JobName), grampus.GetCenterProxy(setting.Grampus.LocalCenterID)) | |||
} | |||
} | |||
} | |||
err = models.UpdateTrainJobVersion(job) | |||
if err != nil { | |||
@@ -192,6 +199,7 @@ func GetModelArtsTrainJobVersion(ctx *context.APIContext) { | |||
"JobStatus": job.Status, | |||
"JobDuration": job.TrainJobDuration, | |||
"AiCenter": aiCenterName, | |||
"StartTime": job.StartTime, | |||
}) | |||
} | |||
@@ -319,6 +327,7 @@ func TrainJobGetLog(ctx *context.APIContext) { | |||
"Content": result.Content, | |||
"Lines": result.Lines, | |||
"CanLogDownload": canLogDownload, | |||
"StartTime": task.StartTime, | |||
}) | |||
} | |||
@@ -458,6 +467,7 @@ func ModelList(ctx *context.APIContext) { | |||
return | |||
} | |||
status := models.StorageScheduleSucceed | |||
var fileInfos []storage.FileInfo | |||
if task.ComputeResource == models.NPUResource { | |||
fileInfos, err = storage.GetObsListObject(task.JobName, "output/", parentDir, versionName) | |||
@@ -466,6 +476,30 @@ func ModelList(ctx *context.APIContext) { | |||
ctx.ServerError("GetObsListObject:", err) | |||
return | |||
} | |||
if task.Type == models.TypeC2Net { | |||
if len(fileInfos) > 0 { | |||
status = models.StorageScheduleSucceed | |||
} else { | |||
if models.IsTrainJobTerminal(task.Status) { | |||
if task.Status == models.GrampusStatusStopped { | |||
status = models.StorageNoFile | |||
} else { | |||
record, _ := models.GetScheduleRecordByCloudbrainID(task.ID) | |||
if record != nil { | |||
status = record.Status | |||
if status == models.StorageScheduleSucceed { | |||
status = models.StorageNoFile | |||
} | |||
} else { | |||
status = models.StorageScheduleProcessing | |||
} | |||
} | |||
} else { | |||
status = models.StorageScheduleWaiting | |||
} | |||
} | |||
} | |||
} else if task.ComputeResource == models.GPUResource { | |||
files, err := routerRepo.GetModelDirs(task.JobName, parentDir) | |||
if err != nil { | |||
@@ -485,7 +519,7 @@ func ModelList(ctx *context.APIContext) { | |||
ctx.JSON(http.StatusOK, map[string]interface{}{ | |||
"JobID": jobID, | |||
"VersionName": versionName, | |||
"StatusOK": 0, | |||
"StatusOK": status, | |||
"Path": dirArray, | |||
"Dirs": fileInfos, | |||
"task": task, | |||
@@ -514,6 +548,7 @@ func GetModelArtsInferenceJob(ctx *context.APIContext) { | |||
"JobID": jobID, | |||
"JobStatus": job.Status, | |||
"JobDuration": job.TrainJobDuration, | |||
"StartTime": job.StartTime, | |||
}) | |||
} | |||
@@ -2,6 +2,7 @@ package repo | |||
import ( | |||
"bufio" | |||
"code.gitea.io/gitea/modules/urfs_client/urchin" | |||
"encoding/json" | |||
"errors" | |||
"fmt" | |||
@@ -1943,6 +1944,11 @@ func SyncCloudbrainStatus() { | |||
task.CorrectCreateUnix() | |||
if oldStatus != task.Status { | |||
notification.NotifyChangeCloudbrainStatus(task, oldStatus) | |||
if models.IsTrainJobTerminal(task.Status) { | |||
if len(result.JobInfo.Tasks[0].CenterID) == 1 { | |||
urchin.GetBackNpuModel(task.ID, grampus.GetRemoteEndPoint(result.JobInfo.Tasks[0].CenterID[0]), grampus.BucketRemote, grampus.GetNpuModelObjectKey(task.JobName), grampus.GetCenterProxy(setting.Grampus.LocalCenterID)) | |||
} | |||
} | |||
} | |||
err = models.UpdateJob(task) | |||
if err != nil { | |||
@@ -1,6 +1,7 @@ | |||
package repo | |||
import ( | |||
"code.gitea.io/gitea/modules/urfs_client/urchin" | |||
"encoding/json" | |||
"errors" | |||
"fmt" | |||
@@ -431,7 +432,7 @@ func grampusTrainJobGpuCreate(ctx *context.Context, form auth.CreateGrampusTrain | |||
//prepare command | |||
preTrainModelPath := getPreTrainModelPath(form.PreTrainModelUrl, form.CkptName) | |||
command, err := generateCommand(repo.Name, grampus.ProcessorTypeGPU, codeMinioPath+cloudbrain.DefaultBranchName+".zip", datasetRemotePath, bootFile, params, setting.CBCodePathPrefix+jobName+cloudbrain.ModelMountPath+"/", allFileName, preTrainModelPath, form.CkptName) | |||
command, err := generateCommand(repo.Name, grampus.ProcessorTypeGPU, codeMinioPath+cloudbrain.DefaultBranchName+".zip", datasetRemotePath, bootFile, params, setting.CBCodePathPrefix+jobName+cloudbrain.ModelMountPath+"/", allFileName, preTrainModelPath, form.CkptName, "") | |||
if err != nil { | |||
log.Error("Failed to generateCommand: %s (%v)", displayJobName, err, ctx.Data["MsgID"]) | |||
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU) | |||
@@ -688,7 +689,7 @@ func grampusTrainJobNpuCreate(ctx *context.Context, form auth.CreateGrampusTrain | |||
//prepare command | |||
preTrainModelPath := getPreTrainModelPath(form.PreTrainModelUrl, form.CkptName) | |||
command, err := generateCommand(repo.Name, grampus.ProcessorTypeNPU, codeObsPath+cloudbrain.DefaultBranchName+".zip", datasetRemotePath, bootFile, params, setting.CodePathPrefix+jobName+modelarts.OutputPath, allFileName, preTrainModelPath, form.CkptName) | |||
command, err := generateCommand(repo.Name, grampus.ProcessorTypeNPU, codeObsPath+cloudbrain.DefaultBranchName+".zip", datasetRemotePath, bootFile, params, setting.CodePathPrefix+jobName+modelarts.OutputPath, allFileName, preTrainModelPath, form.CkptName, grampus.GetNpuModelRemoteObsUrl(jobName)) | |||
if err != nil { | |||
log.Error("Failed to generateCommand: %s (%v)", displayJobName, err, ctx.Data["MsgID"]) | |||
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeNPU) | |||
@@ -862,7 +863,7 @@ func GrampusTrainJobShow(ctx *context.Context) { | |||
} | |||
oldStatus := task.Status | |||
task.Status = grampus.TransTrainJobStatus(result.JobInfo.Status) | |||
if task.Status != result.JobInfo.Status || result.JobInfo.Status == models.GrampusStatusRunning { | |||
if task.Status != oldStatus || task.Status == models.GrampusStatusRunning { | |||
task.Duration = result.JobInfo.RunSec | |||
if task.Duration < 0 { | |||
task.Duration = 0 | |||
@@ -878,6 +879,11 @@ func GrampusTrainJobShow(ctx *context.Context) { | |||
task.CorrectCreateUnix() | |||
if oldStatus != task.Status { | |||
notification.NotifyChangeCloudbrainStatus(task, oldStatus) | |||
if models.IsTrainJobTerminal(task.Status) { | |||
if len(result.JobInfo.Tasks[0].CenterID) == 1 { | |||
urchin.GetBackNpuModel(task.ID, grampus.GetRemoteEndPoint(result.JobInfo.Tasks[0].CenterID[0]), grampus.BucketRemote, grampus.GetNpuModelObjectKey(task.JobName), grampus.GetCenterProxy(setting.Grampus.LocalCenterID)) | |||
} | |||
} | |||
} | |||
err = models.UpdateJob(task) | |||
if err != nil { | |||
@@ -916,7 +922,7 @@ func GrampusTrainJobShow(ctx *context.Context) { | |||
ctx.Data["canDownload"] = cloudbrain.CanModifyJob(ctx, task) | |||
ctx.Data["displayJobName"] = task.DisplayJobName | |||
ctx.Data["ai_center"] = cloudbrainService.GetAiCenterShow(task.AiCenter,ctx) | |||
ctx.Data["ai_center"] = cloudbrainService.GetAiCenterShow(task.AiCenter, ctx) | |||
ctx.HTML(http.StatusOK, tplGrampusTrainJobShow) | |||
} | |||
@@ -972,15 +978,18 @@ func GrampusGetLog(ctx *context.Context) { | |||
return | |||
} | |||
func generateCommand(repoName, processorType, codeRemotePath, dataRemotePath, bootFile, paramSrc, outputRemotePath, datasetName, pretrainModelPath, pretrainModelFileName string) (string, error) { | |||
func generateCommand(repoName, processorType, codeRemotePath, dataRemotePath, bootFile, paramSrc, outputRemotePath, datasetName, pretrainModelPath, pretrainModelFileName, modelRemoteObsUrl string) (string, error) { | |||
var command string | |||
//prepare | |||
workDir := grampus.NpuWorkDir | |||
if processorType == grampus.ProcessorTypeGPU { | |||
if processorType == grampus.ProcessorTypeNPU { | |||
command += "pwd;cd " + workDir + grampus.CommandPrepareScriptNpu | |||
} else if processorType == grampus.ProcessorTypeGPU { | |||
workDir = grampus.GpuWorkDir | |||
command += "pwd;cd " + workDir + fmt.Sprintf(grampus.CommandPrepareScriptGpu, setting.Grampus.SyncScriptProject, setting.Grampus.SyncScriptProject) | |||
} | |||
command += "pwd;cd " + workDir + fmt.Sprintf(grampus.CommandPrepareScript, setting.Grampus.SyncScriptProject, setting.Grampus.SyncScriptProject) | |||
//download code & dataset | |||
if processorType == grampus.ProcessorTypeNPU { | |||
//no need to download code & dataset by internet | |||
@@ -995,7 +1004,7 @@ func generateCommand(repoName, processorType, codeRemotePath, dataRemotePath, bo | |||
//no need to process | |||
} else if processorType == grampus.ProcessorTypeGPU { | |||
unZipDatasetCommand := generateDatasetUnzipCommand(datasetName) | |||
commandUnzip := "cd " + workDir + "code;unzip -q master.zip;echo \"start to unzip dataset\";cd " + workDir + "dataset;" + unZipDatasetCommand | |||
commandUnzip := "cd " + workDir + "code;unzip -q master.zip;rm -f master.zip;echo \"start to unzip dataset\";cd " + workDir + "dataset;" + unZipDatasetCommand | |||
command += commandUnzip | |||
} | |||
@@ -1029,7 +1038,8 @@ func generateCommand(repoName, processorType, codeRemotePath, dataRemotePath, bo | |||
var commandCode string | |||
if processorType == grampus.ProcessorTypeNPU { | |||
commandCode = "/bin/bash /home/work/run_train_for_openi.sh /home/work/openi.py /tmp/log/train.log" + paramCode + ";" | |||
paramCode += " --model_url=" + modelRemoteObsUrl | |||
commandCode = "/bin/bash /home/work/run_train_for_openi.sh /home/work/openi.py " + grampus.NpuLocalLogUrl + paramCode + ";" | |||
} else if processorType == grampus.ProcessorTypeGPU { | |||
if pretrainModelFileName != "" { | |||
paramCode += " --ckpt_url" + "=" + workDir + "pretrainmodel/" + pretrainModelFileName | |||
@@ -1045,8 +1055,7 @@ func generateCommand(repoName, processorType, codeRemotePath, dataRemotePath, bo | |||
//upload models | |||
if processorType == grampus.ProcessorTypeNPU { | |||
commandUpload := "cd " + workDir + setting.Grampus.SyncScriptProject + "/;./uploader_for_npu " + setting.Bucket + " " + outputRemotePath + " " + workDir + "output/;" | |||
command += commandUpload | |||
// no need to upload | |||
} else if processorType == grampus.ProcessorTypeGPU { | |||
commandUpload := "cd " + workDir + setting.Grampus.SyncScriptProject + "/;./uploader_for_gpu " + setting.Grampus.Env + " " + outputRemotePath + " " + workDir + "output/;" | |||
command += commandUpload | |||
@@ -1077,6 +1086,7 @@ func generateDatasetUnzipCommand(datasetName string) string { | |||
if strings.HasSuffix(datasetNameArray[0], ".tar.gz") { | |||
unZipDatasetCommand = "tar --strip-components=1 -zxvf '" + datasetName + "';" | |||
} | |||
unZipDatasetCommand += "rm -f '" + datasetName + "';" | |||
} else { //多数据集 | |||
for _, datasetNameTemp := range datasetNameArray { | |||
@@ -1085,6 +1095,7 @@ func generateDatasetUnzipCommand(datasetName string) string { | |||
} else { | |||
unZipDatasetCommand = unZipDatasetCommand + "unzip -q '" + datasetNameTemp + "' -d './" + strings.TrimSuffix(datasetNameTemp, ".zip") + "';" | |||
} | |||
unZipDatasetCommand += "rm -f '" + datasetNameTemp + "';" | |||
} | |||
} | |||
@@ -1,8 +1,14 @@ | |||
package response | |||
//repo response | |||
var RESOURCE_QUEUE_NOT_AVAILABLE = &BizError{Code: 1001, Err: "resource queue not available"} | |||
var SPECIFICATION_NOT_EXIST = &BizError{Code: 1002, Err: "specification not exist"} | |||
var SPECIFICATION_NOT_AVAILABLE = &BizError{Code: 1003, Err: "specification not available"} | |||
var CATEGORY_STILL_HAS_BADGES = &BizError{Code: 1004, Err: "Please delete badges in the category first"} | |||
var BADGES_STILL_HAS_USERS = &BizError{Code: 1005, Err: "Please delete users of badge first"} | |||
//common response | |||
var SYSTEM_ERROR = &BizError{Code: 9009, Err: "System error.Please try again later"} | |||
var INSUFFICIENT_PERMISSION = &BizError{Code: 9003, Err: "insufficient permissions"} | |||
var PARAM_ERROR = &BizError{Code: 9001, Err: "param error permissions"} |
@@ -812,7 +812,7 @@ func Cloudbrains(ctx *context.Context) { | |||
for i, _ := range repos { | |||
repoIDList = append(repoIDList, repos[i].ID) | |||
} | |||
ciTasks, count, err := models.Cloudbrains(&models.CloudbrainsOptions{ | |||
ciTasks, _, err := models.Cloudbrains(&models.CloudbrainsOptions{ | |||
ListOptions: models.ListOptions{ | |||
Page: page, | |||
PageSize: setting.UI.IssuePagingNum, | |||
@@ -828,7 +828,6 @@ func Cloudbrains(ctx *context.Context) { | |||
RepoIDList: repoIDList, | |||
ComputeResource: listType, | |||
Type: models.TypeCloudBrainAll, | |||
AiCenter: aiCenter, | |||
Cluster: cluster, | |||
}) | |||
if err != nil { | |||
@@ -836,18 +835,22 @@ func Cloudbrains(ctx *context.Context) { | |||
return | |||
} | |||
models.LoadSpecs4CloudbrainInfo(ciTasks) | |||
for i, task := range ciTasks { | |||
ciTasks[i].CanDebug = true | |||
ciTasks[i].CanDel = true | |||
ciTasks[i].Cloudbrain.ComputeResource = task.ComputeResource | |||
tasks := []*models.CloudbrainInfo{} | |||
for _, task := range ciTasks { | |||
if aiCenter == "" || aiCenter == task.Cloudbrain.Spec.AiCenterCode { | |||
task.CanDebug = true | |||
task.CanDel = true | |||
task.Cloudbrain.ComputeResource = task.ComputeResource | |||
tasks = append(tasks, task) | |||
} | |||
} | |||
count := int64(len(tasks)) | |||
pager := context.NewPagination(int(count), setting.UI.IssuePagingNum, page, getTotalPage(count, setting.UI.IssuePagingNum)) | |||
pager.SetDefaultParams(ctx) | |||
pager.AddParam(ctx, "listType", "ListType") | |||
ctx.Data["Page"] = pager | |||
ctx.Data["PageIsUserCloudBrain"] = true | |||
ctx.Data["Tasks"] = ciTasks | |||
ctx.Data["Tasks"] = tasks | |||
ctx.Data["CanCreate"] = true | |||
ctx.Data["Keyword"] = keyword | |||
@@ -497,6 +497,17 @@ | |||
</div> | |||
</div> | |||
</div> | |||
<div class="ui modal debug-again-alert"> | |||
<div class="ui message" style="background-color: rgba(242, 113, 28, 0.05);border: 1px solid rgba(242, 113, 28, 1);border-radius: 5px;"> | |||
<div style="display: flex;align-items: center;"> | |||
<i class="ri-information-line" style="font-size: 35px;color: rgba(242, 113, 28, 1);;"></i> | |||
<div style="text-align: left;margin-left: 1rem;"> | |||
<div style="font-weight: 600;line-height: 2;">{{.i18n.Tr "repo.cloudbrain.morethanonejob1" | Safe }}</div> | |||
<div style="color:#939393">{{.i18n.Tr "repo.cloudbrain.morethanonejob2" | Safe}}</div> | |||
</div> | |||
</div> | |||
</div> | |||
</div> | |||
</div> | |||
<script> | |||
document.addEventListener('DOMContentLoaded', function() { | |||
@@ -16,19 +16,11 @@ | |||
<a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster=resource_cluster_c2net&aiCenter={{$.aiCenter}}" data-value="{{.i18n.Tr "cloudbrain.resource_cluster_c2net"}}">{{.i18n.Tr "cloudbrain.resource_cluster_c2net"}}</a> | |||
</div> | |||
</div> | |||
<div class="ui selection dropdown" style="min-width: 10em;min-height:2.6em;border-radius: .28571429rem;margin-right: 1em;padding: .67em 3.2em .7em 1em;"> | |||
<div class="default text" style="color: rgba(0,0,0,.87);">{{.i18n.Tr "cloudbrain.all_ai_center"}}</div> | |||
<div class="ui selection dropdown" id="aiCenter-sel" style="min-width: 10em;min-height:2.6em;border-radius: .28571429rem;margin-right: 1em;padding: .67em 3.2em .7em 1em;"> | |||
<div class="default text" style="color: rgba(0,0,0,.87);" aicenter="{{$.aiCenter}}">{{if eq $.aiCenter ""}}{{.i18n.Tr "cloudbrain.all_ai_center"}}{{end}}</div> | |||
<i class="dropdown icon"></i> | |||
<div class="menu"> | |||
<a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=" data-value='{{.i18n.Tr "cloudbrain.all_ai_center"}}'>{{.i18n.Tr "cloudbrain.all_ai_center"}}</a> | |||
<a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=鹏城云计算所" data-value="鹏城云计算所">鹏城云计算所</a> | |||
<a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=成都智算" data-value="成都智算">成都智算</a> | |||
<a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=合肥类脑" data-value="合肥类脑">合肥类脑</a> | |||
<a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=octopus" data-value="octopus">octopus</a> | |||
<a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=武汉智算" data-value="武汉智算">武汉智算</a> | |||
<a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=西安智算" data-value="西安智算">西安智算</a> | |||
<a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=中原智算" data-value="中原智算">中原智算</a> | |||
<a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=许昌AI中心" data-value="许昌AI中心">许昌AI中心</a> | |||
<a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=" data-value='{{.i18n.Tr "cloudbrain.all_ai_center"}}'>{{.i18n.Tr "cloudbrain.all_ai_center"}}</a> | |||
</div> | |||
</div> | |||
<div class="ui selection dropdown" style="min-width: 10em;min-height:2.6em;border-radius: .28571429rem;margin-right: 1em;padding: .67em 3.2em .7em 1em;"> | |||
@@ -73,4 +65,44 @@ | |||
</div> | |||
</div> | |||
</div> | |||
</div> | |||
<script> | |||
;(function() { | |||
document.addEventListener('DOMContentLoaded', function() { | |||
$.ajax({ | |||
type: "GET", | |||
url: "/admin/resources/queue/centers", | |||
dataType: "json", | |||
data: {}, | |||
success: function (res) { | |||
if (res && res.Code === 0) { | |||
var data = res.Data; | |||
var aiCenterSelEl = $('#aiCenter-sel'); | |||
var itemEl = aiCenterSelEl.find('.menu .item').eq(0); | |||
var selectAiCenterCode = aiCenterSelEl.find('.default').attr('aicenter'); | |||
var selectAiCenterName = ''; | |||
for (var i = 0, iLen = data.length; i < iLen; i++) { | |||
var dataI = data[i]; | |||
var itemClone = itemEl.clone(); | |||
var oHref = itemClone.attr('href'); | |||
var oId = itemClone.attr('id'); | |||
itemClone.attr('data-value', dataI.AiCenterCode); | |||
itemClone.removeAttr('id'); | |||
itemClone.attr('href', oHref + dataI.AiCenterCode); | |||
itemClone.text(dataI.AiCenterName); | |||
aiCenterSelEl.find('.menu').append(itemClone); | |||
if (selectAiCenterCode === dataI.AiCenterCode) { | |||
selectAiCenterName = dataI.AiCenterName; | |||
} | |||
} | |||
selectAiCenterCode && aiCenterSelEl.dropdown('set selected', selectAiCenterCode); | |||
selectAiCenterName && aiCenterSelEl.dropdown('set text', selectAiCenterName); | |||
} | |||
}, | |||
error: function (err) { | |||
console.log(err); | |||
} | |||
}); | |||
}); | |||
})(); | |||
</script> |
@@ -28,19 +28,11 @@ | |||
<a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster=resource_cluster_c2net&aiCenter={{$.aiCenter}}" data-value="{{.i18n.Tr "cloudbrain.resource_cluster_c2net"}}">{{.i18n.Tr "cloudbrain.resource_cluster_c2net"}}</a> | |||
</div> | |||
</div> | |||
<div class="ui selection dropdown" style="min-width: 10em;min-height:2.6em;border-radius: .28571429rem;margin-right: 1em;padding: .67em 3.2em .7em 1em;"> | |||
<div class="default text" style="color: rgba(0,0,0,.87);">{{.i18n.Tr "cloudbrain.all_ai_center"}}</div> | |||
<div class="ui selection dropdown" id="aiCenter-sel" style="min-width: 10em;min-height:2.6em;border-radius: .28571429rem;margin-right: 1em;padding: .67em 3.2em .7em 1em;"> | |||
<div class="default text" style="color: rgba(0,0,0,.87);" aicenter="{{$.aiCenter}}">{{if eq $.aiCenter ""}}{{.i18n.Tr "cloudbrain.all_ai_center"}}{{end}}</div> | |||
<i class="dropdown icon"></i> | |||
<div class="menu"> | |||
<a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=" data-value='{{.i18n.Tr "cloudbrain.all_ai_center"}}'>{{.i18n.Tr "cloudbrain.all_ai_center"}}</a> | |||
<a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=鹏城云计算所" data-value="鹏城云计算所">鹏城云计算所</a> | |||
<a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=成都智算" data-value="成都智算">成都智算</a> | |||
<a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=合肥类脑" data-value="合肥类脑">合肥类脑</a> | |||
<a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=octopus" data-value="octopus">octopus</a> | |||
<a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=武汉智算" data-value="武汉智算">武汉智算</a> | |||
<a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=西安智算" data-value="西安智算">西安智算</a> | |||
<a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=中原智算" data-value="中原智算">中原智算</a> | |||
<a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=许昌AI中心" data-value="许昌AI中心">许昌AI中心</a> | |||
<a class="item" href="{{$.Link}}?q={{$.Keyword}}&jobType={{$.JobType}}&listType={{$.ListType}}&jobStatus={{$.JobStatus}}&cluster={{$.cluster}}&aiCenter=" data-value='{{.i18n.Tr "cloudbrain.all_ai_center"}}'>{{.i18n.Tr "cloudbrain.all_ai_center"}}</a> | |||
</div> | |||
</div> | |||
<div class="ui selection dropdown" style="min-width: 10em;min-height:2.6em;border-radius: .28571429rem;margin-right: 1em;padding: .67em 3.2em .7em 1em;"> | |||
@@ -87,4 +79,44 @@ | |||
</div> | |||
</div> | |||
</div> | |||
</div> | |||
</div> | |||
<script> | |||
;(function() { | |||
document.addEventListener('DOMContentLoaded', function() { | |||
$.ajax({ | |||
type: "GET", | |||
url: "/admin/resources/queue/centers", | |||
dataType: "json", | |||
data: {}, | |||
success: function (res) { | |||
if (res && res.Code === 0) { | |||
var data = res.Data; | |||
var aiCenterSelEl = $('#aiCenter-sel'); | |||
var itemEl = aiCenterSelEl.find('.menu .item').eq(0); | |||
var selectAiCenterCode = aiCenterSelEl.find('.default').attr('aicenter'); | |||
var selectAiCenterName = ''; | |||
for (var i = 0, iLen = data.length; i < iLen; i++) { | |||
var dataI = data[i]; | |||
var itemClone = itemEl.clone(); | |||
var oHref = itemClone.attr('href'); | |||
var oId = itemClone.attr('id'); | |||
itemClone.attr('data-value', dataI.AiCenterCode); | |||
itemClone.removeAttr('id'); | |||
itemClone.attr('href', oHref + dataI.AiCenterCode); | |||
itemClone.text(dataI.AiCenterName); | |||
aiCenterSelEl.find('.menu').append(itemClone); | |||
if (selectAiCenterCode === dataI.AiCenterCode) { | |||
selectAiCenterName = dataI.AiCenterName; | |||
} | |||
} | |||
selectAiCenterCode && aiCenterSelEl.dropdown('set selected', selectAiCenterCode); | |||
selectAiCenterName && aiCenterSelEl.dropdown('set text', selectAiCenterName); | |||
} | |||
}, | |||
error: function (err) { | |||
console.log(err); | |||
} | |||
}); | |||
}); | |||
})(); | |||
</script> |
@@ -1,15 +1,22 @@ | |||
<div> | |||
<div class="ui modal max-full-log{{.VersionName}} container" style="height: 90%;margin: 3rem auto;"> | |||
<div class="file-info" style="padding: 2rem;justify-content: space-between;"> | |||
<div class="file-info" style="padding: 2rem;justify-content: space-between;height: 10%;"> | |||
<div id="log-file-title" style="font-size: 16px;font-weight:600"></div> | |||
<div> | |||
<div class="file-info"> | |||
<a class="file-info" id="{{.VersionName}}-log-down" href=""> | |||
<i class="ri-download-cloud-2-line"></i> | |||
<span style="margin-left: 0.3rem;font-size: 12px;" class="log-file-down"></span> | |||
</a> | |||
<div class="file-info close-log-dialog" data-version="{{.VersionName}}" style="margin-left: 1rem;color:#0366d6;cursor: pointer;"> | |||
<i class="ri-fullscreen-exit-fill" style="font-size: 16px;"></i> | |||
<span id="log-file-exit" style="margin-left: 0.3rem;font-size: 12px;"></span> | |||
</div> | |||
</div> | |||
</div> | |||
<div style="margin: 0 2rem;border: 1px solid #e8e8e8;height: 88%;position: relative;"> | |||
<div style="margin: 0 2.5rem;border: 1px solid #e8e8e8;height: 85%;position: relative;"> | |||
<span> | |||
<a style="position: absolute; right: -32px;cursor: pointer;" | |||
class="log_top-max" data-version="{{.VersionName}}" data-max="-max"><i class="icon-to-top"></i></a> | |||
@@ -531,19 +531,22 @@ | |||
<i class="ri-download-cloud-2-line"></i> | |||
<span style="margin-left: 0.3rem;">{{$.i18n.Tr "repo.modelarts.download_log"}}</span> | |||
</a> | |||
<div class="file-info full-log-dialog" data-version="{{.VersionName}}" data-log="{{$.i18n.Tr "repo.modelarts.log_file"}}" data-exit="{{$.i18n.Tr "repo.modelarts.exit_full_screen"}}" style="margin-left: 1rem;color:#0366d6;cursor: pointer;"> | |||
<div class="file-info full-log-dialog" data-version="{{.VersionName}}" data-log="{{$.i18n.Tr "repo.modelarts.log_file"}}" | |||
data-exit="{{$.i18n.Tr "repo.modelarts.exit_full_screen"}}" style="margin-left: 1rem;color:#0366d6;cursor: pointer;" | |||
data-log-down="{{$.i18n.Tr "repo.modelarts.download_log"}}" data-href="/api/v1/repos/{{$.RepoRelPath}}/cloudbrain/{{.ID}}/download_log_file" | |||
data-scroll-top="{{$.i18n.Tr "repo.log_scroll_start"}}" data-scroll-bottom="{{$.i18n.Tr "repo.log_scroll_end"}}"> | |||
<i class="ri-aspect-ratio-line"></i> | |||
<span style="margin-left: 0.3rem;">{{$.i18n.Tr "repo.modelarts.download_log"}}</span> | |||
<span style="margin-left: 0.3rem;">{{$.i18n.Tr "repo.modelarts.fullscreen_log_file"}}</span> | |||
</div> | |||
</div> | |||
<div | |||
style="position: relative;border: 1px solid rgba(0,0,0,.2);padding: 0 10px;margin-top: 10px;"> | |||
<span> | |||
<a title="滚动到顶部" style="position: absolute; right: -32px;cursor: pointer;" | |||
<a title="{{$.i18n.Tr "repo.log_scroll_start"}}" style="position: absolute; right: -32px;cursor: pointer;" | |||
class="log_top" data-version="{{.VersionName}}"><i class="icon-to-top"></i></a> | |||
</span> | |||
<span class="log-info-{{.VersionName}}"> | |||
<a title="滚动到底部" style="position: absolute; bottom: 10px;right: -32px;cursor: pointer;" | |||
<a title="{{$.i18n.Tr "repo.log_scroll_end"}}" style="position: absolute; bottom: 10px;right: -32px;cursor: pointer;" | |||
class="log_bottom" data-version="{{.VersionName}}"><i | |||
class="icon-to-bottom"></i></a> | |||
</span> | |||
@@ -315,7 +315,7 @@ | |||
<td class="ti-text-form-content"> | |||
<div class="text-span text-span-w"> | |||
<span style="font-size: 12px;" class=""> | |||
<span style="font-size: 12px;" id="{{.VersionName}}-startTime"> | |||
{{if not (eq .StartTime 0)}} | |||
{{TimeSinceUnix1 .StartTime}} | |||
{{else}} | |||
@@ -542,7 +542,10 @@ | |||
<i class="ri-download-cloud-2-line"></i> | |||
<span style="margin-left: 0.3rem;">{{$.i18n.Tr "repo.modelarts.download_log"}}</span> | |||
</a> | |||
<div class="file-info full-log-dialog" data-version="{{.VersionName}}" data-log="{{$.i18n.Tr "repo.modelarts.log_file"}}" data-exit="{{$.i18n.Tr "repo.modelarts.exit_full_screen"}}" style="margin-left: 1rem;color:#0366d6;cursor: pointer;"> | |||
<div class="file-info full-log-dialog" data-version="{{.VersionName}}" data-log="{{$.i18n.Tr "repo.modelarts.log_file"}}" | |||
data-exit="{{$.i18n.Tr "repo.modelarts.exit_full_screen"}}" style="margin-left: 1rem;color:#0366d6;cursor: pointer;" | |||
data-log-down="{{$.i18n.Tr "repo.modelarts.download_log"}}" data-href="/api/v1/repos/{{$.RepoRelPath}}/cloudbrain/{{.ID}}/download_log_file" | |||
data-scroll-top="{{$.i18n.Tr "repo.log_scroll_start"}}" data-scroll-bottom="{{$.i18n.Tr "repo.log_scroll_end"}}"> | |||
<i class="ri-aspect-ratio-line"></i> | |||
<span style="margin-left: 0.3rem;">{{$.i18n.Tr "repo.modelarts.fullscreen_log_file"}}</span> | |||
</div> | |||
@@ -550,11 +553,11 @@ | |||
<div | |||
style="position: relative;border: 1px solid rgba(0,0,0,.2);padding: 0 10px;margin-top: 10px;"> | |||
<span> | |||
<a title="滚动到顶部" style="position: absolute; right: -32px;cursor: pointer;" | |||
<a title="{{$.i18n.Tr "repo.log_scroll_start"}}" style="position: absolute; right: -32px;cursor: pointer;" | |||
class="log_top" data-version="{{.VersionName}}"><i class="icon-to-top"></i></a> | |||
</span> | |||
<span class="log-info-{{.VersionName}}"> | |||
<a title="滚动到底部" style="position: absolute; bottom: 10px;right: -32px;cursor: pointer;" | |||
<a title="{{$.i18n.Tr "repo.log_scroll_end"}}" style="position: absolute; bottom: 10px;right: -32px;cursor: pointer;" | |||
class="log_bottom" data-version="{{.VersionName}}"><i | |||
class="icon-to-bottom"></i></a> | |||
</span> | |||
@@ -570,7 +573,7 @@ | |||
<input type="hidden" name="end_line-max" value> | |||
<input type="hidden" name="start_line-max" value> | |||
<input type="hidden" name="start_line-max-copy" value> | |||
<input type="hidden" name="start_line" value> <input type="hidden" name="start_line" value> | |||
<input type="hidden" name="start_line" value> | |||
<input type="hidden" name="init_log" value> | |||
<pre id="log_file{{.VersionName}}"></pre> | |||
</div> | |||
@@ -331,7 +331,7 @@ | |||
<td class="ti-text-form-content"> | |||
<div class="text-span text-span-w"> | |||
<span style="font-size: 12px;"> | |||
<span style="font-size: 12px;" id="{{.VersionName}}-startTime"> | |||
{{if not (eq .StartTime 0)}} | |||
{{TimeSinceUnix1 .StartTime}} | |||
{{else}} | |||
@@ -531,18 +531,21 @@ | |||
<i class="ri-download-cloud-2-line"></i> | |||
<span style="margin-left: 0.3rem;">{{$.i18n.Tr "repo.modelarts.download_log"}}</span> | |||
</a> | |||
<div class="file-info full-log-dialog" data-version="{{.VersionName}}" data-log="{{$.i18n.Tr "repo.modelarts.log_file"}}" data-exit="{{$.i18n.Tr "repo.modelarts.exit_full_screen"}}" style="margin-left: 1rem;color:#0366d6;cursor: pointer;"> | |||
<div class="file-info full-log-dialog" data-version="{{.VersionName}}" data-log="{{$.i18n.Tr "repo.modelarts.log_file"}}" | |||
data-exit="{{$.i18n.Tr "repo.modelarts.exit_full_screen"}}" style="margin-left: 1rem;color:#0366d6;cursor: pointer;" | |||
data-log-down="{{$.i18n.Tr "repo.modelarts.download_log"}}" data-href="/api/v1/repos/{{$.RepoRelPath}}/cloudbrain/{{.ID}}/download_log_file" | |||
data-scroll-top="{{$.i18n.Tr "repo.log_scroll_start"}}" data-scroll-bottom="{{$.i18n.Tr "repo.log_scroll_end"}}"> | |||
<i class="ri-aspect-ratio-line"></i> | |||
<span style="margin-left: 0.3rem;">{{$.i18n.Tr "repo.modelarts.fullscreen_log_file"}}</span> | |||
</div> | |||
</div> | |||
<div style="position: relative;border: 1px solid rgba(0,0,0,.2);padding: 0 10px;margin-top: 10px;"> | |||
<span> | |||
<a title="滚动到顶部" style="position: absolute; right: -32px;cursor: pointer;" | |||
<a title="{{$.i18n.Tr "repo.log_scroll_start"}}" style="position: absolute; right: -32px;cursor: pointer;" | |||
class="log_top" data-version="{{.VersionName}}"><i class="icon-to-top"></i></a> | |||
</span> | |||
<span class="log-info-{{.VersionName}}"> | |||
<a title="滚动到底部" style="position: absolute; bottom: 10px;right: -32px;cursor: pointer;" | |||
<a title="{{$.i18n.Tr "repo.log_scroll_end"}}" style="position: absolute; bottom: 10px;right: -32px;cursor: pointer;" | |||
class="log_bottom" data-version="{{.VersionName}}"><i | |||
class="icon-to-bottom"></i></a> | |||
</span> | |||
@@ -558,7 +561,7 @@ | |||
<input type="hidden" name="end_line-max" value> | |||
<input type="hidden" name="start_line-max" value> | |||
<input type="hidden" name="start_line-max-copy" value> | |||
<input type="hidden" name="start_line" value> <input type="hidden" name="start_line" value> | |||
<input type="hidden" name="start_line" value> | |||
<input type="hidden" name="init_log" value> | |||
<pre id="log_file{{.VersionName}}"></pre> | |||
</div> | |||
@@ -419,6 +419,18 @@ | |||
</div> | |||
</div> | |||
</div> | |||
<div class="ui modal debug-again-alert"> | |||
<div class="ui message" style="background-color: rgba(242, 113, 28, 0.05);border: 1px solid rgba(242, 113, 28, 1);border-radius: 5px;"> | |||
<div style="display: flex;align-items: center;"> | |||
<i class="ri-information-line" style="font-size: 35px;color: rgba(242, 113, 28, 1);;"></i> | |||
<div style="text-align: left;margin-left: 1rem;"> | |||
<div style="font-weight: 600;line-height: 2;">{{.i18n.Tr "repo.cloudbrain.morethanonejob1" | Safe }}</div> | |||
<div style="color:#939393">{{.i18n.Tr "repo.cloudbrain.morethanonejob2" | Safe}}</div> | |||
</div> | |||
</div> | |||
</div> | |||
</div> | |||
{{template "base/footer" .}} | |||
<script> | |||
// 调试和评分新开窗口 | |||
@@ -330,7 +330,7 @@ | |||
<td class="ti-text-form-content"> | |||
<div class="text-span text-span-w"> | |||
<span style="font-size: 12px;" class=""> | |||
<span style="font-size: 12px;" id="{{.VersionName}}-startTime"> | |||
{{if not (eq .StartTime 0)}} | |||
{{TimeSinceUnix1 .StartTime}} | |||
{{else}} | |||
@@ -527,7 +527,10 @@ | |||
<i class="ri-download-cloud-2-line"></i> | |||
<span style="margin-left: 0.3rem;">{{$.i18n.Tr "repo.modelarts.download_log"}}</span> | |||
</a> | |||
<div class="file-info full-log-dialog" data-log-type="c2Net" data-version="{{.VersionName}}" data-log="{{$.i18n.Tr "repo.modelarts.log_file"}}" data-exit="{{$.i18n.Tr "repo.modelarts.exit_full_screen"}}" style="margin-left: 1rem;color:#0366d6;cursor: pointer;"> | |||
<div class="file-info full-log-dialog" data-version="{{.VersionName}}" data-log="{{$.i18n.Tr "repo.modelarts.log_file"}}" data-log-type="c2Net" | |||
data-exit="{{$.i18n.Tr "repo.modelarts.exit_full_screen"}}" style="margin-left: 1rem;color:#0366d6;cursor: pointer;" | |||
data-log-down="{{$.i18n.Tr "repo.modelarts.download_log"}}" data-href="/api/v1/repos/{{$.RepoRelPath}}/grampus/train-job/{{.JobID}}/download_log" | |||
data-scroll-top="{{$.i18n.Tr "repo.log_scroll_start"}}" data-scroll-bottom="{{$.i18n.Tr "repo.log_scroll_end"}}"> | |||
<i class="ri-aspect-ratio-line"></i> | |||
<span style="margin-left: 0.3rem;">{{$.i18n.Tr "repo.modelarts.fullscreen_log_file"}}</span> | |||
</div> | |||
@@ -263,7 +263,7 @@ td, th { | |||
<td class="ti-text-form-content"> | |||
<div class="text-span text-span-w"> | |||
<span style="font-size: 12px;" class=""> | |||
<span style="font-size: 12px;" id="{{.VersionName}}-startTime"> | |||
{{if not (eq .StartTime 0)}} | |||
{{TimeSinceUnix1 .StartTime}} | |||
{{else}} | |||
@@ -467,18 +467,21 @@ td, th { | |||
<i class="ri-download-cloud-2-line"></i> | |||
<span style="margin-left: 0.3rem;">{{$.i18n.Tr "repo.modelarts.download_log"}}</span> | |||
</a> | |||
<div class="file-info full-log-dialog" data-version="{{.VersionName}}" data-log="{{$.i18n.Tr "repo.modelarts.log_file"}}" data-exit="{{$.i18n.Tr "repo.modelarts.exit_full_screen"}}" style="margin-left: 1rem;color:#0366d6;cursor: pointer;"> | |||
<div class="file-info full-log-dialog" data-version="{{.VersionName}}" data-log="{{$.i18n.Tr "repo.modelarts.log_file"}}" | |||
data-exit="{{$.i18n.Tr "repo.modelarts.exit_full_screen"}}" style="margin-left: 1rem;color:#0366d6;cursor: pointer;" | |||
data-log-down="{{$.i18n.Tr "repo.modelarts.download_log"}}" data-href="{{$.RepoLink}}/modelarts/train-job/{{.JobID}}/download_log_file?version_name={{.VersionName}}" | |||
data-scroll-top="{{$.i18n.Tr "repo.log_scroll_start"}}" data-scroll-bottom="{{$.i18n.Tr "repo.log_scroll_end"}}"> | |||
<i class="ri-aspect-ratio-line"></i> | |||
<span style="margin-left: 0.3rem;">{{$.i18n.Tr "repo.modelarts.fullscreen_log_file"}}</span> | |||
</div> | |||
</div> | |||
<div style="position: relative;border: 1px solid rgba(0,0,0,.2);padding: 0 10px;margin-top: 10px;"> | |||
<span> | |||
<a title="滚动到顶部" style="position: absolute; right: -32px;cursor: pointer;" | |||
<a title="{{$.i18n.Tr "repo.log_scroll_start"}}" style="position: absolute; right: -32px;cursor: pointer;" | |||
class="log_top" data-version="{{.VersionName}}"><i class="icon-to-top"></i></a> | |||
</span> | |||
<span class="log-info-{{.VersionName}}"> | |||
<a title="滚动到底部" style="position: absolute; bottom: 10px;right: -32px;cursor: pointer;" | |||
<a title="{{$.i18n.Tr "repo.log_scroll_end"}}" style="position: absolute; bottom: 10px;right: -32px;cursor: pointer;" | |||
class="log_bottom" data-version="{{.VersionName}}"><i | |||
class="icon-to-bottom"></i></a> | |||
</span> | |||
@@ -493,7 +496,7 @@ td, th { | |||
<input type="hidden" name="end_line-max" value> | |||
<input type="hidden" name="start_line-max" value> | |||
<input type="hidden" name="start_line-max-copy" value> | |||
<input type="hidden" name="start_line" value> <input type="hidden" name="start_line" value> | |||
<input type="hidden" name="start_line" value> | |||
<input type="hidden" name="init_log" value> | |||
<pre id="log_file{{.VersionName}}"></pre> | |||
</div> | |||
@@ -370,7 +370,7 @@ | |||
<td class="ti-text-form-content"> | |||
<div class="text-span text-span-w"> | |||
<span style="font-size: 12px;" class=""> | |||
<span style="font-size: 12px;" id="{{.VersionName}}-startTime"> | |||
{{if not (eq .Cloudbrain.StartTime 0)}} | |||
{{TimeSinceUnix1 .Cloudbrain.StartTime}} | |||
{{else}} | |||
@@ -555,7 +555,10 @@ | |||
<i class="ri-download-cloud-2-line"></i> | |||
<span style="margin-left: 0.3rem;">{{$.i18n.Tr "repo.modelarts.download_log"}}</span> | |||
</a> | |||
<div class="file-info full-log-dialog" data-version="{{.VersionName}}" data-log="{{$.i18n.Tr "repo.modelarts.log_file"}}" data-exit="{{$.i18n.Tr "repo.modelarts.exit_full_screen"}}" style="margin-left: 1rem;color:#0366d6;cursor: pointer;"> | |||
<div class="file-info full-log-dialog" data-version="{{.VersionName}}" data-log="{{$.i18n.Tr "repo.modelarts.log_file"}}" | |||
data-exit="{{$.i18n.Tr "repo.modelarts.exit_full_screen"}}" style="margin-left: 1rem;color:#0366d6;cursor: pointer;" | |||
data-log-down="{{$.i18n.Tr "repo.modelarts.download_log"}}" data-href="{{$.RepoLink}}/modelarts/train-job/{{.JobID}}/download_log_file?version_name={{.VersionName}}" | |||
data-scroll-top="{{$.i18n.Tr "repo.log_scroll_start"}}" data-scroll-bottom="{{$.i18n.Tr "repo.log_scroll_end"}}"> | |||
<i class="ri-aspect-ratio-line"></i> | |||
<span style="margin-left: 0.3rem;">{{$.i18n.Tr "repo.modelarts.fullscreen_log_file"}}</span> | |||
</div> | |||
@@ -56,7 +56,10 @@ | |||
{{template "repo/header" .}} | |||
<div class="ui container"> | |||
<div class="cloudbrain-type" style="display: none;" data-cloudbrain-type="{{.datasetType}}" data-repo-link="{{.RepoLink}}" data-flag-model="true" data-dataset-uuid="{{.attachment}}" data-dataset-name="{{.dataset_name}}"></div> | |||
{{template "base/alert" .}} | |||
{{if eq .NotStopTaskCount 0}} | |||
{{template "base/alert" .}} | |||
{{end}} | |||
{{template "custom/alert_cb" .}} | |||
<h4 class="ui top attached header"> | |||
{{.i18n.Tr "repo.modelarts.train_job.new"}} | |||
</h4> | |||
@@ -260,7 +263,7 @@ | |||
</div> | |||
<div class="inline unite min_title field"> | |||
<button class="ui create_train_job green button"> | |||
<button class="ui create_train_job green button {{if eq .NotStopTaskCount 1}}disabled{{end}}"> | |||
{{.i18n.Tr "repo.cloudbrain.new"}} | |||
</button> | |||
<a class="ui button cancel" href="{{.RepoLink}}/modelarts/train-job">{{.i18n.Tr "repo.cloudbrain.cancel"}}</a> | |||
@@ -474,7 +474,10 @@ | |||
<i class="ri-download-cloud-2-line"></i> | |||
<span style="margin-left: 0.3rem;">{{$.i18n.Tr "repo.modelarts.download_log"}}</span> | |||
</a> | |||
<div class="file-info full-log-dialog" data-version="{{.VersionName}}" data-log="{{$.i18n.Tr "repo.modelarts.log_file"}}" data-exit="{{$.i18n.Tr "repo.modelarts.exit_full_screen"}}" style="margin-left: 1rem;color:#0366d6;cursor: pointer;"> | |||
<div class="file-info full-log-dialog" data-version="{{.VersionName}}" data-log="{{$.i18n.Tr "repo.modelarts.log_file"}}" | |||
data-exit="{{$.i18n.Tr "repo.modelarts.exit_full_screen"}}" style="margin-left: 1rem;color:#0366d6;cursor: pointer;" | |||
data-log-down="{{$.i18n.Tr "repo.modelarts.download_log"}}" data-href="/api/v1/repos/{{$.RepoRelPath}}/cloudbrain" | |||
data-scroll-top="{{$.i18n.Tr "repo.log_scroll_start"}}" data-scroll-bottom="{{$.i18n.Tr "repo.log_scroll_end"}}"> | |||
<i class="ri-aspect-ratio-line"></i> | |||
<span style="margin-left: 0.3rem;">{{$.i18n.Tr "repo.modelarts.fullscreen_log_file"}}</span> | |||
</div> | |||
@@ -482,11 +485,11 @@ | |||
<div | |||
style="position: relative;border: 1px solid rgba(0,0,0,.2);padding: 0 10px;margin-top: 10px;"> | |||
<span> | |||
<a title="滚动到顶部" style="position: absolute; right: -32px;cursor: pointer;" | |||
<a title="{{$.i18n.Tr "repo.log_scroll_start"}}" style="position: absolute; right: -32px;cursor: pointer;" | |||
class="log_top" data-version=""><i class="icon-to-top"></i></a> | |||
</span> | |||
<span class="log-info-"> | |||
<a title="滚动到底部" style="position: absolute; bottom: 10px;right: -32px;cursor: pointer;" | |||
<a title="{{$.i18n.Tr "repo.log_scroll_end"}}" style="position: absolute; bottom: 10px;right: -32px;cursor: pointer;" | |||
class="log_bottom" data-version=""><i | |||
class="icon-to-bottom"></i></a> | |||
</span> | |||
@@ -1,7 +1,7 @@ | |||
{{template "base/head" .}} | |||
<!-- 提示框 --> | |||
<script src="{{StaticUrlPrefix}}/js/specsuse.js?v={{MD5 AppVer}}" type="text/javascript"></script> | |||
<div class="alert"></div> | |||
<script src="{{StaticUrlPrefix}}/js/specsuse.js?v={{MD5 AppVer}}" type="text/javascript"></script> | |||
<div class="explore users"> | |||
<div class="cloudbrain_debug" style="display: none;" data-debug="{{$.i18n.Tr "repo.debug"}}" | |||
data-debug-again="{{$.i18n.Tr "repo.debug_again"}}" data-debug-task="{{$.i18n.Tr "cloudbrain.DEBUG"}}" | |||
@@ -445,6 +445,18 @@ | |||
</div> | |||
</div> | |||
</div> | |||
<div class="ui modal debug-again-alert"> | |||
<div class="ui message" style="background-color: rgba(242, 113, 28, 0.05);border: 1px solid rgba(242, 113, 28, 1);border-radius: 5px;"> | |||
<div style="display: flex;align-items: center;"> | |||
<i class="ri-information-line" style="font-size: 35px;color: rgba(242, 113, 28, 1);;"></i> | |||
<div style="text-align: left;margin-left: 1rem;"> | |||
<div style="font-weight: 600;line-height: 2;">{{.i18n.Tr "repo.cloudbrain.morethanonejob1" | Safe }}</div> | |||
<div style="color:#939393">{{.i18n.Tr "repo.cloudbrain.morethanonejob2" | Safe}}</div> | |||
</div> | |||
</div> | |||
</div> | |||
</div> | |||
</div> | |||
<script> | |||