
unzip model

pull/3134/head
lewis 2 years ago
parent commit 0971fe2472
6 changed files with 18 additions and 12 deletions
  1. models/cloudbrain.go (+2, -0)
  2. modules/grampus/grampus.go (+1, -1)
  3. modules/urfs_client/urchin/schedule.go (+4, -8)
  4. routers/api/v1/repo/modelarts.go (+3, -1)
  5. routers/repo/cloudbrain.go (+3, -1)
  6. routers/repo/grampus.go (+5, -1)

models/cloudbrain.go (+2, -0)

@@ -116,6 +116,8 @@ const (
GrampusStatusStopped = "STOPPED"
GrampusStatusUnknown = "UNKNOWN"
GrampusStatusWaiting = "WAITING"
+
+ ModelSuffix = "models.zip"
)

const (


modules/grampus/grampus.go (+1, -1)

@@ -27,7 +27,7 @@ const (
CodeArchiveName = "master.zip"

BucketRemote = "grampus"
- RemoteModelPath = "/output/models.zip"
+ RemoteModelPath = "/output/" + models.ModelSuffix
)

var (


modules/urfs_client/urchin/schedule.go (+4, -8)

@@ -1,14 +1,14 @@
package urchin

import (
- "code.gitea.io/gitea/modules/labelmsg"
- "code.gitea.io/gitea/modules/setting"
"encoding/json"
"fmt"
"strings"

"code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/modules/labelmsg"
"code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
)

type DecompressReq struct {
@@ -16,10 +16,6 @@ type DecompressReq struct {
DestPath string `json:"dest_path"`
}

- const (
- modelSuffix = "models.zip"
- )
-
var urfsClient Urchinfs

func getUrfsClient() {
@@ -54,7 +50,7 @@ func GetBackNpuModel(cloudbrainID int64, endpoint, bucket, objectKey, destPeerHo
switch res.StatusCode {
case models.StorageScheduleSucceed:
log.Info("ScheduleDataToPeerByKey succeed")
- decompress(res.DataRoot+"/"+res.DataPath, setting.Bucket+"/"+strings.TrimSuffix(res.DataPath, modelSuffix))
+ decompress(res.DataRoot+"/"+res.DataPath, setting.Bucket+"/"+strings.TrimSuffix(res.DataPath, models.ModelSuffix))
case models.StorageScheduleProcessing:
log.Info("ScheduleDataToPeerByKey processing")
case models.StorageScheduleFailed:
@@ -89,7 +85,7 @@ func HandleScheduleRecords() error {
switch res.StatusCode {
case models.StorageScheduleSucceed:
log.Info("ScheduleDataToPeerByKey(%s) succeed", record.ObjectKey)
- decompress(record.Bucket+"/"+record.ObjectKey, setting.Bucket+"/"+strings.TrimSuffix(record.ObjectKey, modelSuffix))
+ decompress(record.Bucket+"/"+record.ObjectKey, setting.Bucket+"/"+strings.TrimSuffix(record.ObjectKey, models.ModelSuffix))
case models.StorageScheduleProcessing:
log.Info("ScheduleDataToPeerByKey(%s) processing", record.ObjectKey)
case models.StorageScheduleFailed:
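
Taken together, the three files above now share one source of truth for the archive name: models.ModelSuffix replaces the duplicated "models.zip" literals used by grampus.RemoteModelPath and by the decompress destinations built in schedule.go. A minimal sketch of that relationship, with a hypothetical object key standing in for whatever grampus.GetNpuModelObjectKey actually produces:

package main

import (
	"fmt"
	"strings"
)

// ModelSuffix mirrors the constant introduced in models/cloudbrain.go.
const ModelSuffix = "models.zip"

func main() {
	// Hypothetical object key; in the repository it comes from
	// grampus.GetNpuModelObjectKey(jobName).
	objectKey := "job-abc123/output/" + ModelSuffix

	// schedule.go trims the shared suffix to build the decompress
	// destination under setting.Bucket.
	destDir := "bucket/" + strings.TrimSuffix(objectKey, ModelSuffix)

	fmt.Println(destDir) // bucket/job-abc123/output/
}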


routers/api/v1/repo/modelarts.go (+3, -1)

@@ -182,7 +182,9 @@ func GetModelArtsTrainJobVersion(ctx *context.APIContext) {
if oldStatus != job.Status {
notification.NotifyChangeCloudbrainStatus(job, oldStatus)
if models.IsTrainJobTerminal(job.Status) {
- urchin.GetBackNpuModel(job.ID, grampus.GetRemoteEndPoint(result.JobInfo.Tasks[0].CenterID[0]), grampus.BucketRemote, grampus.GetNpuModelObjectKey(job.JobName), grampus.GetCenterProxy(setting.Grampus.LocalCenterID))
+ if len(result.JobInfo.Tasks[0].CenterID) == 1 {
+ urchin.GetBackNpuModel(job.ID, grampus.GetRemoteEndPoint(result.JobInfo.Tasks[0].CenterID[0]), grampus.BucketRemote, grampus.GetNpuModelObjectKey(job.JobName), grampus.GetCenterProxy(setting.Grampus.LocalCenterID))
+ }
}
}
err = models.UpdateTrainJobVersion(job)


routers/repo/cloudbrain.go (+3, -1)

@@ -1941,7 +1941,9 @@ func SyncCloudbrainStatus() {
if oldStatus != task.Status {
notification.NotifyChangeCloudbrainStatus(task, oldStatus)
if models.IsTrainJobTerminal(task.Status) {
- urchin.GetBackNpuModel(task.ID, grampus.GetRemoteEndPoint(result.JobInfo.Tasks[0].CenterID[0]), grampus.BucketRemote, grampus.GetNpuModelObjectKey(task.JobName), grampus.GetCenterProxy(setting.Grampus.LocalCenterID))
+ if len(result.JobInfo.Tasks[0].CenterID) == 1 {
+ urchin.GetBackNpuModel(task.ID, grampus.GetRemoteEndPoint(result.JobInfo.Tasks[0].CenterID[0]), grampus.BucketRemote, grampus.GetNpuModelObjectKey(task.JobName), grampus.GetCenterProxy(setting.Grampus.LocalCenterID))
+ }
}
}
err = models.UpdateJob(task)
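
This hunk and the one in routers/api/v1/repo/modelarts.go above (plus the matching change in routers/repo/grampus.go below) add the same guard: urchin.GetBackNpuModel is only called when the terminal job reports exactly one CenterID, where the call indexes Tasks[0].CenterID[0]. A hedged sketch of that condition pulled into a standalone helper; shouldFetchBackNpuModel and isTerminal are illustrative names, not part of this commit:

package main

import "fmt"

// isTerminal stands in for models.IsTrainJobTerminal; the real set of
// terminal states lives in models/cloudbrain.go.
func isTerminal(status string) bool {
	return status == "STOPPED" || status == "FAILED" || status == "SUCCEEDED"
}

// shouldFetchBackNpuModel is a hypothetical helper expressing the guard
// added in this commit: fetch the trained model back only when the job
// has finished and ran in exactly one compute center.
func shouldFetchBackNpuModel(status string, centerIDs []string) bool {
	return isTerminal(status) && len(centerIDs) == 1
}

func main() {
	fmt.Println(shouldFetchBackNpuModel("STOPPED", []string{"center-a"}))             // true
	fmt.Println(shouldFetchBackNpuModel("STOPPED", []string{"center-a", "center-b"})) // false
}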


routers/repo/grampus.go (+5, -1)

@@ -872,7 +872,9 @@ func GrampusTrainJobShow(ctx *context.Context) {
if oldStatus != task.Status {
notification.NotifyChangeCloudbrainStatus(task, oldStatus)
if models.IsTrainJobTerminal(task.Status) {
- urchin.GetBackNpuModel(task.ID, grampus.GetRemoteEndPoint(result.JobInfo.Tasks[0].CenterID[0]), grampus.BucketRemote, grampus.GetNpuModelObjectKey(task.JobName), grampus.GetCenterProxy(setting.Grampus.LocalCenterID))
+ if len(result.JobInfo.Tasks[0].CenterID) == 1 {
+ urchin.GetBackNpuModel(task.ID, grampus.GetRemoteEndPoint(result.JobInfo.Tasks[0].CenterID[0]), grampus.BucketRemote, grampus.GetNpuModelObjectKey(task.JobName), grampus.GetCenterProxy(setting.Grampus.LocalCenterID))
+ }
}
}
err = models.UpdateJob(task)
@@ -1079,6 +1081,7 @@ func generateDatasetUnzipCommand(datasetName string) string {
if strings.HasSuffix(datasetNameArray[0], ".tar.gz") {
unZipDatasetCommand = "tar --strip-components=1 -zxvf '" + datasetName + "';"
}
+ unZipDatasetCommand += "rm -f " + datasetName + ";"

} else { //多数据集
for _, datasetNameTemp := range datasetNameArray {
@@ -1087,6 +1090,7 @@ func generateDatasetUnzipCommand(datasetName string) string {
} else {
unZipDatasetCommand = unZipDatasetCommand + "unzip -q '" + datasetNameTemp + "' -d './" + strings.TrimSuffix(datasetNameTemp, ".zip") + "';"
}
+ unZipDatasetCommand += "rm -f " + datasetNameTemp + ";"
}

}
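
The last two hunks make generateDatasetUnzipCommand append rm -f after each archive is extracted, so the original .zip or .tar.gz is deleted from the job workspace once it has been unpacked, both in the single-dataset branch and in the multi-dataset loop (the 多数据集 comment means "multiple datasets"). A simplified sketch of the single-dataset branch after this change; buildUnzipCommand and the sample dataset name are illustrative only:

package main

import (
	"fmt"
	"strings"
)

// buildUnzipCommand is a simplified stand-in for the single-dataset
// branch of generateDatasetUnzipCommand after this commit.
func buildUnzipCommand(datasetName string) string {
	cmd := "unzip -q '" + datasetName + "';"
	if strings.HasSuffix(datasetName, ".tar.gz") {
		cmd = "tar --strip-components=1 -zxvf '" + datasetName + "';"
	}
	// New in this commit: remove the archive once it has been unpacked.
	cmd += "rm -f " + datasetName + ";"
	return cmd
}

func main() {
	fmt.Println(buildUnzipCommand("mnist.zip"))
	// Output: unzip -q 'mnist.zip';rm -f mnist.zip;
}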

