From a423bdaf8028bc3699fd5becafee0f806b8d85af Mon Sep 17 00:00:00 2001
From: zouap
Date: Tue, 15 Nov 2022 14:07:38 +0800
Subject: [PATCH] Commit code to fix a bug.
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Signed-off-by: zouap
---
 models/file_chunk.go             |  11 +-
 routers/repo/attachment_model.go | 227 ++++++++++++++++++---------------------
 2 files changed, 111 insertions(+), 127 deletions(-)

diff --git a/models/file_chunk.go b/models/file_chunk.go
index 339b5d7a4..cad7746b7 100755
--- a/models/file_chunk.go
+++ b/models/file_chunk.go
@@ -30,8 +30,9 @@ type FileChunk struct {
 
 type ModelFileChunk struct {
 	ID          int64  `xorm:"pk autoincr"`
-	UUID        string `xorm:"uuid UNIQUE"`
+	UUID        string `xorm:"INDEX"`
 	Md5         string `xorm:"INDEX"`
+	ModelUUID   string `xorm:"INDEX"`
 	ObjectName  string `xorm:"DEFAULT ''"`
 	IsUploaded  int    `xorm:"DEFAULT 0"` // not uploaded: 0, uploaded: 1
 	UploadID    string `xorm:"UNIQUE"`    //minio upload id
@@ -65,14 +66,14 @@ func GetFileChunkByMD5AndUser(md5 string, userID int64, typeCloudBrain int) (*Fi
 	return getFileChunkByMD5AndUser(x, md5, userID, typeCloudBrain)
 }
 
-func GetModelFileChunkByMD5AndUser(md5 string, userID int64, typeCloudBrain int) (*ModelFileChunk, error) {
-	return getModelFileChunkByMD5AndUser(x, md5, userID, typeCloudBrain)
+func GetModelFileChunkByMD5AndUser(md5 string, userID int64, typeCloudBrain int, uuid string) (*ModelFileChunk, error) {
+	return getModelFileChunkByMD5AndUser(x, md5, userID, typeCloudBrain, uuid)
 }
 
-func getModelFileChunkByMD5AndUser(e Engine, md5 string, userID int64, typeCloudBrain int) (*ModelFileChunk, error) {
+func getModelFileChunkByMD5AndUser(e Engine, md5 string, userID int64, typeCloudBrain int, uuid string) (*ModelFileChunk, error) {
 	fileChunk := new(ModelFileChunk)
 
-	if has, err := e.Where("md5 = ? and user_id = ? and type = ?", md5, userID, typeCloudBrain).Get(fileChunk); err != nil {
+	if has, err := e.Where("md5 = ? and user_id = ? and type = ? and model_uuid= ?", md5, userID, typeCloudBrain, uuid).Get(fileChunk); err != nil {
 		return nil, err
 	} else if !has {
 		return nil, ErrFileChunkNotExist{md5, ""}
diff --git a/routers/repo/attachment_model.go b/routers/repo/attachment_model.go
index 15da4f54f..b5b21b06b 100644
--- a/routers/repo/attachment_model.go
+++ b/routers/repo/attachment_model.go
@@ -31,7 +31,7 @@ func GetModelChunks(ctx *context.Context) {
 		return
 	}
 
-	fileChunk, err := models.GetModelFileChunkByMD5AndUser(fileMD5, ctx.User.ID, typeCloudBrain)
+	fileChunk, err := models.GetModelFileChunkByMD5AndUser(fileMD5, ctx.User.ID, typeCloudBrain, modeluuid)
 	if err != nil {
 		if models.IsErrFileChunkNotExist(err) {
 			ctx.JSON(200, map[string]string{
@@ -79,6 +79,21 @@ func GetModelChunks(ctx *context.Context) {
 				log.Error("UpdateFileChunk failed:", err.Error())
 			}
 		}
+		modelname := ""
+		model, err := models.QueryModelById(modeluuid)
+		if err == nil && model != nil {
+			modelname = model.Name
+		}
+		ctx.JSON(200, map[string]string{
+			"uuid":      fileChunk.UUID,
+			"uploaded":  strconv.Itoa(fileChunk.IsUploaded),
+			"uploadID":  fileChunk.UploadID,
+			"chunks":    string(chunks),
+			"attachID":  "0",
+			"modeluuid": modeluuid,
+			"fileName":  fileName,
+			"modelName": modelname,
+		})
 	} else {
 		if fileChunk.IsUploaded == models.FileUploaded {
 			log.Info("the file has been recorded but not uploaded")
@@ -107,120 +122,103 @@ func GetModelChunks(ctx *context.Context) {
 				"uploadID": "",
 				"chunks":   "",
 			})
-			return
-		}
-	}
-
-	var attachID int64
-	attach, err := models.GetAttachmentByUUID(fileChunk.UUID)
-	if err != nil {
-		if models.IsErrAttachmentNotExist(err) {
-			attachID = 0
-		} else {
-			ctx.ServerError("GetAttachmentByUUID", err)
-			return
-		}
-	} else {
-		attachID = attach.ID
-	}
-
-	if attach == nil {
-		ctx.JSON(200, map[string]string{
-			"uuid":        fileChunk.UUID,
-			"uploaded":    strconv.Itoa(fileChunk.IsUploaded),
-			"uploadID":    fileChunk.UploadID,
-			"chunks":      string(chunks),
-			"attachID":    "0",
-			"datasetID":   "0",
-			"fileName":    "",
-			"datasetName": "",
-		})
-		return
-	}
-
-	//使用description存储模型信息
-	dbmodeluuid := attach.Description
-	modelname := ""
-	if dbmodeluuid != modeluuid {
-		log.Info("The file has uploaded.fileChunk.ObjectName=" + fileChunk.ObjectName + " typeCloudBrain=" + fmt.Sprint(typeCloudBrain))
-		isExist := copyModelAttachmentFile(typeCloudBrain, fileChunk, fileName, modeluuid)
-		if isExist {
-			model, err := models.QueryModelById(modeluuid)
-			if err == nil && model != nil {
-				modelname = model.Name
-			}
-			ctx.JSON(200, map[string]string{
-				"uuid":      fileChunk.UUID,
-				"uploaded":  strconv.Itoa(fileChunk.IsUploaded),
-				"uploadID":  fileChunk.UploadID,
-				"chunks":    string(chunks),
-				"attachID":  strconv.Itoa(int(attachID)),
-				"modeluuid": modeluuid,
-				"fileName":  attach.Name,
-				"modelName": modelname,
-			})
 		} else {
-			UpdateModelSize(modeluuid)
 			ctx.JSON(200, map[string]string{
-				"uuid":     fileChunk.UUID,
-				"uploaded": strconv.Itoa(fileChunk.IsUploaded),
-				"uploadID": fileChunk.UploadID,
-				"chunks":   string(chunks),
-				"attachID": strconv.Itoa(int(attachID)),
-				"fileName": attach.Name,
+				"uuid":        fileChunk.UUID,
+				"uploaded":    strconv.Itoa(fileChunk.IsUploaded),
+				"uploadID":    fileChunk.UploadID,
+				"chunks":      string(chunks),
+				"attachID":    "0",
+				"datasetID":   "0",
+				"fileName":    "",
+				"datasetName": "",
 			})
 		}
-		return
-	} else {
-		model, err := models.QueryModelById(dbmodeluuid)
-		if err == nil {
-			modelname = model.Name
-		}
-		ctx.JSON(200, map[string]string{
-			"uuid":     fileChunk.UUID,
-			"uploaded": strconv.Itoa(fileChunk.IsUploaded),
-			"uploadID": fileChunk.UploadID,
-			"chunks":   
string(chunks), - "attachID": strconv.Itoa(int(attachID)), - "modeluuid": dbmodeluuid, - "fileName": attach.Name, - "modelName": modelname, - }) - return } -} - -func copyModelAttachmentFile(typeCloudBrain int, fileChunk *models.ModelFileChunk, fileName, modeluuid string) bool { - srcObjectName := fileChunk.ObjectName - var isExist bool - //copy - destObjectName := getObjectName(fileName, modeluuid) - if typeCloudBrain == models.TypeCloudBrainOne { - bucketName := setting.Attachment.Minio.Bucket - log.Info("minio copy..srcObjectName=" + srcObjectName + " bucketName=" + bucketName) - if storage.MinioGetFilesSize(bucketName, []string{destObjectName}) > 0 { - isExist = true - } else { + // //使用description存储模型信息 + // dbmodeluuid := attach.Description + // modelname := "" + // if dbmodeluuid != modeluuid { + // log.Info("The file has uploaded.fileChunk.ObjectName=" + fileChunk.ObjectName + " typeCloudBrain=" + fmt.Sprint(typeCloudBrain)) + // isExist := copyModelAttachmentFile(typeCloudBrain, fileChunk, fileName, modeluuid) + // if isExist { + // model, err := models.QueryModelById(modeluuid) + // if err == nil && model != nil { + // modelname = model.Name + // } + // ctx.JSON(200, map[string]string{ + // "uuid": fileChunk.UUID, + // "uploaded": strconv.Itoa(fileChunk.IsUploaded), + // "uploadID": fileChunk.UploadID, + // "chunks": string(chunks), + // "attachID": strconv.Itoa(int(attachID)), + // "modeluuid": modeluuid, + // "fileName": attach.Name, + // "modelName": modelname, + // }) + // } else { + // UpdateModelSize(modeluuid) + // ctx.JSON(200, map[string]string{ + // "uuid": fileChunk.UUID, + // "uploaded": strconv.Itoa(fileChunk.IsUploaded), + // "uploadID": fileChunk.UploadID, + // "chunks": string(chunks), + // "attachID": strconv.Itoa(int(attachID)), + // "fileName": attach.Name, + // }) + // } + // return + // } else { + // model, err := models.QueryModelById(dbmodeluuid) + // if err == nil { + // modelname = model.Name + // } + // ctx.JSON(200, map[string]string{ + // "uuid": fileChunk.UUID, + // "uploaded": strconv.Itoa(fileChunk.IsUploaded), + // "uploadID": fileChunk.UploadID, + // "chunks": string(chunks), + // "attachID": strconv.Itoa(int(attachID)), + // "modeluuid": dbmodeluuid, + // "fileName": attach.Name, + // "modelName": modelname, + // }) + // return + // } - log.Info("minio copy..srcObjectName=" + srcObjectName + " bucketName=" + bucketName) - storage.MinioCopyAFile(bucketName, srcObjectName, bucketName, destObjectName) - } - } else { - bucketName := setting.Bucket - log.Info("obs copy..srcObjectName=" + srcObjectName + " bucketName=" + bucketName + " destObjectName=" + destObjectName) - size := storage.ObsGetFilesSize(bucketName, []string{destObjectName}) - log.Info("size=" + fmt.Sprint(size)) - if size > 0 { - isExist = true - } else { - log.Info("obs copy..srcObjectName=" + srcObjectName + " bucketName=" + bucketName) - storage.ObsCopyFile(bucketName, srcObjectName, bucketName, destObjectName) - } - } - return isExist } +// func copyModelAttachmentFile(typeCloudBrain int, fileChunk *models.ModelFileChunk, fileName, modeluuid string) bool { +// srcObjectName := fileChunk.ObjectName +// var isExist bool +// //copy +// destObjectName := getObjectName(fileName, modeluuid) +// if typeCloudBrain == models.TypeCloudBrainOne { +// bucketName := setting.Attachment.Minio.Bucket +// log.Info("minio copy..srcObjectName=" + srcObjectName + " bucketName=" + bucketName) +// if storage.MinioGetFilesSize(bucketName, []string{destObjectName}) > 0 { +// isExist = true +// } else { + 
+// log.Info("minio copy..srcObjectName=" + srcObjectName + " bucketName=" + bucketName) +// storage.MinioCopyAFile(bucketName, srcObjectName, bucketName, destObjectName) +// } +// } else { +// bucketName := setting.Bucket +// log.Info("obs copy..srcObjectName=" + srcObjectName + " bucketName=" + bucketName + " destObjectName=" + destObjectName) +// size := storage.ObsGetFilesSize(bucketName, []string{destObjectName}) +// log.Info("size=" + fmt.Sprint(size)) +// if size > 0 { +// isExist = true +// } else { +// log.Info("obs copy..srcObjectName=" + srcObjectName + " bucketName=" + bucketName) +// storage.ObsCopyFile(bucketName, srcObjectName, bucketName, destObjectName) +// } +// } +// return isExist +// } + func getObjectName(filename string, modeluuid string) string { return strings.TrimPrefix(path.Join(Model_prefix, path.Join(modeluuid[0:1], modeluuid[1:2], modeluuid, filename)), "/") } @@ -286,6 +284,7 @@ func NewModelMultipart(ctx *context.Context) { Md5: ctx.Query("md5"), Size: fileSize, ObjectName: objectName, + ModelUUID: modeluuid, TotalChunks: totalChunkCounts, Type: typeCloudBrain, }) @@ -361,7 +360,6 @@ func CompleteModelMultipart(ctx *context.Context) { uuid := ctx.Query("uuid") uploadID := ctx.Query("uploadID") typeCloudBrain := ctx.QueryInt("type") - fileName := ctx.Query("file_name") modeluuid := ctx.Query("modeluuid") log.Warn("uuid:" + uuid) log.Warn("modeluuid:" + modeluuid) @@ -406,21 +404,6 @@ func CompleteModelMultipart(ctx *context.Context) { //更新模型大小信息 UpdateModelSize(modeluuid) - _, err = models.InsertAttachment(&models.Attachment{ - UUID: uuid, - UploaderID: ctx.User.ID, - IsPrivate: true, - Name: fileName, - Size: ctx.QueryInt64("size"), - DatasetID: 0, - Description: modeluuid, - Type: typeCloudBrain, - }) - - if err != nil { - ctx.Error(500, fmt.Sprintf("InsertAttachment: %v", err)) - return - } ctx.JSON(200, map[string]string{ "result_code": "0", })
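Note: the sketch below is for illustration only and is not part of the patch. It shows the lookup pattern the commit moves to: chunk records are keyed per model, so the same file (same md5) uploaded into two different models gets its own ModelFileChunk row for each model, consistent with UUID dropping its UNIQUE tag, the new ModelUUID column, and the extra model_uuid filter in the query. The xorm.io/xorm import path and the UserID and Type fields are assumptions added to make the example self-contained; they are not copied from this repository.

package sketch

import (
	"errors"

	"xorm.io/xorm"
)

// ModelFileChunk mirrors the patched struct in models/file_chunk.go:
// UUID is only indexed (no longer UNIQUE) and ModelUUID ties a chunk
// record to a single model.
type ModelFileChunk struct {
	ID          int64  `xorm:"pk autoincr"`
	UUID        string `xorm:"INDEX"`
	Md5         string `xorm:"INDEX"`
	ModelUUID   string `xorm:"INDEX"`
	ObjectName  string `xorm:"DEFAULT ''"`
	IsUploaded  int    `xorm:"DEFAULT 0"` // not uploaded: 0, uploaded: 1
	UploadID    string `xorm:"UNIQUE"`    // minio upload id
	UserID      int64  `xorm:"INDEX"`     // assumed field, referenced by the user_id filter below
	Type        int    `xorm:"INDEX"`     // assumed field, the cloudbrain type referenced by the type filter
	TotalChunks int
	Size        int64
}

// ErrModelFileChunkNotExist stands in for the repository's typed error value.
var ErrModelFileChunkNotExist = errors.New("model file chunk does not exist")

// getModelFileChunkByMD5AndUser mirrors the patched query: an md5 match for
// the same user and type is no longer enough, the record must also belong
// to the given model UUID.
func getModelFileChunkByMD5AndUser(e *xorm.Engine, md5 string, userID int64, typeCloudBrain int, modelUUID string) (*ModelFileChunk, error) {
	fileChunk := new(ModelFileChunk)
	has, err := e.Where("md5 = ? and user_id = ? and type = ? and model_uuid = ?",
		md5, userID, typeCloudBrain, modelUUID).Get(fileChunk)
	if err != nil {
		return nil, err
	}
	if !has {
		return nil, ErrModelFileChunkNotExist
	}
	return fileChunk, nil
}

With the lookup scoped per model, GetModelChunks can answer a resume-upload probe from the chunk record alone, which is why this patch retires the older path that stored the model UUID in the attachment description, inserted an Attachment row on completion, and copied objects across models via copyModelAttachmentFile.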