@@ -1291,7 +1291,8 @@ func QueryModelTrainJobVersionList(jobId string) ([]*CloudbrainInfo, int, error) | |||
builder.Eq{"cloudbrain.job_id": jobId}, | |||
) | |||
cond = cond.And( | |||
builder.Eq{"cloudbrain.Status": "COMPLETED"}, | |||
builder.In("cloudbrain.Status", "COMPLETED", "SUCCEEDED"), | |||
//builder.Eq{"cloudbrain.Status": "COMPLETED"}, | |||
) | |||
sess.OrderBy("cloudbrain.created_unix DESC") | |||
@@ -1312,7 +1313,7 @@ func QueryModelTrainJobList(repoId int64) ([]*CloudbrainInfo, int, error) { | |||
builder.Eq{"repo_id": repoId}, | |||
) | |||
cond = cond.And( | |||
builder.Eq{"Status": "COMPLETED"}, | |||
builder.In("Status", "COMPLETED", "SUCCEEDED"), | |||
) | |||
cond = cond.And( | |||
builder.Eq{"job_type": "TRAIN"}, | |||
@@ -39,6 +39,14 @@ func (l *LocalStorage) Open(path string) (io.ReadCloser, error) { | |||
return f, nil | |||
} | |||
func (l *LocalStorage) DownloadAFile(bucket string, objectName string) (io.ReadCloser, error) { | |||
f, err := os.Open(filepath.Join(l.dir, objectName)) | |||
if err != nil { | |||
return nil, err | |||
} | |||
return f, nil | |||
} | |||
// Save save a file | |||
func (l *LocalStorage) Save(path string, r io.Reader) (int64, error) { | |||
p := filepath.Join(l.dir, path) | |||
@@ -59,6 +59,16 @@ func (m *MinioStorage) buildMinioPath(p string) string { | |||
return strings.TrimPrefix(path.Join(m.basePath, p), "/") | |||
} | |||
func (m *MinioStorage) DownloadAFile(bucket string, objectName string) (io.ReadCloser, error) { | |||
var opts = minio.GetObjectOptions{} | |||
object, err := m.client.GetObject(m.bucket, objectName, opts) | |||
if err != nil { | |||
return nil, err | |||
} | |||
return object, nil | |||
} | |||
// Open open a file | |||
func (m *MinioStorage) Open(path string) (io.ReadCloser, error) { | |||
var opts = minio.GetObjectOptions{} | |||
@@ -113,7 +113,141 @@ func GenMultiPartSignedUrl(uuid string, uploadId string, partNumber int, partSiz | |||
objectName := strings.TrimPrefix(path.Join(minio.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid)), "/") | |||
return minioClient.GenUploadPartSignedUrl(uploadId, bucketName, objectName, partNumber, partSize, PresignedUploadPartUrlExpireTime, setting.Attachment.Minio.Location) | |||
} | |||
func GetAllObjectByBucketAndPrefixMinio(bucket string, prefix string) ([]FileInfo, error) { | |||
_, core, err := getClients() | |||
if err != nil { | |||
log.Error("getClients failed:", err.Error()) | |||
return nil, err | |||
} | |||
prefixLen := len(prefix) | |||
delimiter := "" | |||
marker := "" | |||
index := 1 | |||
fileInfoList := FileInfoList{} | |||
for { | |||
output, err := core.ListObjects(bucket, prefix, marker, delimiter, 1000) | |||
if err == nil { | |||
log.Info("Page:%d\n", index) | |||
index++ | |||
for _, val := range output.Contents { | |||
var isDir bool | |||
if prefixLen == len(val.Key) { | |||
continue | |||
} | |||
if strings.HasSuffix(val.Key, "/") { | |||
isDir = true | |||
} else { | |||
isDir = false | |||
} | |||
fileInfo := FileInfo{ | |||
ModTime: val.LastModified.Format("2006-01-02 15:04:05"), | |||
FileName: val.Key[prefixLen:], | |||
Size: val.Size, | |||
IsDir: isDir, | |||
ParenDir: "", | |||
} | |||
fileInfoList = append(fileInfoList, fileInfo) | |||
} | |||
if output.IsTruncated { | |||
marker = output.NextMarker | |||
} else { | |||
break | |||
} | |||
} else { | |||
log.Info("list error." + err.Error()) | |||
return nil, err | |||
} | |||
} | |||
sort.Sort(fileInfoList) | |||
return fileInfoList, nil | |||
} | |||
func GetOneLevelAllObjectUnderDirMinio(bucket string, prefixRootPath string, relativePath string) ([]FileInfo, error) { | |||
_, core, err := getClients() | |||
if err != nil { | |||
log.Error("getClients failed:", err.Error()) | |||
return nil, err | |||
} | |||
Prefix := prefixRootPath + relativePath | |||
if !strings.HasSuffix(Prefix, "/") { | |||
Prefix += "/" | |||
} | |||
log.Info("bucket=" + bucket + " Prefix=" + Prefix) | |||
output, err := core.ListObjects(bucket, Prefix, "", "", 1000) | |||
fileInfos := make([]FileInfo, 0) | |||
prefixLen := len(Prefix) | |||
if err == nil { | |||
for _, val := range output.Contents { | |||
log.Info("val key=" + val.Key) | |||
var isDir bool | |||
var fileName string | |||
if val.Key == Prefix { | |||
continue | |||
} | |||
if strings.Contains(val.Key[prefixLen:len(val.Key)-1], "/") { | |||
continue | |||
} | |||
if strings.HasSuffix(val.Key, "/") { | |||
isDir = true | |||
fileName = val.Key[prefixLen : len(val.Key)-1] | |||
relativePath += val.Key[prefixLen:] | |||
} else { | |||
isDir = false | |||
fileName = val.Key[prefixLen:] | |||
} | |||
fileInfo := FileInfo{ | |||
ModTime: val.LastModified.Local().Format("2006-01-02 15:04:05"), | |||
FileName: fileName, | |||
Size: val.Size, | |||
IsDir: isDir, | |||
ParenDir: relativePath, | |||
} | |||
fileInfos = append(fileInfos, fileInfo) | |||
} | |||
return fileInfos, err | |||
} else { | |||
log.Error("Message:%s", err.Error()) | |||
return nil, err | |||
} | |||
} | |||
func MinioPathCopy(bucketName string, srcPath string, destPath string) (int64, error) { | |||
_, core, err := getClients() | |||
var fileTotalSize int64 | |||
fileTotalSize = 0 | |||
if err != nil { | |||
log.Error("getClients failed:", err.Error()) | |||
return fileTotalSize, err | |||
} | |||
delimiter := "" | |||
marker := "" | |||
for { | |||
output, err := core.ListObjects(bucketName, srcPath, marker, delimiter, 1000) | |||
if err == nil { | |||
for _, val := range output.Contents { | |||
srcObjectName := val.Key | |||
destObjectName := destPath + srcObjectName[len(srcPath):] | |||
log.Info("srcObjectName=" + srcObjectName + " destObjectName=" + destObjectName) | |||
core.CopyObject(bucketName, srcObjectName, bucketName, destObjectName, val.UserMetadata) | |||
fileTotalSize += val.Size | |||
} | |||
if output.IsTruncated { | |||
marker = output.NextMarker | |||
} else { | |||
break | |||
} | |||
} else { | |||
log.Info("list error." + err.Error()) | |||
return 0, err | |||
} | |||
} | |||
return fileTotalSize, nil | |||
} | |||
func NewMultiPartUpload(uuid string) (string, error) { | |||
@@ -22,6 +22,7 @@ const ( | |||
type ObjectStorage interface { | |||
Save(path string, r io.Reader) (int64, error) | |||
Open(path string) (io.ReadCloser, error) | |||
DownloadAFile(bucket string, objectName string) (io.ReadCloser, error) | |||
Delete(path string) error | |||
DeleteDir(dir string) error | |||
PresignedGetURL(path string, fileName string) (string, error) | |||
@@ -6,6 +6,7 @@ import ( | |||
"errors" | |||
"fmt" | |||
"net/http" | |||
"net/url" | |||
"path" | |||
"strings" | |||
@@ -27,19 +28,22 @@ const ( | |||
MODEL_NOT_LATEST = 0 | |||
) | |||
func saveModelByParameters(jobId string, versionName string, name string, version string, label string, description string, ctx *context.Context) error { | |||
func saveModelByParameters(jobId string, versionName string, name string, version string, label string, description string, engine int, ctx *context.Context) error { | |||
aiTask, err := models.GetCloudbrainByJobIDAndVersionName(jobId, versionName) | |||
if err != nil { | |||
log.Info("query task error." + err.Error()) | |||
return err | |||
aiTask, err = models.GetRepoCloudBrainByJobID(ctx.Repo.Repository.ID, jobId) | |||
if err != nil { | |||
log.Info("query task error." + err.Error()) | |||
return err | |||
} else { | |||
log.Info("query gpu train task.") | |||
} | |||
} | |||
uuid := uuid.NewV4() | |||
id := uuid.String() | |||
modelPath := id | |||
var lastNewModelId string | |||
var modelSize int64 | |||
cloudType := models.TypeCloudBrainTwo | |||
log.Info("find task name:" + aiTask.JobName) | |||
aimodels := models.QueryModelByName(name, aiTask.RepoID) | |||
@@ -53,7 +57,7 @@ func saveModelByParameters(jobId string, versionName string, name string, versio | |||
} | |||
} | |||
} | |||
cloudType = aiTask.Type | |||
cloudType := aiTask.Type | |||
//download model zip //train type | |||
if cloudType == models.TypeCloudBrainTwo { | |||
modelPath, modelSize, err = downloadModelFromCloudBrainTwo(id, aiTask.JobName, "", aiTask.TrainUrl) | |||
@@ -61,6 +65,12 @@ func saveModelByParameters(jobId string, versionName string, name string, versio | |||
log.Info("download model from CloudBrainTwo faild." + err.Error()) | |||
return err | |||
} | |||
} else if cloudType == models.TypeCloudBrainOne { | |||
modelPath, modelSize, err = downloadModelFromCloudBrainOne(id, aiTask.JobName, "", aiTask.TrainUrl) | |||
if err != nil { | |||
log.Info("download model from CloudBrainOne faild." + err.Error()) | |||
return err | |||
} | |||
} | |||
accuracy := make(map[string]string) | |||
accuracy["F1"] = "" | |||
@@ -131,7 +141,7 @@ func SaveNewNameModel(ctx *context.Context) { | |||
return | |||
} | |||
SaveModel(ctx) | |||
ctx.Status(200) | |||
log.Info("save model end.") | |||
} | |||
@@ -143,8 +153,9 @@ func SaveModel(ctx *context.Context) { | |||
version := ctx.Query("Version") | |||
label := ctx.Query("Label") | |||
description := ctx.Query("Description") | |||
engine := ctx.QueryInt("Engine") | |||
trainTaskCreate := ctx.QueryBool("trainTaskCreate") | |||
log.Info("engine=" + fmt.Sprint(engine)) | |||
if !trainTaskCreate { | |||
if !ctx.Repo.CanWrite(models.UnitTypeModelManage) { | |||
//ctx.NotFound(ctx.Req.URL.RequestURI(), nil) | |||
@@ -163,14 +174,14 @@ func SaveModel(ctx *context.Context) { | |||
return | |||
} | |||
err := saveModelByParameters(JobId, VersionName, name, version, label, description, ctx) | |||
err := saveModelByParameters(JobId, VersionName, name, version, label, description, engine, ctx) | |||
if err != nil { | |||
log.Info("save model error." + err.Error()) | |||
ctx.Error(500, fmt.Sprintf("save model error. %v", err)) | |||
return | |||
} | |||
ctx.Status(200) | |||
log.Info("save model end.") | |||
} | |||
@@ -199,6 +210,22 @@ func downloadModelFromCloudBrainTwo(modelUUID string, jobName string, parentDir | |||
return dataActualPath, size, nil | |||
} | |||
func downloadModelFromCloudBrainOne(modelUUID string, jobName string, parentDir string, trainUrl string) (string, int64, error) { | |||
modelActualPath := storage.GetMinioPath(jobName, "/model/") | |||
log.Info("modelActualPath=" + modelActualPath) | |||
modelSrcPrefix := setting.CBCodePathPrefix + jobName + "/model/" | |||
destKeyNamePrefix := Model_prefix + models.AttachmentRelativePath(modelUUID) + "/" | |||
bucketName := setting.Attachment.Minio.Bucket | |||
log.Info("destKeyNamePrefix=" + destKeyNamePrefix + " modelSrcPrefix=" + modelSrcPrefix + " bucket=" + bucketName) | |||
size, err := storage.MinioPathCopy(bucketName, modelSrcPrefix, destKeyNamePrefix) | |||
if err == nil { | |||
dataActualPath := bucketName + "/" + destKeyNamePrefix | |||
return dataActualPath, size, nil | |||
} else { | |||
return "", 0, nil | |||
} | |||
} | |||
func DeleteModel(ctx *context.Context) { | |||
log.Info("delete model start.") | |||
id := ctx.Query("ID") | |||
@@ -277,51 +304,117 @@ func DownloadMultiModelFile(ctx *context.Context) { | |||
} | |||
path := Model_prefix + models.AttachmentRelativePath(id) + "/" | |||
if task.Type == models.TypeCloudBrainTwo { | |||
downloadFromCloudBrainTwo(path, task, ctx, id) | |||
} else if task.Type == models.TypeCloudBrainOne { | |||
downloadFromCloudBrainOne(path, task, ctx, id) | |||
} | |||
} | |||
allFile, err := storage.GetAllObjectByBucketAndPrefix(setting.Bucket, path) | |||
func MinioDownloadManyFile(path string, ctx *context.Context, returnFileName string, allFile []storage.FileInfo) { | |||
ctx.Resp.Header().Set("Content-Disposition", "attachment; filename="+url.QueryEscape(returnFileName)) | |||
ctx.Resp.Header().Set("Content-Type", "application/octet-stream") | |||
w := zip.NewWriter(ctx.Resp) | |||
defer w.Close() | |||
for _, oneFile := range allFile { | |||
if oneFile.IsDir { | |||
log.Info("zip dir name:" + oneFile.FileName) | |||
} else { | |||
log.Info("zip file name:" + oneFile.FileName) | |||
fDest, err := w.Create(oneFile.FileName) | |||
if err != nil { | |||
log.Info("create zip entry error, download file failed: %s\n", err.Error()) | |||
ctx.ServerError("download file failed:", err) | |||
return | |||
} | |||
log.Info("minio file path=" + (path + oneFile.FileName)) | |||
body, err := storage.Attachments.DownloadAFile(setting.Attachment.Minio.Bucket, path+oneFile.FileName) | |||
if err != nil { | |||
log.Info("download file failed: %s\n", err.Error()) | |||
ctx.ServerError("download file failed:", err) | |||
return | |||
} else { | |||
defer body.Close() | |||
p := make([]byte, 1024) | |||
var readErr error | |||
var readCount int | |||
// 读取对象内容 | |||
for { | |||
readCount, readErr = body.Read(p) | |||
if readCount > 0 { | |||
fDest.Write(p[:readCount]) | |||
} | |||
if readErr != nil { | |||
break | |||
} | |||
} | |||
} | |||
} | |||
} | |||
} | |||
func downloadFromCloudBrainOne(path string, task *models.AiModelManage, ctx *context.Context, id string) { | |||
allFile, err := storage.GetAllObjectByBucketAndPrefixMinio(setting.Attachment.Minio.Bucket, path) | |||
if err == nil { | |||
//count++ | |||
models.ModifyModelDownloadCount(id) | |||
returnFileName := task.Name + "_" + task.Version + ".zip" | |||
ctx.Resp.Header().Set("Content-Disposition", "attachment; filename="+returnFileName) | |||
ctx.Resp.Header().Set("Content-Type", "application/octet-stream") | |||
w := zip.NewWriter(ctx.Resp) | |||
defer w.Close() | |||
for _, oneFile := range allFile { | |||
if oneFile.IsDir { | |||
log.Info("zip dir name:" + oneFile.FileName) | |||
MinioDownloadManyFile(path, ctx, returnFileName, allFile) | |||
} else { | |||
log.Info("error,msg=" + err.Error()) | |||
ctx.ServerError("no file to download.", err) | |||
} | |||
} | |||
func ObsDownloadManyFile(path string, ctx *context.Context, returnFileName string, allFile []storage.FileInfo) { | |||
ctx.Resp.Header().Set("Content-Disposition", "attachment; filename="+url.QueryEscape(returnFileName)) | |||
ctx.Resp.Header().Set("Content-Type", "application/octet-stream") | |||
w := zip.NewWriter(ctx.Resp) | |||
defer w.Close() | |||
for _, oneFile := range allFile { | |||
if oneFile.IsDir { | |||
log.Info("zip dir name:" + oneFile.FileName) | |||
} else { | |||
log.Info("zip file name:" + oneFile.FileName) | |||
fDest, err := w.Create(oneFile.FileName) | |||
if err != nil { | |||
log.Info("create zip entry error, download file failed: %s\n", err.Error()) | |||
ctx.ServerError("download file failed:", err) | |||
return | |||
} | |||
body, err := storage.ObsDownloadAFile(setting.Bucket, path+oneFile.FileName) | |||
if err != nil { | |||
log.Info("download file failed: %s\n", err.Error()) | |||
ctx.ServerError("download file failed:", err) | |||
return | |||
} else { | |||
log.Info("zip file name:" + oneFile.FileName) | |||
fDest, err := w.Create(oneFile.FileName) | |||
if err != nil { | |||
log.Info("create zip entry error, download file failed: %s\n", err.Error()) | |||
ctx.ServerError("download file failed:", err) | |||
return | |||
} | |||
body, err := storage.ObsDownloadAFile(setting.Bucket, path+oneFile.FileName) | |||
if err != nil { | |||
log.Info("download file failed: %s\n", err.Error()) | |||
ctx.ServerError("download file failed:", err) | |||
return | |||
} else { | |||
defer body.Close() | |||
p := make([]byte, 1024) | |||
var readErr error | |||
var readCount int | |||
// 读取对象内容 | |||
for { | |||
readCount, readErr = body.Read(p) | |||
if readCount > 0 { | |||
fDest.Write(p[:readCount]) | |||
} | |||
if readErr != nil { | |||
break | |||
} | |||
defer body.Close() | |||
p := make([]byte, 1024) | |||
var readErr error | |||
var readCount int | |||
// 读取对象内容 | |||
for { | |||
readCount, readErr = body.Read(p) | |||
if readCount > 0 { | |||
fDest.Write(p[:readCount]) | |||
} | |||
if readErr != nil { | |||
break | |||
} | |||
} | |||
} | |||
} | |||
} | |||
} | |||
func downloadFromCloudBrainTwo(path string, task *models.AiModelManage, ctx *context.Context, id string) { | |||
allFile, err := storage.GetAllObjectByBucketAndPrefix(setting.Bucket, path) | |||
if err == nil { | |||
//count++ | |||
models.ModifyModelDownloadCount(id) | |||
returnFileName := task.Name + "_" + task.Version + ".zip" | |||
ObsDownloadManyFile(path, ctx, returnFileName, allFile) | |||
} else { | |||
log.Info("error,msg=" + err.Error()) | |||
ctx.ServerError("no file to download.", err) | |||
@@ -374,42 +467,55 @@ func DownloadSingleModelFile(ctx *context.Context) { | |||
ctx.NotFound(ctx.Req.URL.RequestURI(), nil) | |||
return | |||
} | |||
if setting.PROXYURL != "" { | |||
body, err := storage.ObsDownloadAFile(setting.Bucket, path) | |||
if err != nil { | |||
log.Info("download error.") | |||
if task.Type == models.TypeCloudBrainTwo { | |||
if setting.PROXYURL != "" { | |||
body, err := storage.ObsDownloadAFile(setting.Bucket, path) | |||
if err != nil { | |||
log.Info("download error.") | |||
} else { | |||
//count++ | |||
models.ModifyModelDownloadCount(id) | |||
defer body.Close() | |||
ctx.Resp.Header().Set("Content-Disposition", "attachment; filename="+fileName) | |||
ctx.Resp.Header().Set("Content-Type", "application/octet-stream") | |||
p := make([]byte, 1024) | |||
var readErr error | |||
var readCount int | |||
// 读取对象内容 | |||
for { | |||
readCount, readErr = body.Read(p) | |||
if readCount > 0 { | |||
ctx.Resp.Write(p[:readCount]) | |||
//fmt.Printf("%s", p[:readCount]) | |||
} | |||
if readErr != nil { | |||
break | |||
} | |||
} | |||
} | |||
} else { | |||
url, err := storage.GetObsCreateSignedUrlByBucketAndKey(setting.Bucket, path) | |||
if err != nil { | |||
log.Error("GetObsCreateSignedUrl failed: %v", err.Error(), ctx.Data["msgID"]) | |||
ctx.ServerError("GetObsCreateSignedUrl", err) | |||
return | |||
} | |||
//count++ | |||
models.ModifyModelDownloadCount(id) | |||
defer body.Close() | |||
ctx.Resp.Header().Set("Content-Disposition", "attachment; filename="+fileName) | |||
ctx.Resp.Header().Set("Content-Type", "application/octet-stream") | |||
p := make([]byte, 1024) | |||
var readErr error | |||
var readCount int | |||
// 读取对象内容 | |||
for { | |||
readCount, readErr = body.Read(p) | |||
if readCount > 0 { | |||
ctx.Resp.Write(p[:readCount]) | |||
//fmt.Printf("%s", p[:readCount]) | |||
} | |||
if readErr != nil { | |||
break | |||
} | |||
} | |||
http.Redirect(ctx.Resp, ctx.Req.Request, url, http.StatusMovedPermanently) | |||
} | |||
} else { | |||
url, err := storage.GetObsCreateSignedUrlByBucketAndKey(setting.Bucket, path) | |||
} else if task.Type == models.TypeCloudBrainOne { | |||
log.Info("start to down load minio file.") | |||
url, err := storage.Attachments.PresignedGetURL(path, fileName) | |||
if err != nil { | |||
log.Error("GetObsCreateSignedUrl failed: %v", err.Error(), ctx.Data["msgID"]) | |||
ctx.ServerError("GetObsCreateSignedUrl", err) | |||
log.Error("Get minio get SignedUrl failed: %v", err.Error(), ctx.Data["msgID"]) | |||
ctx.ServerError("Get minio get SignedUrl failed", err) | |||
return | |||
} | |||
//count++ | |||
models.ModifyModelDownloadCount(id) | |||
http.Redirect(ctx.Resp, ctx.Req.Request, url, http.StatusMovedPermanently) | |||
} | |||
} | |||
func ShowModelInfo(ctx *context.Context) { | |||
@@ -684,14 +790,22 @@ func QueryModelListForPredict(ctx *context.Context) { | |||
func QueryModelFileForPredict(ctx *context.Context) { | |||
id := ctx.Query("ID") | |||
model, err := models.QueryModelById(id) | |||
if err != nil { | |||
if err == nil { | |||
if model.Type == models.TypeCloudBrainTwo { | |||
prefix := model.Path[len(setting.Bucket)+1:] | |||
fileinfos, _ := storage.GetAllObjectByBucketAndPrefix(setting.Bucket, prefix) | |||
ctx.JSON(http.StatusOK, fileinfos) | |||
} else if model.Type == models.TypeCloudBrainOne { | |||
prefix := model.Path[len(setting.Attachment.Minio.Bucket)+1:] | |||
fileinfos, _ := storage.GetAllObjectByBucketAndPrefixMinio(setting.Attachment.Minio.Bucket, prefix) | |||
ctx.JSON(http.StatusOK, fileinfos) | |||
} | |||
} else { | |||
log.Error("no such model!", err.Error()) | |||
ctx.ServerError("no such model:", err) | |||
return | |||
} | |||
prefix := model.Path[len(setting.Bucket)+1:] | |||
fileinfos, err := storage.GetAllObjectByBucketAndPrefix(setting.Bucket, prefix) | |||
ctx.JSON(http.StatusOK, fileinfos) | |||
} | |||
func QueryOneLevelModelFile(ctx *context.Context) { | |||
@@ -703,7 +817,16 @@ func QueryOneLevelModelFile(ctx *context.Context) { | |||
ctx.ServerError("no such model:", err) | |||
return | |||
} | |||
prefix := model.Path[len(setting.Bucket)+1:] | |||
fileinfos, err := storage.GetOneLevelAllObjectUnderDir(setting.Bucket, prefix, parentDir) | |||
ctx.JSON(http.StatusOK, fileinfos) | |||
if model.Type == models.TypeCloudBrainTwo { | |||
log.Info("TypeCloudBrainTwo list model file.") | |||
prefix := model.Path[len(setting.Bucket)+1:] | |||
fileinfos, _ := storage.GetOneLevelAllObjectUnderDir(setting.Bucket, prefix, parentDir) | |||
ctx.JSON(http.StatusOK, fileinfos) | |||
} else if model.Type == models.TypeCloudBrainOne { | |||
log.Info("TypeCloudBrainOne list model file.") | |||
prefix := model.Path[len(setting.Attachment.Minio.Bucket)+1:] | |||
fileinfos, _ := storage.GetOneLevelAllObjectUnderDirMinio(setting.Attachment.Minio.Bucket, prefix, parentDir) | |||
ctx.JSON(http.StatusOK, fileinfos) | |||
} | |||
} |
@@ -1093,9 +1093,9 @@ func RegisterRoutes(m *macaron.Macaron) { | |||
m.Get("/show_model_child_api", repo.ShowOneVersionOtherModel) | |||
m.Get("/query_train_job", reqRepoCloudBrainReader, repo.QueryTrainJobList) | |||
m.Get("/query_train_job_version", reqRepoCloudBrainReader, repo.QueryTrainJobVersionList) | |||
m.Get("/query_model_for_predict", reqRepoCloudBrainReader, repo.QueryModelListForPredict) | |||
m.Get("/query_modelfile_for_predict", reqRepoCloudBrainReader, repo.QueryModelFileForPredict) | |||
m.Get("/query_onelevel_modelfile", reqRepoCloudBrainReader, repo.QueryOneLevelModelFile) | |||
m.Get("/query_model_for_predict", reqRepoModelManageReader, repo.QueryModelListForPredict) | |||
m.Get("/query_modelfile_for_predict", reqRepoModelManageReader, repo.QueryModelFileForPredict) | |||
m.Get("/query_onelevel_modelfile", reqRepoModelManageReader, repo.QueryOneLevelModelFile) | |||
m.Group("/:ID", func() { | |||
m.Get("", repo.ShowSingleModel) | |||
m.Get("/downloadsingle", repo.DownloadSingleModelFile) | |||
@@ -130,6 +130,19 @@ | |||
<label>模型版本</label> | |||
<input style="width: 45%;" id="version" name="Version" value="" readonly required maxlength="255"> | |||
</div> | |||
<div class="unite min_title inline field required"> | |||
<label>模型框架</label> | |||
<div class="ui dropdown selection search width70" id="choice_Engine"> | |||
<input type="hidden" id="Engine" name="Engine" required> | |||
<div class="default text">选择模型框架</div> | |||
<i class="dropdown icon"></i> | |||
<div class="menu" id="job-Engine"> | |||
</div> | |||
</div> | |||
</div> | |||
<div class="inline field"> | |||
<label>模型标签</label> | |||
<input style="width: 83%;margin-left: 7px;" id="label" name="Label" maxlength="255" placeholder='{{.i18n.Tr "repo.modelarts.train_job.label_place"}}'> | |||
@@ -161,7 +174,7 @@ | |||
let repoId = {{$repository}} | |||
const {_AppSubUrl, _StaticUrlPrefix, csrf} = window.config; | |||
$('input[name="_csrf"]').val(csrf) | |||
let modelData; | |||
function createModelName(){ | |||
let repoName = location.pathname.split('/')[2] | |||
let modelName = repoName + '_model_' + Math.random().toString(36).substr(2, 4) | |||
@@ -185,6 +198,7 @@ | |||
document.getElementById("formId").reset(); | |||
$('#choice_model').dropdown('clear') | |||
$('#choice_version').dropdown('clear') | |||
$('#choice_Engine').dropdown('clear') | |||
$('.ui.dimmer').css({"background-color":""}) | |||
$('.ui.error.message').text() | |||
$('.ui.error.message').css('display','none') | |||
@@ -197,10 +211,24 @@ | |||
$(function(){ | |||
$('#choice_model').dropdown({ | |||
onChange:function(value){ | |||
$(".ui.dropdown.selection.search.width70").addClass("loading") | |||
$('#choice_version').dropdown('clear') | |||
$("#job-version").empty() | |||
loadTrainVersion(value) | |||
$(".ui.dropdown.selection.search.width70").addClass("loading") | |||
$('#choice_version').dropdown('clear') | |||
$("#job-version").empty() | |||
loadTrainVersion(value) | |||
} | |||
}) | |||
$('#choice_version').dropdown({ | |||
onChange:function(value){ | |||
console.log("model version:" + value); | |||
if(modelData != null){ | |||
for(var i=0; i < modelData.length;i++){ | |||
if(modelData[i].VersionName == value){ | |||
setEngine(modelData[i]) | |||
break; | |||
} | |||
} | |||
} | |||
} | |||
}) | |||
}) | |||
@@ -240,7 +268,8 @@ | |||
let JobID = !value ?$('#choice_model input[name="JobId"]').val(): value | |||
$.get(`${repolink}/modelmanage/query_train_job_version?JobID=${JobID}`, (data) => { | |||
const n_length = data.length | |||
let train_html='' | |||
let train_html=''; | |||
modelData = data; | |||
for (let i=0;i<n_length;i++){ | |||
train_html += `<div class="item" data-value="${data[i].VersionName}">${data[i].VersionName}</div>` | |||
train_html += '</div>' | |||
@@ -248,11 +277,59 @@ | |||
if(data.length){ | |||
$("#job-version").append(train_html) | |||
$(".ui.dropdown.selection.search.width70").removeClass("loading") | |||
$('#choice_version .default.text').text(data[0].VersionName) | |||
$('#choice_version input[name="VersionName"]').val(data[0].VersionName) | |||
var versionName = data[0].VersionName; | |||
if(versionName==null || versionName==""){ | |||
versionName="V0001"; | |||
} | |||
$('#choice_version .default.text').text(versionName) | |||
$('#choice_version input[name="VersionName"]').val(versionName) | |||
console.log("1111111111"); | |||
setEngine(data[0]) | |||
} | |||
}) | |||
} | |||
// setEngine populates the model-engine dropdown (#choice_Engine) from the
// selected model version. When the version already carries an EngineName the
// matching option is pre-selected and the dropdown is disabled; otherwise it
// defaults to Pytorch and stays editable.
function setEngine(modelVersion){
    console.log("modelVersion=" + modelVersion);
    $('#choice_Engine').dropdown('clear')
    $("#job-Engine").empty()
    if(modelVersion.EngineName != null && modelVersion.EngineName != ""){
        // EngineName looks like "<framework>-<version>"; keep the framework part.
        // BUG FIX: srcEngine was assigned without declaration (implicit global).
        let srcEngine = modelVersion.EngineName.split('-')[0];
        srcEngine = srcEngine.trim();
        let selectedText = "Pytorch";
        let selectedValue = 0;
        let itemHtml = "<option class=\"item\" data-value=\"0\">Pytorch</option>";
        if(srcEngine =='TensorFlow'){
            selectedText ="TensorFlow";
            selectedValue = 1;
            itemHtml += "<option class=\"active item\" data-value=\"1\">TensorFlow</option>";
        }else{
            itemHtml += "<option class=\"item\" data-value=\"1\">TensorFlow</option>";
        }
        if(srcEngine =='MindSpore'){
            selectedText ="MindSpore";
            selectedValue = 2;
            itemHtml += "<option class=\"active item\" data-value=\"2\">MindSpore</option>";
        }else{
            itemHtml += "<option class=\"item\" data-value=\"2\">MindSpore</option>";
        }
        itemHtml += "<option class=\"item\" data-value=\"3\">Other</option>"
        $('#choice_Engine .default.text').text(selectedText)
        $('#choice_Engine input[name="Engine"]').val(selectedValue)
        $("#job-Engine").append(itemHtml);
        // The engine is fixed by the train task, so the user may not change it.
        $("#choice_Engine").addClass('disabled')
    }else{
        let itemHtml = "<option class=\"active item\" data-value=\"0\">Pytorch</option>";
        itemHtml += "<option class=\"item\" data-value=\"1\">TensorFlow</option>"
        itemHtml += "<option class=\"item\" data-value=\"2\">MindSpore</option>"
        itemHtml += "<option class=\"item\" data-value=\"3\">Other</option>"
        $('#choice_Engine .default.text').text("Pytorch");
        $('#choice_Engine input[name="Engine"]').val(0)
        $("#job-Engine").append(itemHtml);
        $("#choice_Engine").removeClass('disabled');
    }
}
</script> | |||
@@ -168,7 +168,7 @@ export default { | |||
tableData= res.data | |||
for(let i=0;i<tableData.length;i++){ | |||
TrainTaskInfo = JSON.parse(tableData[i].TrainTaskInfo) | |||
tableData[i].EngineName = TrainTaskInfo.EngineName.split('-')[0] | |||
tableData[i].EngineName = this.getEngineName(tableData[i]) | |||
tableData[i].ComputeResource = TrainTaskInfo.ComputeResource | |||
tableData[i].cName=tableData[i].Name | |||
tableData[i].Name='' | |||
@@ -218,6 +218,7 @@ export default { | |||
$('input[name="Name"]').removeAttr('readonly') | |||
$('#choice_model').dropdown('clear') | |||
$('#choice_version').dropdown('clear') | |||
$('#choice_Engine').dropdown('clear') | |||
$('.ui.dimmer').css({"background-color":""}) | |||
$('.ui.error.message').text() | |||
$('.ui.error.message').css('display','none') | |||
@@ -347,6 +348,18 @@ export default { | |||
}) | |||
.modal('show') | |||
}, | |||
getEngineName(model){ | |||
if(model.Engine == 0){ | |||
return "Pytorch"; | |||
}else if(model.Engine == 1 || model.Engine == 121){ | |||
return "TensorFlow"; | |||
}else if(model.Engine == 2 || model.Engine == 122){ | |||
return "MindSpore"; | |||
}else{ | |||
return "Other" | |||
} | |||
}, | |||
getModelList(){ | |||
try { | |||
this.$refs.table.store.states.lazyTreeNodeMap = {} | |||
@@ -360,7 +373,7 @@ export default { | |||
for(let i=0;i<this.tableData.length;i++){ | |||
TrainTaskInfo = JSON.parse(this.tableData[i].TrainTaskInfo) | |||
this.tableData[i].cName=this.tableData[i].Name | |||
this.tableData[i].EngineName = TrainTaskInfo.EngineName.split('-')[0] | |||
this.tableData[i].EngineName = this.getEngineName(this.tableData[i]) | |||
this.tableData[i].ComputeResource = TrainTaskInfo.ComputeResource | |||
this.tableData[i].hasChildren = res.data.data[i].VersionCount===1 ? false : true | |||
} | |||
Dear OpenI User
Thank you for your continuous support of the OpenI Qizhi Community AI Collaboration Platform. In order to protect your usage rights and ensure network security, we updated the OpenI Qizhi Community AI Collaboration Platform Usage Agreement in January 2024. The updated agreement specifies that users are prohibited from using intranet penetration tools. After you click "Agree and continue", you can continue to use our services. Thank you for your cooperation and understanding.
For the full text of the agreement, please refer to the OpenI Qizhi Community AI Collaboration Platform Usage Agreement.