#3158 MLOPS-api

Merged
lewis merged 63 commits from api into V20221116 1 year ago
  1. models/ai_model_manage.go (+58, -58)
  2. models/attachment.go (+0, -138)
  3. models/base_message.go (+21, -0)
  4. models/dataset.go (+2, -2)
  5. models/repo.go (+2, -2)
  6. modules/auth/modelarts.go (+20, -23)
  7. modules/cloudbrain/cloudbrain.go (+6, -6)
  8. modules/convert/cloudbrain.go (+111, -0)
  9. modules/grampus/grampus.go (+4, -4)
  10. modules/modelarts/modelarts.go (+13, -13)
  11. modules/structs/attachment.go (+45, -0)
  12. modules/structs/cloudbrain.go (+84, -0)
  13. modules/structs/tagger.go (+7, -0)
  14. options/locale/locale_en-US.ini (+1, -0)
  15. options/locale/locale_zh-CN.ini (+1, -0)
  16. routers/admin/resources.go (+1, -1)
  17. routers/api/v1/api.go (+59, -0)
  18. routers/api/v1/repo/attachments.go (+25, -0)
  19. routers/api/v1/repo/cloudbrain.go (+77, -0)
  20. routers/api/v1/repo/datasets.go (+123, -0)
  21. routers/api/v1/repo/images.go (+141, -0)
  22. routers/api/v1/repo/mlops.go (+71, -0)
  23. routers/api/v1/repo/modelmanage.go (+106, -0)
  24. routers/api/v1/repo/spec.go (+36, -0)
  25. routers/repo/ai_model_convert.go (+38, -23)
  26. routers/repo/ai_model_manage.go (+112, -49)
  27. routers/repo/aisafety.go (+2, -2)
  28. routers/repo/cloudbrain.go (+4, -4)
  29. routers/repo/dataset.go (+2, -318)
  30. routers/repo/grampus.go (+3, -50)
  31. routers/repo/modelarts.go (+2, -2)
  32. routers/response/api_response.go (+30, -0)
  33. routers/response/response.go (+5, -1)
  34. routers/response/response_list.go (+2, -2)
  35. routers/routes/routes.go (+0, -4)
  36. services/cloudbrain/cloudbrainTask/count.go (+12, -12)
  37. services/cloudbrain/cloudbrainTask/inference.go (+631, -0)
  38. services/cloudbrain/cloudbrainTask/train.go (+1210, -0)
  39. services/cloudbrain/resource/resource_specification.go (+21, -6)
  40. templates/repo/cloudbrain/inference/new.tmpl (+2, -4)
  41. templates/repo/cloudbrain/trainjob/show.tmpl (+19, -17)
  42. templates/repo/grampus/trainjob/show.tmpl (+21, -20)
  43. templates/repo/modelarts/inferencejob/new.tmpl (+2, -4)
  44. templates/repo/modelarts/trainjob/show.tmpl (+23, -21)
  45. templates/repo/modelmanage/convertIndex.tmpl (+39, -38)
  46. templates/repo/modelmanage/index.tmpl (+28, -25)
  47. templates/repo/modelmanage/showinfo.tmpl (+69, -71)
  48. web_src/js/components/Model.vue (+78, -78)
  49. web_src/js/features/cloudbrainShow.js (+4, -4)

models/ai_model_manage.go (+58, -58)

@@ -12,67 +12,67 @@ import (
)

type AiModelManage struct {
ID string `xorm:"pk"`
Name string `xorm:"INDEX NOT NULL"`
Version string `xorm:"NOT NULL"`
VersionCount int `xorm:"NOT NULL DEFAULT 0"`
New int `xorm:"NOT NULL"`
Type int `xorm:"NOT NULL"`
Size int64 `xorm:"NOT NULL"`
Description string `xorm:"varchar(2000)"`
Label string `xorm:"varchar(1000)"`
Path string `xorm:"varchar(400) NOT NULL"`
DownloadCount int `xorm:"NOT NULL DEFAULT 0"`
Engine int64 `xorm:"NOT NULL DEFAULT 0"`
Status int `xorm:"NOT NULL DEFAULT 0"`
StatusDesc string `xorm:"varchar(500)"`
Accuracy string `xorm:"varchar(1000)"`
AttachmentId string `xorm:"NULL"`
RepoId int64 `xorm:"INDEX NULL"`
CodeBranch string `xorm:"varchar(400) NULL"`
CodeCommitID string `xorm:"NULL"`
UserId int64 `xorm:"NOT NULL"`
UserName string
UserRelAvatarLink string
TrainTaskInfo string `xorm:"text NULL"`
CreatedUnix timeutil.TimeStamp `xorm:"created"`
UpdatedUnix timeutil.TimeStamp `xorm:"updated"`
IsCanOper bool
IsCanDelete bool
ID string `xorm:"pk" json:"id"`
Name string `xorm:"INDEX NOT NULL" json:"name"`
Version string `xorm:"NOT NULL" json:"version"`
VersionCount int `xorm:"NOT NULL DEFAULT 0" json:"versionCount"`
New int `xorm:"NOT NULL" json:"new"`
Type int `xorm:"NOT NULL" json:"type"`
Size int64 `xorm:"NOT NULL" json:"size"`
Description string `xorm:"varchar(2000)" json:"description"`
Label string `xorm:"varchar(1000)" json:"label"`
Path string `xorm:"varchar(400) NOT NULL" json:"path"`
DownloadCount int `xorm:"NOT NULL DEFAULT 0" json:"downloadCount"`
Engine int64 `xorm:"NOT NULL DEFAULT 0" json:"engine"`
Status int `xorm:"NOT NULL DEFAULT 0" json:"status"`
StatusDesc string `xorm:"varchar(500)" json:"statusDesc"`
Accuracy string `xorm:"varchar(1000)" json:"accuracy"`
AttachmentId string `xorm:"NULL" json:"attachmentId"`
RepoId int64 `xorm:"INDEX NULL" json:"repoId"`
CodeBranch string `xorm:"varchar(400) NULL" json:"codeBranch"`
CodeCommitID string `xorm:"NULL" json:"codeCommitID"`
UserId int64 `xorm:"NOT NULL" json:"userId"`
UserName string `json:"userName"`
UserRelAvatarLink string `json:"userRelAvatarLink"`
TrainTaskInfo string `xorm:"text NULL" json:"trainTaskInfo"`
CreatedUnix timeutil.TimeStamp `xorm:"created" json:"createdUnix"`
UpdatedUnix timeutil.TimeStamp `xorm:"updated" json:"updatedUnix"`
IsCanOper bool `json:"isCanOper"`
IsCanDelete bool `json:"isCanDelete"`
}

type AiModelConvert struct {
ID string `xorm:"pk"`
Name string `xorm:"INDEX NOT NULL"`
Status string `xorm:"NULL"`
StatusResult string `xorm:"NULL"`
SrcEngine int `xorm:"NOT NULL DEFAULT 0"`
RepoId int64 `xorm:"INDEX NULL"`
ModelId string `xorm:"NOT NULL"`
ModelName string `xorm:"NULL"`
ModelVersion string `xorm:"NOT NULL"`
ModelPath string `xorm:"NULL"`
DestFormat int `xorm:"NOT NULL DEFAULT 0"`
NetOutputFormat int `xorm:"NULL"`
UserId int64 `xorm:"NOT NULL"`
CloudBrainTaskId string `xorm:"NULL"`
ModelArtsVersionId string `xorm:"NULL"`
ContainerID string
ContainerIp string
RunTime int64 `xorm:"NULL"`
TrainJobDuration string
InputShape string `xorm:"varchar(2000)"`
InputDataFormat string `xorm:"NOT NULL"`
Description string `xorm:"varchar(2000)"`
Path string `xorm:"varchar(400) NOT NULL"`
CreatedUnix timeutil.TimeStamp `xorm:"created"`
UpdatedUnix timeutil.TimeStamp `xorm:"updated"`
StartTime timeutil.TimeStamp
EndTime timeutil.TimeStamp
UserName string
UserRelAvatarLink string
IsCanOper bool
IsCanDelete bool
ID string `xorm:"pk" json:"id"`
Name string `xorm:"INDEX NOT NULL" json:"name"`
Status string `xorm:"NULL" json:"status"`
StatusResult string `xorm:"NULL" json:"statusResult"`
SrcEngine int `xorm:"NOT NULL DEFAULT 0" json:"srcEngine"`
RepoId int64 `xorm:"INDEX NULL" json:"repoId"`
ModelId string `xorm:"NOT NULL" json:"modelId"`
ModelName string `xorm:"NULL" json:"modelName"`
ModelVersion string `xorm:"NOT NULL" json:"modelVersion"`
ModelPath string `xorm:"NULL" json:"modelPath"`
DestFormat int `xorm:"NOT NULL DEFAULT 0" json:"destFormat"`
NetOutputFormat int `xorm:"NULL" json:"netOutputFormat"`
UserId int64 `xorm:"NOT NULL" json:"userId"`
CloudBrainTaskId string `xorm:"NULL" json:"cloudBrainTaskId"`
ModelArtsVersionId string `xorm:"NULL" json:"modelArtsVersionId"`
ContainerID string `json:"containerID"`
ContainerIp string `json:"containerIp"`
RunTime int64 `xorm:"NULL" json:"runTime"`
TrainJobDuration string `json:"trainJobDuration"`
InputShape string `xorm:"varchar(2000)" json:"inputShape"`
InputDataFormat string `xorm:"NOT NULL" json:"inputDataFormat"`
Description string `xorm:"varchar(2000)" json:"description"`
Path string `xorm:"varchar(400) NOT NULL" json:"path"`
CreatedUnix timeutil.TimeStamp `xorm:"created" json:"createdUnix"`
UpdatedUnix timeutil.TimeStamp `xorm:"updated" json:"updatedUnix"`
StartTime timeutil.TimeStamp `json:"startTime"`
EndTime timeutil.TimeStamp `json:"endTime"`
UserName string `json:"userName"`
UserRelAvatarLink string `json:"userRelAvatarLink"`
IsCanOper bool `json:"isCanOper"`
IsCanDelete bool `json:"isCanDelete"`
}

type AiModelQueryOptions struct {


models/attachment.go (+0, -138)

@@ -61,30 +61,6 @@ type AttachmentUsername struct {
Name string
}

type AttachmentInfo struct {
Attachment `xorm:"extends"`
Repo *Repository `xorm:"extends"`
RelAvatarLink string `xorm:"extends"`
UserName string `xorm:"extends"`
Recommend bool `xorm:"-"`
}

type AttachmentsOptions struct {
ListOptions
DatasetIDs []int64
DecompressState int
Type int
UploaderID int64
NeedDatasetIDs bool
NeedIsPrivate bool
IsPrivate bool
JustNeedZipFile bool
NeedRepoInfo bool
Keyword string
RecommendOnly bool
UserId int64
}

func (a *Attachment) AfterUpdate() {
if a.DatasetID > 0 {
datasetIsPublicCount, err := x.Where("dataset_id = ? AND is_private = ?", a.DatasetID, false).Count(new(Attachment))
@@ -493,19 +469,6 @@ func getPrivateAttachments(e Engine, userID int64) ([]*AttachmentUsername, error
return attachments, nil
}

func getAllUserAttachments(e Engine, userID int64) ([]*AttachmentUsername, error) {
attachments := make([]*AttachmentUsername, 0, 10)
if err := e.Table("attachment").Join("LEFT", "`user`", "attachment.uploader_id "+
"= `user`.id").Where("decompress_state= ? and attachment.type = ? and (uploader_id= ? or is_private = ?)", DecompressStateDone, TypeCloudBrainOne, userID, false).Find(&attachments); err != nil {
return nil, err
}
return attachments, nil
}

func GetAllUserAttachments(userID int64) ([]*AttachmentUsername, error) {
return getAllUserAttachments(x, userID)
}

func getModelArtsUserAttachments(e Engine, userID int64) ([]*AttachmentUsername, error) {
attachments := make([]*AttachmentUsername, 0, 10)
if err := e.Table("attachment").Join("LEFT", "`user`", "attachment.uploader_id "+
@@ -601,107 +564,6 @@ func GetAllAttachmentSize() (int64, error) {
return x.SumInt(&Attachment{}, "size")
}

func Attachments(opts *AttachmentsOptions) ([]*AttachmentInfo, int64, error) {
sess := x.NewSession()
defer sess.Close()

var cond = builder.NewCond()
if opts.NeedDatasetIDs {
cond = cond.And(
builder.In("attachment.dataset_id", opts.DatasetIDs),
)
}

if opts.UploaderID > 0 {
cond = cond.And(
builder.Eq{"attachment.uploader_id": opts.UploaderID},
)
}

if (opts.Type) >= 0 {
cond = cond.And(
builder.Eq{"attachment.type": opts.Type},
)
}

if opts.NeedIsPrivate {
cond = cond.And(
builder.Eq{"attachment.is_private": opts.IsPrivate},
)
}
if opts.RecommendOnly {
cond = cond.And(builder.In("attachment.id", builder.Select("attachment.id").
From("attachment").
Join("INNER", "dataset", "attachment.dataset_id = dataset.id and dataset.recommend=true")))
}

if opts.JustNeedZipFile {
var DecompressState []int32
DecompressState = append(DecompressState, DecompressStateDone, DecompressStateIng, DecompressStateFailed)
cond = cond.And(
builder.In("attachment.decompress_state", DecompressState),
)
}

var count int64
var err error
if len(opts.Keyword) == 0 {
count, err = sess.Where(cond).Count(new(Attachment))
} else {
lowerKeyWord := strings.ToLower(opts.Keyword)

cond = cond.And(builder.Or(builder.Like{"LOWER(attachment.name)", lowerKeyWord}, builder.Like{"LOWER(attachment.description)", lowerKeyWord}))
count, err = sess.Table(&Attachment{}).Where(cond).Count(new(AttachmentInfo))

}

if err != nil {
return nil, 0, fmt.Errorf("Count: %v", err)
}

if opts.Page >= 0 && opts.PageSize > 0 {
var start int
if opts.Page == 0 {
start = 0
} else {
start = (opts.Page - 1) * opts.PageSize
}
sess.Limit(opts.PageSize, start)
}

sess.OrderBy("attachment.created_unix DESC")
attachments := make([]*AttachmentInfo, 0, setting.UI.DatasetPagingNum)
if err := sess.Table(&Attachment{}).Where(cond).
Find(&attachments); err != nil {
return nil, 0, fmt.Errorf("Find: %v", err)
}

if opts.NeedRepoInfo {
for _, attachment := range attachments {
dataset, err := GetDatasetByID(attachment.DatasetID)
if err != nil {
return nil, 0, fmt.Errorf("GetDatasetByID failed error: %v", err)
}
attachment.Recommend = dataset.Recommend
repo, err := GetRepositoryByID(dataset.RepoID)
if err == nil {
attachment.Repo = repo
} else {
return nil, 0, fmt.Errorf("GetRepositoryByID failed error: %v", err)
}
user, err := GetUserByID(attachment.UploaderID)
if err == nil {
attachment.RelAvatarLink = user.RelAvatarLink()
attachment.UserName = user.Name
} else {
return nil, 0, fmt.Errorf("GetUserByID failed error: %v", err)
}
}
}

return attachments, count, nil
}

func GetAllDatasetContributorByDatasetId(datasetId int64) ([]*User, error) {
r := make([]*User, 0)
if err := x.Select("distinct(public.user.*)").Table("attachment").Join("LEFT", "user", "public.user.ID = attachment.uploader_id").Where("attachment.dataset_id = ?", datasetId).Find(&r); err != nil {


models/base_message.go (+21, -0)

@@ -14,3 +14,24 @@ func BaseErrorMessage(message string) BaseMessage {
1, message,
}
}

type BaseMessageApi struct {
Code int `json:"code"`
Message string `json:"message"`
}

var BaseOKMessageApi = BaseMessageApi{
0, "",
}

func BaseErrorMessageApi(message string) BaseMessageApi {
return BaseMessageApi{
1, message,
}
}

type BaseMessageWithDataApi struct {
Code int `json:"code"`
Message string `json:"message"`
Data interface{} `json:"data"`
}
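
For reference, a minimal standalone sketch of the JSON shape the new BaseMessageApi / BaseMessageWithDataApi types produce (the types are copied from this diff; the sample payload and message key are only illustrative):

package main

import (
	"encoding/json"
	"fmt"
)

// Copies of the API message types added in models/base_message.go (wire format only).
type BaseMessageApi struct {
	Code    int    `json:"code"`
	Message string `json:"message"`
}

type BaseMessageWithDataApi struct {
	Code    int         `json:"code"`
	Message string      `json:"message"`
	Data    interface{} `json:"data"`
}

func main() {
	ok, _ := json.Marshal(BaseMessageWithDataApi{Code: 0, Data: map[string]int{"count": 3}})
	fail, _ := json.Marshal(BaseMessageApi{Code: 1, Message: "repo.cloudbrain_query_fail"})
	fmt.Println(string(ok))   // {"code":0,"message":"","data":{"count":3}}
	fmt.Println(string(fail)) // {"code":1,"message":"repo.cloudbrain_query_fail"}
}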

models/dataset.go (+2, -2)

@@ -22,8 +22,8 @@ const (

type Dataset struct {
ID int64 `xorm:"pk autoincr"`
Title string `xorm:"INDEX NOT NULL"`
Status int32 `xorm:"INDEX"` // normal_private: 0, pulbic: 1, is_delete: 2
Title string `xorm:"INDEX NOT NULL""`
Status int32 `xorm:"INDEX""` // normal_private: 0, pulbic: 1, is_delete: 2
Category string
Description string `xorm:"TEXT"`
DownloadTimes int64


models/repo.go (+2, -2)

@@ -223,10 +223,10 @@ type Repository struct {
BlockChainStatus RepoBlockChainStatus `xorm:"NOT NULL DEFAULT 0"`

// git clone and git pull total count
CloneCnt int64 `xorm:"NOT NULL DEFAULT 0"`
CloneCnt int64 `xorm:"NOT NULL DEFAULT 0" json:"clone_cnt"`

// only git clone total count
GitCloneCnt int64 `xorm:"NOT NULL DEFAULT 0"`
GitCloneCnt int64 `xorm:"NOT NULL DEFAULT 0" json:"git_clone_cnt"`

CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`


modules/auth/modelarts.go (+20, -23)

@@ -57,29 +57,26 @@ type CreateModelArtsTrainJobForm struct {
}

type CreateModelArtsInferenceJobForm struct {
DisplayJobName string `form:"display_job_name" binding:"Required"`
JobName string `form:"job_name" binding:"Required"`
Attachment string `form:"attachment" binding:"Required"`
BootFile string `form:"boot_file" binding:"Required"`
WorkServerNumber int `form:"work_server_number" binding:"Required"`
EngineID int `form:"engine_id" binding:"Required"`
PoolID string `form:"pool_id" binding:"Required"`
Flavor string `form:"flavor" binding:"Required"`
Params string `form:"run_para_list" binding:"Required"`
Description string `form:"description"`
IsSaveParam string `form:"is_save_para"`
ParameterTemplateName string `form:"parameter_template_name"`
PrameterDescription string `form:"parameter_description"`
BranchName string `form:"branch_name" binding:"Required"`
VersionName string `form:"version_name" binding:"Required"`
FlavorName string `form:"flaver_names" binding:"Required"`
EngineName string `form:"engine_names" binding:"Required"`
LabelName string `form:"label_names" binding:"Required"`
TrainUrl string `form:"train_url" binding:"Required"`
ModelName string `form:"model_name" binding:"Required"`
ModelVersion string `form:"model_version" binding:"Required"`
CkptName string `form:"ckpt_name" binding:"Required"`
SpecId int64 `form:"spec_id" binding:"Required"`
DisplayJobName string `form:"display_job_name" binding:"Required"`
JobName string `form:"job_name" binding:"Required"`
Attachment string `form:"attachment" binding:"Required"`
BootFile string `form:"boot_file" binding:"Required"`
WorkServerNumber int `form:"work_server_number" binding:"Required"`
EngineID int `form:"engine_id" binding:"Required"`
PoolID string `form:"pool_id" binding:"Required"`
Flavor string `form:"flavor" binding:"Required"`
Params string `form:"run_para_list" binding:"Required"`
Description string `form:"description"`
BranchName string `form:"branch_name" binding:"Required"`
VersionName string `form:"version_name" binding:"Required"`
FlavorName string `form:"flaver_names" binding:"Required"`
EngineName string `form:"engine_names" binding:"Required"`
LabelName string `form:"label_names" binding:"Required"`
TrainUrl string `form:"train_url" binding:"Required"`
ModelName string `form:"model_name" binding:"Required"`
ModelVersion string `form:"model_version" binding:"Required"`
CkptName string `form:"ckpt_name" binding:"Required"`
SpecId int64 `form:"spec_id" binding:"Required"`
}

func (f *CreateModelArtsTrainJobForm) Validate(ctx *macaron.Context, errs binding.Errors) binding.Errors {


modules/cloudbrain/cloudbrain.go (+6, -6)

@@ -228,7 +228,7 @@ func AdminOrImageCreaterRight(ctx *context.Context) {

}

func GenerateTask(req GenerateCloudBrainTaskReq) error {
func GenerateTask(req GenerateCloudBrainTaskReq) (string, error) {
var versionCount int
if req.JobType == string(models.JobTypeTrain) {
versionCount = 1
@@ -335,11 +335,11 @@ func GenerateTask(req GenerateCloudBrainTaskReq) error {
})
if err != nil {
log.Error("CreateJob failed:", err.Error(), req.Ctx.Data["MsgID"])
return err
return "", err
}
if jobResult.Code != Success {
log.Error("CreateJob(%s) failed:%s", req.JobName, jobResult.Msg, req.Ctx.Data["MsgID"])
return errors.New(jobResult.Msg)
return "", errors.New(jobResult.Msg)
}

var jobID = jobResult.Payload["jobId"].(string)
@@ -380,13 +380,13 @@ func GenerateTask(req GenerateCloudBrainTaskReq) error {
})

if err != nil {
return err
return "", err
}

task, err := models.GetCloudbrainByJobID(jobID)
if err != nil {
log.Error("GetCloudbrainByJobID failed: %v", err.Error())
return err
return "", err
}

stringId := strconv.FormatInt(task.ID, 10)
@@ -401,7 +401,7 @@ func GenerateTask(req GenerateCloudBrainTaskReq) error {
notification.NotifyOtherTask(req.Ctx.User, req.Ctx.Repo.Repository, stringId, req.DisplayJobName, models.ActionCreateDebugGPUTask)
}

return nil
return jobID, nil
}

func IsBenchmarkJob(jobType string) bool {


modules/convert/cloudbrain.go (+111, -0)

@@ -0,0 +1,111 @@
package convert

import (
"code.gitea.io/gitea/models"
api "code.gitea.io/gitea/modules/structs"
)

func ToCloudBrain(task *models.Cloudbrain) *api.Cloudbrain {
return &api.Cloudbrain{
ID: task.ID,
JobID: task.JobID,
JobType: task.JobType,
Type: task.Type,
DisplayJobName: task.DisplayJobName,
Status: task.Status,
CreatedUnix: int64(task.CreatedUnix),
RepoID: task.RepoID,
Duration: task.Duration,
TrainJobDuration: task.TrainJobDuration,
ImageID: task.ImageID,
Image: task.Image,
Uuid: task.Uuid,
DatasetName: task.DatasetName,
ComputeResource: task.ComputeResource,
AiCenter: task.AiCenter,
BranchName: task.BranchName,
Parameters: task.Parameters,
BootFile: task.BootFile,
Description: task.Description,
ModelName: task.ModelName,

ModelVersion: task.ModelVersion,
CkptName: task.CkptName,

StartTime: int64(task.StartTime),
EndTime: int64(task.EndTime),

Spec: ToSpecification(task.Spec),
}
}
func ToAttachment(attachment *models.Attachment) *api.AttachmentShow {
return &api.AttachmentShow{
ID: attachment.ID,
UUID: attachment.UUID,
DatasetID: attachment.DatasetID,
ReleaseID: attachment.ReleaseID,
UploaderID: attachment.UploaderID,
CommentID: attachment.CommentID,
Name: attachment.Name,
Description: attachment.Description,
DownloadCount: attachment.DownloadCount,
UseNumber: attachment.UseNumber,
Size: attachment.Size,
IsPrivate: attachment.IsPrivate,
DecompressState: attachment.DecompressState,
Type: attachment.Type,
CreatedUnix: int64(attachment.CreatedUnix),
}
}

func ToDataset(dataset *models.Dataset) *api.Dataset {
var convertAttachments []*api.AttachmentShow
for _, attachment := range dataset.Attachments {
convertAttachments = append(convertAttachments, ToAttachment(attachment))
}
return &api.Dataset{
ID: dataset.ID,
Title: dataset.Title,
Status: dataset.Status,
Category: dataset.Category,
Description: dataset.Description,
DownloadTimes: dataset.DownloadTimes,
UseCount: dataset.UseCount,
NumStars: dataset.NumStars,
Recommend: dataset.Recommend,
License: dataset.License,
Task: dataset.Task,
ReleaseID: dataset.ReleaseID,
UserID: dataset.UserID,
RepoID: dataset.RepoID,
Repo: &api.RepositoryShow{
OwnerName: dataset.Repo.OwnerName,
Name: dataset.Repo.Name,
},
CreatedUnix: int64(dataset.CreatedUnix),
UpdatedUnix: int64(dataset.UpdatedUnix),
Attachments: convertAttachments,
}
}

func ToSpecification(s *models.Specification) *api.SpecificationShow {
return &api.SpecificationShow{
ID: s.ID,
AccCardsNum: s.AccCardsNum,
AccCardType: s.AccCardType,
CpuCores: s.CpuCores,
MemGiB: s.MemGiB,
GPUMemGiB: s.GPUMemGiB,
ShareMemGiB: s.ShareMemGiB,
ComputeResource: s.ComputeResource,
UnitPrice: s.UnitPrice,
}
}

func ToTagger(user *models.User) *api.Tagger {
return &api.Tagger{
Name: user.Name,
RelAvatarURL: user.RelAvatarLink(),
Email: user.Email,
}
}

modules/grampus/grampus.go (+4, -4)

@@ -102,7 +102,7 @@ func getDatasetGrampus(datasetInfos map[string]models.DatasetInfo) []models.Gram
return datasetGrampus
}

func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (err error) {
func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (jobId string, err error) {
createTime := timeutil.TimeStampNow()

centerID, centerName := getCentersParamter(ctx, req)
@@ -150,7 +150,7 @@ func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (err error
})
if err != nil {
log.Error("createJob failed: %v", err.Error())
return err
return "", err
}

jobID := jobResult.JobInfo.JobID
@@ -191,7 +191,7 @@ func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (err error

if err != nil {
log.Error("CreateCloudbrain(%s) failed:%v", req.DisplayJobName, err.Error())
return err
return "", err
}

var actionType models.ActionType
@@ -202,7 +202,7 @@ func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (err error
}
notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, jobID, req.DisplayJobName, actionType)

return nil
return jobID, nil
}

func getCentersParamter(ctx *context.Context, req *GenerateTrainJobReq) ([]string, []string) {


modules/modelarts/modelarts.go (+13, -13)

@@ -350,7 +350,7 @@ func GenerateNotebook2(ctx *context.Context, displayJobName, jobName, uuid, desc
return nil
}

func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (err error) {
func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (jobId string, err error) {
createTime := timeutil.TimeStampNow()
var jobResult *models.CreateTrainJobResult
var createErr error
@@ -410,17 +410,17 @@ func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (err error
})
if errTemp != nil {
log.Error("InsertCloudbrainTemp failed: %v", errTemp.Error())
return errTemp
return "", errTemp
}
}
return createErr
return "", createErr
}
jobId := strconv.FormatInt(jobResult.JobID, 10)
jobID := strconv.FormatInt(jobResult.JobID, 10)
createErr = models.CreateCloudbrain(&models.Cloudbrain{
Status: TransTrainJobStatus(jobResult.Status),
UserID: ctx.User.ID,
RepoID: ctx.Repo.Repository.ID,
JobID: jobId,
JobID: jobID,
JobName: req.JobName,
DisplayJobName: req.DisplayJobName,
JobType: string(models.JobTypeTrain),
@@ -458,10 +458,10 @@ func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (err error

if createErr != nil {
log.Error("CreateCloudbrain(%s) failed:%v", req.DisplayJobName, createErr.Error())
return createErr
return "", createErr
}
notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, jobId, req.DisplayJobName, models.ActionCreateTrainTask)
return nil
notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, jobID, req.DisplayJobName, models.ActionCreateTrainTask)
return jobID, nil
}

func GenerateModelConvertTrainJob(req *GenerateTrainJobReq) (*models.CreateTrainJobResult, error) {
@@ -682,7 +682,7 @@ func GetOutputPathByCount(TotalVersionCount int) (VersionOutputPath string) {
return VersionOutputPath
}

func GenerateInferenceJob(ctx *context.Context, req *GenerateInferenceJobReq) (err error) {
func GenerateInferenceJob(ctx *context.Context, req *GenerateInferenceJobReq) (jobId string, err error) {
createTime := timeutil.TimeStampNow()
var jobResult *models.CreateTrainJobResult
var createErr error
@@ -742,10 +742,10 @@ func GenerateInferenceJob(ctx *context.Context, req *GenerateInferenceJobReq) (e
})
if err != nil {
log.Error("InsertCloudbrainTemp failed: %v", err.Error())
return err
return "", err
}
}
return err
return "", err
}

// attach, err := models.GetAttachmentByUUID(req.Uuid)
@@ -796,7 +796,7 @@ func GenerateInferenceJob(ctx *context.Context, req *GenerateInferenceJobReq) (e

if err != nil {
log.Error("CreateCloudbrain(%s) failed:%v", req.JobName, err.Error())
return err
return "", err
}
if req.JobType == string(models.JobTypeModelSafety) {
task, err := models.GetCloudbrainByJobID(jobID)
@@ -807,7 +807,7 @@ func GenerateInferenceJob(ctx *context.Context, req *GenerateInferenceJobReq) (e
notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, jobID, req.DisplayJobName, models.ActionCreateInferenceTask)
}

return nil
return jobID, nil
}

func GetNotebookImageName(imageId string) (string, error) {


modules/structs/attachment.go (+45, -0)

@@ -27,3 +27,48 @@ type Attachment struct {
type EditAttachmentOptions struct {
Name string `json:"name"`
}

type Dataset struct {
ID int64 `json:"id"`
Title string `json:"title"`
Status int32 `json:"status"`
Category string `json:"category"`
Description string `json:"description"`
DownloadTimes int64 `json:"downloadTimes"`
UseCount int64 `json:"useCount"`
NumStars int `json:"numStars"`
Recommend bool `json:"recommend"`
License string `json:"license"`
Task string `json:"task"`
ReleaseID int64 `json:"releaseId"`
UserID int64 `json:"userId"`
RepoID int64 `json:"repoId"`
Repo *RepositoryShow `json:"repo"`
CreatedUnix int64 `json:"createdUnix"`
UpdatedUnix int64 `json:"updatedUnix"`

Attachments []*AttachmentShow `json:"attachments"`
}

type RepositoryShow struct {
OwnerName string `json:"ownerName"`
Name string `json:"name"`
}

type AttachmentShow struct {
ID int64 `json:"id"`
UUID string `json:"uuid"`
DatasetID int64 `json:"datasetId"`
ReleaseID int64 `json:"releaseId"`
UploaderID int64 `json:"uploaderId"`
CommentID int64 `json:"commentId"`
Name string `json:"name"`
Description string `json:"description"`
DownloadCount int64 `json:"downloadCount"`
UseNumber int64 `json:"useNumber"`
Size int64 `json:"size"`
IsPrivate bool `json:"isPrivate"`
DecompressState int32 `json:"decompressState"`
Type int `json:"type"`
CreatedUnix int64 `json:"createdUnix"`
}

modules/structs/cloudbrain.go (+84, -0)

@@ -0,0 +1,84 @@
package structs

type CreateGrampusTrainJobOption struct {
DisplayJobName string `json:"display_job_name" binding:"Required"`
JobName string `json:"job_name" binding:"Required" `
Attachment string `json:"attachment" binding:"Required"`
BootFile string `json:"boot_file" binding:"Required"`
ImageID string `json:"image_id" binding:"Required"`
Params string `json:"run_para_list" binding:"Required"`
Description string `json:"description"`
BranchName string `json:"branch_name" binding:"Required"`
EngineName string `json:"engine_name" binding:"Required"`
WorkServerNumber int `json:"work_server_number" binding:"Required"`
Image string `json:"image" binding:"Required"`
DatasetName string `json:"dataset_name" binding:"Required"`
ModelName string `json:"model_name"`
ModelVersion string `json:"model_version"`
CkptName string `json:"ckpt_name"`
LabelName string `json:"label_names"`
PreTrainModelUrl string `json:"pre_train_model_url"`
SpecId int64 `json:"spec_id" binding:"Required"`
}

type CreateTrainJobOption struct {
Type int `json:"type"`
DisplayJobName string `json:"display_job_name" binding:"Required"`
ImageID string `json:"image_id"`
Image string `json:"image" binding:"Required"`
Attachment string `json:"attachment" binding:"Required"`
DatasetName string `json:"dataset_name" binding:"Required"`
Description string `json:"description" `
BootFile string `json:"boot_file" binding:"Required"`
BranchName string `json:"branch_name" binding:"Required"`
Params string `json:"run_para_list" binding:"Required"`
WorkServerNumber int `json:"work_server_number"`
ModelName string `json:"model_name"`
ModelVersion string `json:"model_version"`
CkptName string `json:"ckpt_name"`
LabelName string `json:"label_names"`
PreTrainModelUrl string `json:"pre_train_model_url"`
SpecId int64 `json:"spec_id" binding:"Required"`
}

type Cloudbrain struct {
ID int64 `json:"id"`
JobID string `json:"job_id"`
JobType string `json:"job_type"`
Type int `json:"type"`
DisplayJobName string `json:"display_job_name"`
Status string `json:"status"`
CreatedUnix int64 `json:"created_unix"`
RepoID int64 `json:"repo_id"`
Duration int64 `json:"duration"` //run duration in seconds
TrainJobDuration string `json:"train_job_duration"`
ImageID string `json:"image_id"` //grampus image_id
Image string `json:"image"`
Uuid string `json:"uuid"` //dataset id
DatasetName string `json:"dataset_name"`
ComputeResource string `json:"compute_resource"` //compute resource, e.g. npu
AiCenter string `json:"ai_center"` //grampus ai center: center_id+center_name
BranchName string `json:"branch_name"` //branch name
Parameters string `json:"parameters"` //params passed to modelarts
BootFile string `json:"boot_file"` //boot file
Description string `json:"description"` //description
ModelName string `json:"model_name"` //model name
ModelVersion string `json:"model_version"` //model version
CkptName string `json:"ckpt_name"` //checkpoint (weight) file name
StartTime int64 `json:"start_time"`
EndTime int64 `json:"end_time"`

Spec *SpecificationShow `json:"spec"`
}

type SpecificationShow struct {
ID int64 `json:"id"`
AccCardsNum int `json:"acc_cards_num"`
AccCardType string `json:"acc_card_type"`
CpuCores int `json:"cpu_cores"`
MemGiB float32 `json:"mem_gi_b"`
GPUMemGiB float32 `json:"gpu_mem_gi_b"`
ShareMemGiB float32 `json:"share_mem_gi_b"`
ComputeResource string `json:"compute_resource"`
UnitPrice int `json:"unit_price"`
}

modules/structs/tagger.go (+7, -0)

@@ -0,0 +1,7 @@
package structs

type Tagger struct {
Name string `json:"name"`
Email string `json:"email"`
RelAvatarURL string `json:"relAvatarURL"`
}

options/locale/locale_en-US.ini (+1, -0)

@@ -617,6 +617,7 @@ organization = Organizations
uid = Uid
u2f = Security Keys
bind_wechat = Bind WeChat
no_wechat_bind = Can not do the operation, please bind WeChat first.
wechat_bind = WeChat Binding
bind_account_information = Bind account information
bind_time = Bind Time


options/locale/locale_zh-CN.ini (+1, -0)

@@ -622,6 +622,7 @@ organization=组织
uid=用户 ID
u2f=安全密钥
wechat_bind = 微信绑定
no_wechat_bind = 不能创建任务,请先绑定微信。
bind_wechat = 绑定微信
bind_account_information = 绑定账号信息
bind_time = 绑定时间


routers/admin/resources.go (+1, -1)

@@ -182,7 +182,7 @@ func UpdateResourceSpecification(ctx *context.Context, req models.ResourceSpecif

if err != nil {
log.Error("UpdateResourceSpecification error. %v", err)
ctx.JSON(http.StatusOK, response.ResponseError(err))
ctx.JSON(http.StatusOK, response.ResponseBizError(err))
return
}
ctx.JSON(http.StatusOK, response.Success())


routers/api/v1/api.go (+59, -0)

@@ -242,6 +242,15 @@ func reqRepoWriter(unitTypes ...models.UnitType) macaron.Handler {
}
}

func reqWeChat() macaron.Handler {
return func(ctx *context.Context) {
if setting.WechatAuthSwitch && ctx.User.WechatOpenId == "" {
ctx.JSON(http.StatusForbidden, models.BaseErrorMessageApi("settings.no_wechat_bind"))
return
}
}
}

// reqRepoReader user should have specific read permission or be a repo admin or a site admin
func reqRepoReader(unitType models.UnitType) macaron.Handler {
return func(ctx *context.Context) {
@@ -517,6 +526,25 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Post("/markdown", bind(api.MarkdownOption{}), misc.Markdown)
m.Post("/markdown/raw", misc.MarkdownRaw)

m.Group("/images", func() {

m.Get("/public", repo.GetPublicImages)
m.Get("/custom", repo.GetCustomImages)
m.Get("/star", repo.GetStarImages)
m.Get("/npu", repo.GetNpuImages)

}, reqToken())

m.Group("/attachments", func() {

m.Get("/:uuid", repo.GetAttachment)
m.Get("/get_chunks", repo.GetSuccessChunks)
m.Get("/new_multipart", repo.NewMultipart)
m.Get("/get_multipart_url", repo.GetMultipartUploadUrl)
m.Post("/complete_multipart", repo.CompleteMultipart)

}, reqToken())

// Notifications
m.Group("/notifications", func() {
m.Combo("").
@@ -701,6 +729,13 @@ func RegisterRoutes(m *macaron.Macaron) {

m.Combo("/repositories/:id", reqToken()).Get(repo.GetByID)

m.Group("/datasets/:username/:reponame", func() {
m.Get("/current_repo", repo.CurrentRepoDatasetMultiple)
m.Get("/my_datasets", repo.MyDatasetsMultiple)
m.Get("/public_datasets", repo.PublicDatasetMultiple)
m.Get("/my_favorite", repo.MyFavoriteDatasetMultiple)
}, reqToken(), repoAssignment())

m.Group("/repos", func() {
m.Get("/search", repo.Search)

@@ -709,7 +744,13 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Post("/migrate", reqToken(), bind(auth.MigrateRepoForm{}), repo.Migrate)
m.Post("/migrate/submit", reqToken(), bind(auth.MigrateRepoForm{}), repo.MigrateSubmit)

m.Group("/specification", func() {
m.Get("", repo.GetResourceSpec)
}, reqToken())

m.Group("/:username/:reponame", func() {
m.Get("/right", reqToken(), repo.GetRight)
m.Get("/tagger", reqToken(), repo.ListTagger)
m.Combo("").Get(reqAnyRepoReader(), repo.Get).
Delete(reqToken(), reqOwner(), repo.Delete).
Patch(reqToken(), reqAdmin(), bind(api.EditRepoOption{}), context.RepoRef(), repo.Edit)
@@ -938,21 +979,39 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Get("/:id/log", repo.CloudbrainGetLog)
m.Get("/:id/download_log_file", repo.CloudbrainDownloadLogFile)
m.Group("/train-job", func() {

m.Post("/create", reqToken(), reqRepoWriter(models.UnitTypeCloudBrain), reqWeChat(), context.ReferencesGitRepo(false), bind(api.CreateTrainJobOption{}), repo.CreateCloudBrain)

m.Group("/:jobid", func() {
m.Get("", repo.GetModelArtsTrainJobVersion)
m.Get("/detail", reqToken(), reqRepoReader(models.UnitTypeCloudBrain), repo.CloudBrainShow)
m.Get("/model_list", repo.CloudBrainModelList)
m.Post("/stop_version", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo_ext.CloudBrainStop)
})
})
m.Group("/inference-job", func() {
m.Post("/create", reqToken(), reqRepoWriter(models.UnitTypeCloudBrain), reqWeChat(), bind(api.CreateTrainJobOption{}), context.ReferencesGitRepo(false), repo.CreateCloudBrainInferenceTask)

m.Group("/:jobid", func() {
m.Get("", repo.GetCloudBrainInferenceJob)
m.Get("/detail", reqToken(), reqRepoReader(models.UnitTypeCloudBrain), repo.CloudBrainShow)

m.Post("/del", cloudbrain.AdminOrOwnerOrJobCreaterRightForTrain, repo.DelCloudBrainJob)
m.Get("/result_list", repo.InferencJobResultList)
})
})
}, reqRepoReader(models.UnitTypeCloudBrain))
m.Group("/modelmanage", func() {
m.Post("/create_new_model", repo.CreateNewModel)
m.Get("/show_model_api", repo.ShowModelManageApi)
m.Delete("/delete_model", repo.DeleteModel)
m.Get("/downloadall", repo.DownloadModel)
m.Get("/query_model_byId", repo.QueryModelById)
m.Get("/query_model_for_predict", repo.QueryModelListForPredict)
m.Get("/query_modelfile_for_predict", repo.QueryModelFileForPredict)
m.Get("/query_train_model", repo.QueryTrainModelList)
m.Post("/create_model_convert", repo.CreateModelConvert)
m.Get("/show_model_convert_page")
m.Get("/:id", repo.GetCloudbrainModelConvertTask)
m.Get("/:id/log", repo.CloudbrainForModelConvertGetLog)
m.Get("/:id/modelartlog", repo.TrainJobForModelConvertGetLog)


routers/api/v1/repo/attachments.go (+25, -0)

@@ -0,0 +1,25 @@
package repo

import (
"code.gitea.io/gitea/modules/context"
routeRepo "code.gitea.io/gitea/routers/repo"
)

func GetSuccessChunks(ctx *context.APIContext) {
routeRepo.GetSuccessChunks(ctx.Context)
}

func NewMultipart(ctx *context.APIContext) {
routeRepo.NewMultipart(ctx.Context)
}
func GetMultipartUploadUrl(ctx *context.APIContext) {
routeRepo.GetMultipartUploadUrl(ctx.Context)
}

func CompleteMultipart(ctx *context.APIContext) {
routeRepo.CompleteMultipart(ctx.Context)

}
func GetAttachment(ctx *context.APIContext) {
routeRepo.GetAttachment(ctx.Context)
}

routers/api/v1/repo/cloudbrain.go (+77, -0)

@@ -16,8 +16,14 @@ import (
"strings"
"time"

cloudbrainService "code.gitea.io/gitea/services/cloudbrain"

"code.gitea.io/gitea/modules/convert"

"code.gitea.io/gitea/services/cloudbrain/cloudbrainTask"

api "code.gitea.io/gitea/modules/structs"

"code.gitea.io/gitea/modules/notification"

"code.gitea.io/gitea/modules/setting"
@@ -31,6 +37,77 @@ import (
routerRepo "code.gitea.io/gitea/routers/repo"
)

func CloudBrainShow(ctx *context.APIContext) {

task, err := models.GetCloudbrainByJobID(ctx.Params(":jobid"))

if err != nil {
log.Info("error:" + err.Error())
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("repo.cloudbrain_query_fail"))
return
}
cloudbrainTask.PrepareSpec4Show(task)
task.ContainerIp = ""
if cloudbrainTask.IsTaskNotStop(task) {
cloudbrainTask.SyncTaskStatus(task)
}

if task.TrainJobDuration == "" {
if task.Duration == 0 {
var duration int64
if task.Status == string(models.JobWaiting) {
duration = 0
} else if task.Status == string(models.JobRunning) {
duration = time.Now().Unix() - int64(task.CreatedUnix)
} else {
duration = int64(task.UpdatedUnix) - int64(task.CreatedUnix)
}
task.Duration = duration
}
task.TrainJobDuration = models.ConvertDurationToStr(task.Duration)
}
//to unify image output
if task.Type == models.TypeCloudBrainTwo || task.Type == models.TypeCDCenter {
task.ImageID = strconv.FormatInt(task.EngineID, 10)
task.Image = task.EngineName

} else if task.Type == models.TypeC2Net {
task.Image = task.EngineName
}
task.AiCenter = cloudbrainService.GetAiCenterShow(task.AiCenter, ctx.Context)

ctx.JSON(http.StatusOK, models.BaseMessageWithDataApi{Code: 0, Message: "", Data: convert.ToCloudBrain(task)})

}

func CreateCloudBrain(ctx *context.APIContext, option api.CreateTrainJobOption) {
if option.Type == cloudbrainTask.TaskTypeCloudbrainOne {
cloudbrainTask.CloudbrainOneTrainJobCreate(ctx.Context, option)
}
if option.Type == cloudbrainTask.TaskTypeModelArts {
cloudbrainTask.ModelArtsTrainJobNpuCreate(ctx.Context, option)
}

if option.Type == cloudbrainTask.TaskTypeGrampusGPU {
cloudbrainTask.GrampusTrainJobGpuCreate(ctx.Context, option)
}
if option.Type == cloudbrainTask.TaskTypeGrampusNPU {
cloudbrainTask.GrampusTrainJobNpuCreate(ctx.Context, option)
}

}

func CreateCloudBrainInferenceTask(ctx *context.APIContext, option api.CreateTrainJobOption) {

if option.Type == 0 {
cloudbrainTask.CloudBrainInferenceJobCreate(ctx.Context, option)
}
if option.Type == 1 {
cloudbrainTask.ModelArtsInferenceJobCreate(ctx.Context, option)
}

}

// cloudbrain get job task by jobid
func GetCloudbrainTask(ctx *context.APIContext) {
// swagger:operation GET /repos/{owner}/{repo}/cloudbrain/{jobid} cloudbrain jobTask


routers/api/v1/repo/datasets.go (+123, -0)

@@ -0,0 +1,123 @@
package repo

import (
"fmt"
"strings"

"code.gitea.io/gitea/modules/convert"

api "code.gitea.io/gitea/modules/structs"

"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
)

func PublicDatasetMultiple(ctx *context.APIContext) {

opts := &models.SearchDatasetOptions{
PublicOnly: true,
NeedAttachment: true,
CloudBrainType: ctx.QueryInt("type"),
}
datasetMultiple(ctx, opts)

}

func MyFavoriteDatasetMultiple(ctx *context.APIContext) {

opts := &models.SearchDatasetOptions{
StarByMe: true,
DatasetIDs: models.GetDatasetIdsStarByUser(ctx.User.ID),
NeedAttachment: true,
CloudBrainType: ctx.QueryInt("type"),
}
datasetMultiple(ctx, opts)
}

func CurrentRepoDatasetMultiple(ctx *context.APIContext) {
datasetIds := models.GetDatasetIdsByRepoID(ctx.Repo.Repository.ID)
searchOrderBy := getSearchOrderByInValues(datasetIds)
opts := &models.SearchDatasetOptions{
RepoID: ctx.Repo.Repository.ID,
NeedAttachment: true,
CloudBrainType: ctx.QueryInt("type"),
DatasetIDs: datasetIds,
SearchOrderBy: searchOrderBy,
}

datasetMultiple(ctx, opts)

}

func MyDatasetsMultiple(ctx *context.APIContext) {

opts := &models.SearchDatasetOptions{
UploadAttachmentByMe: true,
NeedAttachment: true,
CloudBrainType: ctx.QueryInt("type"),
}
datasetMultiple(ctx, opts)

}
func datasetMultiple(ctx *context.APIContext, opts *models.SearchDatasetOptions) {
page := ctx.QueryInt("page")
if page < 1 {
page = 1
}
pageSize := ctx.QueryInt("pageSize")
if pageSize < 1 {
pageSize = setting.UI.DatasetPagingNum
}

keyword := strings.Trim(ctx.Query("q"), " ")
opts.Keyword = keyword
if opts.SearchOrderBy.String() == "" {
opts.SearchOrderBy = models.SearchOrderByRecentUpdated
}

opts.RecommendOnly = ctx.QueryBool("recommend")
opts.ListOptions = models.ListOptions{
Page: page,
PageSize: pageSize,
}
opts.JustNeedZipFile = true
opts.User = ctx.User

datasets, count, err := models.SearchDataset(opts)

if err != nil {
log.Error("json.Marshal failed:", err.Error())
ctx.JSON(200, map[string]interface{}{
"code": 1,
"message": err.Error(),
"data": []*api.Dataset{},
"count": 0,
})
return
}
var convertDatasets []*api.Dataset
for _, dataset := range datasets {
convertDatasets = append(convertDatasets, convert.ToDataset(dataset))
}

ctx.JSON(200, map[string]interface{}{
"code": 0,
"message": "",
"data": convertDatasets,
"count": count,
})
}

func getSearchOrderByInValues(datasetIds []int64) models.SearchOrderBy {
if len(datasetIds) == 0 {
return ""
}
searchOrderBy := "CASE id "
for i, id := range datasetIds {
searchOrderBy += fmt.Sprintf(" WHEN %d THEN %d", id, i+1)
}
searchOrderBy += " ELSE 0 END"
return models.SearchOrderBy(searchOrderBy)
}
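
To make the custom ordering easy to verify, here is a small standalone sketch that reproduces getSearchOrderByInValues (returning a plain string instead of models.SearchOrderBy) and prints the clause for a made-up id list:

package main

import "fmt"

// Reproduction of the helper above, returning string for simplicity.
func getSearchOrderByInValues(datasetIds []int64) string {
	if len(datasetIds) == 0 {
		return ""
	}
	searchOrderBy := "CASE id "
	for i, id := range datasetIds {
		searchOrderBy += fmt.Sprintf(" WHEN %d THEN %d", id, i+1)
	}
	searchOrderBy += " ELSE 0 END"
	return searchOrderBy
}

func main() {
	// Dataset ids [7, 3, 12] keep their original order in the resulting ORDER BY clause.
	fmt.Println(getSearchOrderByInValues([]int64{7, 3, 12}))
	// CASE id  WHEN 7 THEN 1 WHEN 3 THEN 2 WHEN 12 THEN 3 ELSE 0 END
}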

routers/api/v1/repo/images.go (+141, -0)

@@ -0,0 +1,141 @@
package repo

import (
"encoding/json"
"net/http"
"strconv"

"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/grampus"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/modelarts"
"code.gitea.io/gitea/modules/setting"
)

type NPUImageINFO struct {
ID string `json:"id"`
Value string `json:"value"`
}

func GetPublicImages(ctx *context.APIContext) {
uid := getUID(ctx)
opts := models.SearchImageOptions{
IncludePublicOnly: true,
UID: uid,
Keyword: ctx.Query("q"),
Topics: ctx.Query("topic"),
IncludeOfficialOnly: ctx.QueryBool("recommend"),
SearchOrderBy: "type desc, num_stars desc,id desc",
Status: models.IMAGE_STATUS_SUCCESS,
CloudbrainType: ctx.QueryInt("cloudbrainType"),
}

getImages(ctx, &opts)

}

func GetCustomImages(ctx *context.APIContext) {
uid := getUID(ctx)
opts := models.SearchImageOptions{
UID: uid,
IncludeOwnerOnly: true,
Keyword: ctx.Query("q"),
Topics: ctx.Query("topic"),
Status: -1,
SearchOrderBy: "id desc",
}
getImages(ctx, &opts)

}
func GetStarImages(ctx *context.APIContext) {

uid := getUID(ctx)
opts := models.SearchImageOptions{
UID: uid,
IncludeStarByMe: true,
Keyword: ctx.Query("q"),
Topics: ctx.Query("topic"),
Status: models.IMAGE_STATUS_SUCCESS,
SearchOrderBy: "id desc",
}
getImages(ctx, &opts)

}

func GetNpuImages(ctx *context.APIContext) {
cloudbrainType := ctx.QueryInt("type")
if cloudbrainType == 0 { //modelarts
getModelArtsImages(ctx)
} else { //c2net
getC2netNpuImages(ctx)
}
}

func getModelArtsImages(ctx *context.APIContext) {

var versionInfos modelarts.VersionInfo
_ = json.Unmarshal([]byte(setting.EngineVersions), &versionInfos)
var npuImageInfos []NPUImageINFO
for _, info := range versionInfos.Version {
npuImageInfos = append(npuImageInfos, NPUImageINFO{
ID: strconv.Itoa(info.ID),
Value: info.Value,
})
}
ctx.JSON(http.StatusOK, npuImageInfos)

}

func getC2netNpuImages(ctx *context.APIContext) {
images, err := grampus.GetImages(grampus.ProcessorTypeNPU)
var npuImageInfos []NPUImageINFO
if err != nil {
log.Error("GetImages failed:", err.Error())
ctx.JSON(http.StatusOK, []NPUImageINFO{})
} else {
for _, info := range images.Infos {
npuImageInfos = append(npuImageInfos, NPUImageINFO{
ID: info.ID,
Value: info.Name,
})
}
ctx.JSON(http.StatusOK, npuImageInfos)
}
}
func getImages(ctx *context.APIContext, opts *models.SearchImageOptions) {
page := ctx.QueryInt("page")
if page <= 0 {
page = 1
}

pageSize := ctx.QueryInt("pageSize")
if pageSize <= 0 {
pageSize = 15
}
opts.ListOptions = models.ListOptions{
Page: page,
PageSize: pageSize,
}
imageList, total, err := models.SearchImage(opts)
if err != nil {
log.Error("Can not get images:%v", err)
ctx.JSON(http.StatusOK, models.ImagesPageResult{
Count: 0,
Images: []*models.Image{},
})
} else {
ctx.JSON(http.StatusOK, models.ImagesPageResult{
Count: total,
Images: imageList,
})
}
}

func getUID(ctx *context.APIContext) int64 {
var uid int64 = -1
if ctx.IsSigned {
uid = ctx.User.ID
}
return uid
}

routers/api/v1/repo/mlops.go (+71, -0)

@@ -0,0 +1,71 @@
package repo

import (
"net/http"

"code.gitea.io/gitea/models"

"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/convert"
"code.gitea.io/gitea/modules/log"
api "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/routers/api/v1/utils"
)

//Users that a labeling task can be assigned to
func ListTagger(ctx *context.APIContext) {

taggers := make([]*api.Tagger, 0)
userRemember := make(map[string]string)
collaborators, err := ctx.Repo.Repository.GetCollaborators(utils.GetListOptions(ctx))
if err != nil {
log.Warn("ListCollaborators", err)
ctx.JSON(http.StatusOK, taggers)
return
}
for _, collaborator := range collaborators {
taggers = append(taggers, convert.ToTagger(collaborator.User))
userRemember[collaborator.User.Name] = ""
}

teams, err := ctx.Repo.Repository.GetRepoTeams()
if err != nil {
log.Warn("ListTeams", err)
ctx.JSON(http.StatusOK, taggers)
return
}

for _, team := range teams {
team.GetMembers(&models.SearchMembersOptions{})
for _, user := range team.Members {
if _, ok := userRemember[user.Name]; !ok {
taggers = append(taggers, convert.ToTagger(user))
userRemember[user.Name] = ""
}
}
}
if !ctx.Repo.Owner.IsOrganization() {
if _, ok := userRemember[ctx.Repo.Owner.Name]; !ok {
taggers = append(taggers, convert.ToTagger(ctx.Repo.Owner))

}
}
ctx.JSON(http.StatusOK, taggers)

}
func GetRight(ctx *context.APIContext) {
right := "none"

if ctx.IsUserRepoReaderSpecific(models.UnitTypeCode) {
right = "read"
}

if ctx.IsUserRepoWriter([]models.UnitType{models.UnitTypeCode}) || ctx.IsUserRepoAdmin() {
right = "write"
}

ctx.JSON(http.StatusOK, map[string]string{
"right": right,
})

}

routers/api/v1/repo/modelmanage.go (+106, -0)

@@ -0,0 +1,106 @@
package repo

import (
"net/http"

"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/storage"
routerRepo "code.gitea.io/gitea/routers/repo"
)

type FileInfo struct {
FileName string `json:"fileName"`
ModTime string `json:"modTime"`
IsDir bool `json:"isDir"`
Size int64 `json:"size"`
ParenDir string `json:"parenDir"`
UUID string `json:"uuid"`
}

func CreateNewModel(ctx *context.APIContext) {
log.Info("CreateNewModel by api.")
routerRepo.SaveModel(ctx.Context)
}

func ShowModelManageApi(ctx *context.APIContext) {
log.Info("ShowModelManageApi by api.")
routerRepo.ShowModelPageInfo(ctx.Context)
}

func DeleteModel(ctx *context.APIContext) {
log.Info("DeleteModel by api.")
routerRepo.DeleteModel(ctx.Context)
}

func DownloadModel(ctx *context.APIContext) {
log.Info("DownloadModel by api.")
routerRepo.DownloadMultiModelFile(ctx.Context)
}

func QueryModelById(ctx *context.APIContext) {
log.Info("QueryModelById by api.")
routerRepo.QueryModelById(ctx.Context)
}

func QueryModelListForPredict(ctx *context.APIContext) {
log.Info("QueryModelListForPredict by api.")
routerRepo.QueryModelListForPredict(ctx.Context)
}

func QueryTrainModelList(ctx *context.APIContext) {
result, err := routerRepo.QueryTrainModelFileById(ctx.Context)
if err != nil {
log.Info("query error." + err.Error())
}
re := convertFileFormat(result)
ctx.JSON(http.StatusOK, re)
}

func convertFileFormat(result []storage.FileInfo) []FileInfo {
re := make([]FileInfo, 0)
if result != nil {
for _, file := range result {
tmpFile := FileInfo{
FileName: file.FileName,
ModTime: file.ModTime,
IsDir: file.IsDir,
Size: file.Size,
ParenDir: file.ParenDir,
UUID: file.UUID,
}
re = append(re, tmpFile)
}
}
return re
}

func QueryModelFileForPredict(ctx *context.APIContext) {
log.Info("QueryModelFileForPredict by api.")
id := ctx.Query("id")
result := routerRepo.QueryModelFileByID(id)
re := convertFileFormat(result)
ctx.JSON(http.StatusOK, re)
}

func CreateModelConvert(ctx *context.APIContext) {
log.Info("CreateModelConvert by api.")
routerRepo.SaveModelConvert(ctx.Context)
}

func ShowModelConvertPage(ctx *context.APIContext) {
log.Info("ShowModelConvertPage by api.")
modelResult, count, err := routerRepo.GetModelConvertPageData(ctx.Context)
if err == nil {
mapInterface := make(map[string]interface{})
mapInterface["data"] = modelResult
mapInterface["count"] = count
ctx.JSON(http.StatusOK, mapInterface)
} else {
mapInterface := make(map[string]interface{})
mapInterface["data"] = nil
mapInterface["count"] = 0
ctx.JSON(http.StatusOK, mapInterface)
}

}

routers/api/v1/repo/spec.go (+36, -0)

@@ -0,0 +1,36 @@
package repo

import (
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/routers/response"
"code.gitea.io/gitea/services/cloudbrain/resource"
)

func GetResourceSpec(ctx *context.APIContext) {
jobType := ctx.Query("jobType")
computeResource := ctx.Query("compute")
cluster := ctx.Query("cluster")
aiCenterCode := ctx.Query("center")
if jobType == "" || computeResource == "" || cluster == "" {
log.Info("GetResourceSpec api.param error")
ctx.JSON(200, response.OuterBizError(response.PARAM_ERROR))
return
}
specs, err := resource.FindAvailableSpecs4Show(ctx.User.ID, models.FindSpecsOptions{
JobType: models.JobType(jobType),
ComputeResource: computeResource,
Cluster: cluster,
AiCenterCode: aiCenterCode,
})
if err != nil {
log.Error("GetResourceSpec api error. %v", err)
ctx.JSON(200, response.OuterServerError(err.Error()))
return
}

specMap := make(map[string]interface{}, 0)
specMap["specs"] = specs
ctx.JSON(200, response.OuterSuccessWithData(specMap))
}
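
A hedged client-side sketch of calling the new specification endpoint with the parameters GetResourceSpec reads (jobType, compute and cluster are required, center is optional); the host, path prefix, token handling and parameter values below are placeholders:

package main

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
)

func main() {
	q := url.Values{}
	q.Set("jobType", "TRAIN") // placeholder job type
	q.Set("compute", "GPU")   // placeholder compute resource
	q.Set("cluster", "OpenI") // placeholder cluster
	// "center" is optional and omitted here.

	resp, err := http.Get("https://example.com/api/v1/specification?" + q.Encode())
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(string(body)) // on success, the payload carries the available specs under "specs"
}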

routers/repo/ai_model_convert.go (+38, -23)

@@ -74,27 +74,27 @@ func SaveModelConvert(ctx *context.Context) {
log.Info("save model convert start.")
if !ctx.Repo.CanWrite(models.UnitTypeModelManage) {
ctx.JSON(200, map[string]string{
"result_code": "1",
"message": ctx.Tr("repo.modelconvert.manage.no_operate_right"),
"code": "1",
"msg": ctx.Tr("repo.modelconvert.manage.no_operate_right"),
})
return
}
name := ctx.Query("name")
desc := ctx.Query("desc")
modelId := ctx.Query("modelId")
modelPath := ctx.Query("ModelFile")
SrcEngine := ctx.QueryInt("SrcEngine")
modelPath := ctx.Query("modelFile")
SrcEngine := ctx.QueryInt("srcEngine")
InputShape := ctx.Query("inputshape")
InputDataFormat := ctx.Query("inputdataformat")
DestFormat := ctx.QueryInt("DestFormat")
NetOutputFormat := ctx.QueryInt("NetOutputFormat")
DestFormat := ctx.QueryInt("destFormat")
NetOutputFormat := ctx.QueryInt("netOutputFormat")

task, err := models.QueryModelById(modelId)
if err != nil {
log.Error("no such model!", err.Error())
ctx.JSON(200, map[string]string{
"result_code": "1",
"message": ctx.Tr("repo.modelconvert.manage.model_not_exist"),
"code": "1",
"msg": ctx.Tr("repo.modelconvert.manage.model_not_exist"),
})
return
}
@@ -105,8 +105,8 @@ func SaveModelConvert(ctx *context.Context) {
if convert.Name == name {
log.Info("convert.Name=" + name + " convert.id=" + convert.ID)
ctx.JSON(200, map[string]string{
"result_code": "1",
"message": ctx.Tr("repo.modelconvert.manage.create_error1"),
"code": "1",
"msg": ctx.Tr("repo.modelconvert.manage.create_error1"),
})
return
}
@@ -119,8 +119,8 @@ func SaveModelConvert(ctx *context.Context) {
if isRunningTask(convert.Status) {
log.Info("convert.Status=" + convert.Status + " convert.id=" + convert.ID)
ctx.JSON(200, map[string]string{
"result_code": "1",
"message": ctx.Tr("repo.modelconvert.manage.create_error2"),
"code": "1",
"msg": ctx.Tr("repo.modelconvert.manage.create_error2"),
})
return
}
@@ -150,7 +150,7 @@ func SaveModelConvert(ctx *context.Context) {
go goCreateTask(modelConvert, ctx, task)

ctx.JSON(200, map[string]string{
"result_code": "0",
"code": "0",
})
}

@@ -604,11 +604,11 @@ func StopModelConvert(ctx *context.Context) {
}

func ShowModelConvertInfo(ctx *context.Context) {
ctx.Data["ID"] = ctx.Query("ID")
ctx.Data["ID"] = ctx.Query("id")
ctx.Data["isModelManage"] = true
ctx.Data["ModelManageAccess"] = ctx.Repo.CanWrite(models.UnitTypeModelManage)

job, err := models.QueryModelConvertById(ctx.Query("ID"))
job, err := models.QueryModelConvertById(ctx.Query("id"))
if err == nil {
if job.TrainJobDuration == "" {
job.TrainJobDuration = "00:00:00"
@@ -707,6 +707,26 @@ func ShowModelConvertPageInfo(ctx *context.Context) {
ctx.NotFound(ctx.Req.URL.RequestURI(), nil)
return
}
page := ctx.QueryInt("page")
if page <= 0 {
page = 1
}
pageSize := ctx.QueryInt("pageSize")
if pageSize <= 0 {
pageSize = setting.UI.IssuePagingNum
}
modelResult, count, err := GetModelConvertPageData(ctx)
if err == nil {
pager := context.NewPagination(int(count), page, pageSize, 5)
ctx.Data["Page"] = pager
ctx.Data["Tasks"] = modelResult
ctx.Data["MODEL_CONVERT_COUNT"] = count
} else {
ctx.ServerError("Query data error.", err)
}
}

func GetModelConvertPageData(ctx *context.Context) ([]*models.AiModelConvert, int64, error) {
page := ctx.QueryInt("page")
if page <= 0 {
page = 1
@@ -725,10 +745,8 @@ func ShowModelConvertPageInfo(ctx *context.Context) {
})
if err != nil {
log.Info("query db error." + err.Error())
ctx.ServerError("Cloudbrain", err)
return
return nil, 0, err
}
ctx.Data["MODEL_CONVERT_COUNT"] = count
userIds := make([]int64, len(modelResult))
for i, model := range modelResult {
model.IsCanOper = isOper(ctx, model.UserId)
@@ -743,10 +761,7 @@ func ShowModelConvertPageInfo(ctx *context.Context) {
model.UserRelAvatarLink = value.RelAvatarLink()
}
}
pager := context.NewPagination(int(count), page, pageSize, 5)
ctx.Data["Page"] = pager
ctx.Data["Tasks"] = modelResult

return modelResult, count, nil
}

func ModelConvertDownloadModel(ctx *context.Context) {
@@ -757,7 +772,7 @@ func ModelConvertDownloadModel(ctx *context.Context) {
ctx.ServerError("Not found task.", err)
return
}
AllDownload := ctx.QueryBool("AllDownload")
AllDownload := ctx.QueryBool("allDownload")
if AllDownload {
if job.IsGpuTrainTask() {
path := setting.CBCodePathPrefix + job.ID + "/model/"


routers/repo/ai_model_manage.go (+112, -49)

@@ -34,13 +34,13 @@ const (
STATUS_ERROR = 2
)

func saveModelByParameters(jobId string, versionName string, name string, version string, label string, description string, engine int, ctx *context.Context) error {
func saveModelByParameters(jobId string, versionName string, name string, version string, label string, description string, engine int, ctx *context.Context) (string, error) {
aiTask, err := models.GetCloudbrainByJobIDAndVersionName(jobId, versionName)
if err != nil {
aiTask, err = models.GetRepoCloudBrainByJobID(ctx.Repo.Repository.ID, jobId)
if err != nil {
log.Info("query task error." + err.Error())
return err
return "", err
} else {
log.Info("query gpu train task.")
}
@@ -56,7 +56,7 @@ func saveModelByParameters(jobId string, versionName string, name string, versio
if len(aimodels) > 0 {
for _, model := range aimodels {
if model.Version == version {
return errors.New(ctx.Tr("repo.model.manage.create_error"))
return "", errors.New(ctx.Tr("repo.model.manage.create_error"))
}
if model.New == MODEL_LATEST {
lastNewModelId = model.ID
@@ -111,7 +111,7 @@ func saveModelByParameters(jobId string, versionName string, name string, versio

err = models.SaveModelToDb(model)
if err != nil {
return err
return "", err
}
if len(lastNewModelId) > 0 {
//udpate status and version count
@@ -134,7 +134,7 @@ func saveModelByParameters(jobId string, versionName string, name string, versio

log.Info("save model end.")
notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, id, name, models.ActionCreateNewModelTask)
return nil
return id, nil
}

func asyncToCopyModel(aiTask *models.Cloudbrain, id string, modelSelectedFile string) {
@@ -173,7 +173,7 @@ func SaveNewNameModel(ctx *context.Context) {
ctx.Error(403, ctx.Tr("repo.model_noright"))
return
}
name := ctx.Query("Name")
name := ctx.Query("name")
if name == "" {
ctx.Error(500, fmt.Sprintf("name or version is null."))
return
@@ -195,38 +195,42 @@ func SaveModel(ctx *context.Context) {
return
}
log.Info("save model start.")
JobId := ctx.Query("JobId")
VersionName := ctx.Query("VersionName")
name := ctx.Query("Name")
version := ctx.Query("Version")
label := ctx.Query("Label")
description := ctx.Query("Description")
engine := ctx.QueryInt("Engine")
JobId := ctx.Query("jobId")
VersionName := ctx.Query("versionName")
name := ctx.Query("name")
version := ctx.Query("version")
label := ctx.Query("label")
description := ctx.Query("description")
engine := ctx.QueryInt("engine")
modelSelectedFile := ctx.Query("modelSelectedFile")
log.Info("engine=" + fmt.Sprint(engine) + " modelSelectedFile=" + modelSelectedFile)

re := map[string]string{
"code": "-1",
}
if JobId == "" || VersionName == "" {
ctx.Error(500, fmt.Sprintf("JobId or VersionName is null."))
re["msg"] = "JobId or VersionName is null."
ctx.JSON(200, re)
return
}
if modelSelectedFile == "" {
ctx.Error(500, fmt.Sprintf("Not selected model file."))
re["msg"] = "Not selected model file."
ctx.JSON(200, re)
return
}

if name == "" || version == "" {
ctx.Error(500, fmt.Sprintf("name or version is null."))
re["msg"] = "name or version is null."
ctx.JSON(200, re)
return
}

err := saveModelByParameters(JobId, VersionName, name, version, label, description, engine, ctx)

id, err := saveModelByParameters(JobId, VersionName, name, version, label, description, engine, ctx)
if err != nil {
log.Info("save model error." + err.Error())
ctx.Error(500, fmt.Sprintf("save model error. %v", err))
return
re["msg"] = err.Error()
} else {
re["code"] = "0"
re["id"] = id
}
ctx.Status(200)
ctx.JSON(200, re)
log.Info("save model end.")
}
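Note: SaveModel now always answers HTTP 200 and reports the outcome in a small JSON map: code "-1" plus msg on failure, or code "0" plus the id of the newly saved model on success. A minimal caller-side sketch of decoding that payload (helper and type names are illustrative, not part of this patch; assumes encoding/json and errors are imported):

    // Hypothetical helper that decodes the SaveModel response shown above.
    type saveModelResp struct {
        Code string `json:"code"` // "0" on success, "-1" otherwise
        Msg  string `json:"msg"`  // error text when Code != "0"
        ID   string `json:"id"`   // id of the newly created model on success
    }

    func parseSaveModelResp(body []byte) (string, error) {
        var resp saveModelResp
        if err := json.Unmarshal(body, &resp); err != nil {
            return "", err
        }
        if resp.Code != "0" {
            return "", errors.New(resp.Msg)
        }
        return resp.ID, nil
    }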

@@ -291,13 +295,17 @@ func downloadModelFromCloudBrainOne(modelUUID string, jobName string, parentDir

func DeleteModel(ctx *context.Context) {
log.Info("delete model start.")
id := ctx.Query("ID")
id := ctx.Query("id")
err := deleteModelByID(ctx, id)
if err != nil {
ctx.JSON(500, err.Error())
re := map[string]string{
"code": "-1",
}
re["msg"] = err.Error()
ctx.JSON(200, re)
} else {
ctx.JSON(200, map[string]string{
"result_code": "0",
"code": "0",
})
}
}
@@ -354,7 +362,7 @@ func QueryModelByParameters(repoId int64, page int) ([]*models.AiModelManage, in

func DownloadMultiModelFile(ctx *context.Context) {
log.Info("DownloadMultiModelFile start.")
id := ctx.Query("ID")
id := ctx.Query("id")
log.Info("id=" + id)
task, err := models.QueryModelById(id)
if err != nil {
@@ -487,7 +495,10 @@ func downloadFromCloudBrainTwo(path string, task *models.AiModelManage, ctx *con

func QueryTrainJobVersionList(ctx *context.Context) {
log.Info("query train job version list. start.")
JobID := ctx.Query("JobID")
JobID := ctx.Query("jobId")
if JobID == "" {
JobID = ctx.Query("JobId")
}

VersionListTasks, count, err := models.QueryModelTrainJobVersionList(JobID)

@@ -515,20 +526,33 @@ func QueryTrainJobList(ctx *context.Context) {

}

func QueryTrainModelList(ctx *context.Context) {
log.Info("query train job list. start.")
jobName := ctx.Query("jobName")
taskType := ctx.QueryInt("type")
VersionName := ctx.Query("VersionName")
func QueryTrainModelFileById(ctx *context.Context) ([]storage.FileInfo, error) {
JobID := ctx.Query("jobId")
VersionListTasks, count, err := models.QueryModelTrainJobVersionList(JobID)
if err == nil {
if count == 1 {
task := VersionListTasks[0]
jobName := task.JobName
taskType := task.Type
VersionName := task.VersionName
modelDbResult, err := getModelFromObjectSave(jobName, taskType, VersionName)
return modelDbResult, err
}
}
log.Info("get TypeCloudBrainTwo TrainJobListModel failed:", err)
return nil, errors.New("Not found task.")
}

func getModelFromObjectSave(jobName string, taskType int, VersionName string) ([]storage.FileInfo, error) {
if taskType == models.TypeCloudBrainTwo {
objectkey := path.Join(setting.TrainJobModelPath, jobName, setting.OutPutPath, VersionName) + "/"
modelDbResult, err := storage.GetAllObjectByBucketAndPrefix(setting.Bucket, objectkey)
log.Info("bucket=" + setting.Bucket + " objectkey=" + objectkey)
if err != nil {
log.Info("get TypeCloudBrainTwo TrainJobListModel failed:", err)
return nil, err
} else {
ctx.JSON(200, modelDbResult)
return
return modelDbResult, nil
}
} else if taskType == models.TypeCloudBrainOne {
modelSrcPrefix := setting.CBCodePathPrefix + jobName + "/model/"
@@ -536,12 +560,30 @@ func QueryTrainModelList(ctx *context.Context) {
modelDbResult, err := storage.GetAllObjectByBucketAndPrefixMinio(bucketName, modelSrcPrefix)
if err != nil {
log.Info("get TypeCloudBrainOne TrainJobListModel failed:", err)
return nil, err
} else {
ctx.JSON(200, modelDbResult)
return
return modelDbResult, nil
}
}
ctx.JSON(200, "")
return nil, errors.New("Not support.")
}

func QueryTrainModelList(ctx *context.Context) {
log.Info("query train job list. start.")
jobName := ctx.Query("jobName")
taskType := ctx.QueryInt("type")
VersionName := ctx.Query("versionName")
if VersionName == "" {
VersionName = ctx.Query("VersionName")
}
modelDbResult, err := getModelFromObjectSave(jobName, taskType, VersionName)
if err != nil {
log.Info("get TypeCloudBrainTwo TrainJobListModel failed:", err)
ctx.JSON(200, "")
} else {
ctx.JSON(200, modelDbResult)
return
}
}

func DownloadSingleModelFile(ctx *context.Context) {
@@ -612,7 +654,7 @@ func DownloadSingleModelFile(ctx *context.Context) {
}

func ShowModelInfo(ctx *context.Context) {
ctx.Data["ID"] = ctx.Query("ID")
ctx.Data["ID"] = ctx.Query("id")
ctx.Data["name"] = ctx.Query("name")
ctx.Data["isModelManage"] = true
ctx.Data["ModelManageAccess"] = ctx.Repo.CanWrite(models.UnitTypeModelManage)
@@ -620,6 +662,19 @@ func ShowModelInfo(ctx *context.Context) {
ctx.HTML(200, tplModelInfo)
}

func QueryModelById(ctx *context.Context) {
id := ctx.Query("id")
model, err := models.QueryModelById(id)
if err == nil {
model.IsCanOper = isOper(ctx, model.UserId)
model.IsCanDelete = isCanDelete(ctx, model.UserId)
removeIpInfo(model)
ctx.JSON(http.StatusOK, model)
} else {
ctx.JSON(http.StatusNotFound, nil)
}
}

func ShowSingleModel(ctx *context.Context) {
name := ctx.Query("name")

@@ -828,8 +883,8 @@ func ModifyModel(id string, description string) error {

func ModifyModelInfo(ctx *context.Context) {
log.Info("modify model start.")
id := ctx.Query("ID")
description := ctx.Query("Description")
id := ctx.Query("id")
description := ctx.Query("description")

task, err := models.QueryModelById(id)
if err != nil {
@@ -894,28 +949,36 @@ func QueryModelListForPredict(ctx *context.Context) {
}

func QueryModelFileForPredict(ctx *context.Context) {
id := ctx.Query("ID")
id := ctx.Query("id")
if id == "" {
id = ctx.Query("ID")
}
ctx.JSON(http.StatusOK, QueryModelFileByID(id))
}

func QueryModelFileByID(id string) []storage.FileInfo {
model, err := models.QueryModelById(id)
if err == nil {
if model.Type == models.TypeCloudBrainTwo {
prefix := model.Path[len(setting.Bucket)+1:]
fileinfos, _ := storage.GetAllObjectByBucketAndPrefix(setting.Bucket, prefix)
ctx.JSON(http.StatusOK, fileinfos)
return fileinfos
} else if model.Type == models.TypeCloudBrainOne {
prefix := model.Path[len(setting.Attachment.Minio.Bucket)+1:]
fileinfos, _ := storage.GetAllObjectByBucketAndPrefixMinio(setting.Attachment.Minio.Bucket, prefix)
ctx.JSON(http.StatusOK, fileinfos)
return fileinfos
}
} else {
log.Error("no such model!", err.Error())
ctx.ServerError("no such model:", err)
return
}
return nil
}

func QueryOneLevelModelFile(ctx *context.Context) {
id := ctx.Query("ID")
id := ctx.Query("id")
if id == "" {
id = ctx.Query("ID")
}
parentDir := ctx.Query("parentDir")
model, err := models.QueryModelById(id)
if err != nil {


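Note: throughout this file the handlers switch to lowerCamelCase query parameters (id, jobId, versionName, description, allDownload), and several of them keep the old capitalized names as a fallback. A tiny helper capturing that fallback pattern could look like the sketch below (queryWithFallback is hypothetical, not part of the patch):

    // Hypothetical helper mirroring the "new name first, old name second" lookups above.
    func queryWithFallback(ctx *context.Context, newKey, oldKey string) string {
        if v := ctx.Query(newKey); v != "" {
            return v
        }
        return ctx.Query(oldKey)
    }

    // usage: id := queryWithFallback(ctx, "id", "ID")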
+ 2
- 2
routers/repo/aisafety.go View File

@@ -804,7 +804,7 @@ func createForNPU(ctx *context.Context, jobName string) error {
JobType: string(models.JobTypeModelSafety),
}

err = modelarts.GenerateInferenceJob(ctx, req)
_, err = modelarts.GenerateInferenceJob(ctx, req)
if err != nil {
log.Error("GenerateTrainJob failed:%v", err.Error())
return err
@@ -901,7 +901,7 @@ func createForGPU(ctx *context.Context, jobName string) error {
LabelName: evaluationIndex,
}

err = cloudbrain.GenerateTask(req)
_, err = cloudbrain.GenerateTask(req)
if err != nil {
return err
}


+ 4
- 4
routers/repo/cloudbrain.go View File

@@ -398,7 +398,7 @@ func cloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) {

}

err = cloudbrain.GenerateTask(req)
_, err = cloudbrain.GenerateTask(req)
if err != nil {
cloudBrainNewDataPrepare(ctx, jobType)
ctx.RenderWithErr(err.Error(), tpl, &form)
@@ -584,7 +584,7 @@ func CloudBrainInferenceJobCreate(ctx *context.Context, form auth.CreateCloudBra
Spec: spec,
}

err = cloudbrain.GenerateTask(req)
_, err = cloudbrain.GenerateTask(req)
if err != nil {
cloudBrainNewDataPrepare(ctx, jobType)
ctx.RenderWithErr(err.Error(), tpl, &form)
@@ -2487,7 +2487,7 @@ func BenchMarkAlgorithmCreate(ctx *context.Context, form auth.CreateCloudBrainFo
Spec: spec,
}

err = cloudbrain.GenerateTask(req)
_, err = cloudbrain.GenerateTask(req)
if err != nil {
cloudBrainNewDataPrepare(ctx, jobType)
ctx.RenderWithErr(err.Error(), tplCloudBrainBenchmarkNew, &form)
@@ -2641,7 +2641,7 @@ func ModelBenchmarkCreate(ctx *context.Context, form auth.CreateCloudBrainForm)
Spec: spec,
}

err = cloudbrain.GenerateTask(req)
_, err = cloudbrain.GenerateTask(req)
if err != nil {
cloudBrainNewDataPrepare(ctx, jobType)
ctx.RenderWithErr(err.Error(), tpl, &form)

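Note: the only change at these call sites is the new return value. cloudbrain.GenerateTask, modelarts.GenerateTrainJob/GenerateInferenceJob and grampus.GenerateTrainJob now return the created job id alongside the error, and the page-rendering handlers simply discard it with a blank identifier. A minimal sketch of the new usage where the id is actually wanted (request construction elided):

    // Sketch only: req is a cloudbrain.GenerateCloudBrainTaskReq prepared as in the handlers above.
    jobId, err := cloudbrain.GenerateTask(req)
    if err != nil {
        return err
    }
    log.Info("created cloudbrain task %s", jobId) // the id the new API handlers hand back to callers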

+ 2
- 318
routers/repo/dataset.go View File

@@ -47,8 +47,8 @@ func newFilterPrivateAttachments(ctx *context.Context, list []*models.Attachment
permission := false
if !permission && ctx.User != nil {
isCollaborator, _ := repo.IsCollaborator(ctx.User.ID)
isInRepoTeam,_:=repo.IsInRepoTeam(ctx.User.ID)
if isCollaborator ||isInRepoTeam {
isInRepoTeam, _ := repo.IsInRepoTeam(ctx.User.ID)
if isCollaborator || isInRepoTeam {
log.Info("Collaborator user may visit the attach.")
permission = true
}
@@ -349,96 +349,6 @@ func DatasetAction(ctx *context.Context) {

}

func CurrentRepoDataset(ctx *context.Context) {
page := ctx.QueryInt("page")
cloudbrainType := ctx.QueryInt("type")
keyword := strings.Trim(ctx.Query("q"), " ")

repo := ctx.Repo.Repository
var datasetIDs []int64
dataset, err := models.GetDatasetByRepo(repo)
if err != nil {
ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("GetDatasetByRepo failed", err)))
return
}
datasetIDs = append(datasetIDs, dataset.ID)
datasets, count, err := models.Attachments(&models.AttachmentsOptions{
ListOptions: models.ListOptions{
Page: page,
PageSize: setting.UI.DatasetPagingNum,
},
Keyword: keyword,
NeedDatasetIDs: true,
DatasetIDs: datasetIDs,
Type: cloudbrainType,
NeedIsPrivate: false,
JustNeedZipFile: true,
NeedRepoInfo: true,
})
if err != nil {
ctx.ServerError("datasets", err)
return
}

data, err := json.Marshal(datasets)
if err != nil {
log.Error("json.Marshal failed:", err.Error())
ctx.JSON(200, map[string]string{
"result_code": "-1",
"error_msg": err.Error(),
"data": "",
})
return
}
ctx.JSON(200, map[string]string{
"result_code": "0",
"data": string(data),
"count": strconv.FormatInt(count, 10),
})
}

func MyDatasets(ctx *context.Context) {
page := ctx.QueryInt("page")
cloudbrainType := ctx.QueryInt("type")
keyword := strings.Trim(ctx.Query("q"), " ")

uploaderID := ctx.User.ID
datasets, count, err := models.Attachments(&models.AttachmentsOptions{
ListOptions: models.ListOptions{
Page: page,
PageSize: setting.UI.DatasetPagingNum,
},
Keyword: keyword,
NeedDatasetIDs: false,
UploaderID: uploaderID,
Type: cloudbrainType,
NeedIsPrivate: false,
JustNeedZipFile: true,
NeedRepoInfo: true,
RecommendOnly: ctx.QueryBool("recommend"),
})
if err != nil {
ctx.ServerError("datasets", err)
return
}

data, err := json.Marshal(datasets)
if err != nil {
log.Error("json.Marshal failed:", err.Error())
ctx.JSON(200, map[string]string{
"result_code": "-1",
"error_msg": err.Error(),
"data": "",
})
return
}
ctx.JSON(200, map[string]string{
"result_code": "0",
"data": string(data),
"count": strconv.FormatInt(count, 10),
})
}

func datasetMultiple(ctx *context.Context, opts *models.SearchDatasetOptions) {
page := ctx.QueryInt("page")
keyword := strings.Trim(ctx.Query("q"), " ")
@@ -593,180 +503,6 @@ func ReferenceDatasetData(ctx *context.Context) {

}

func PublicDataset(ctx *context.Context) {
page := ctx.QueryInt("page")
cloudbrainType := ctx.QueryInt("type")
keyword := strings.Trim(ctx.Query("q"), " ")

datasets, count, err := models.Attachments(&models.AttachmentsOptions{
ListOptions: models.ListOptions{
Page: page,
PageSize: setting.UI.DatasetPagingNum,
},
Keyword: keyword,
NeedDatasetIDs: false,
NeedIsPrivate: true,
IsPrivate: false,
Type: cloudbrainType,
JustNeedZipFile: true,
NeedRepoInfo: true,
RecommendOnly: ctx.QueryBool("recommend"),
})
if err != nil {
ctx.ServerError("datasets", err)
return
}

data, err := json.Marshal(datasets)
if err != nil {
log.Error("json.Marshal failed:", err.Error())
ctx.JSON(200, map[string]string{
"result_code": "-1",
"error_msg": err.Error(),
"data": "",
})
return
}
ctx.JSON(200, map[string]string{
"result_code": "0",
"data": string(data),
"count": strconv.FormatInt(count, 10),
})
}

func MyFavoriteDataset(ctx *context.Context) {
UserId := ctx.User.ID
cloudbrainType := ctx.QueryInt("type")
keyword := strings.Trim(ctx.Query("q"), " ")
var NotColDatasetIDs []int64
var IsColDatasetIDs []int64
datasetStars, err := models.GetDatasetStarByUser(ctx.User)
if err != nil {
ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("GetDatasetStarByUser failed", err)))
log.Error("GetDatasetStarByUser failed:", err.Error())
ctx.JSON(200, map[string]string{
"result_code": "-1",
"error_msg": err.Error(),
"data": "",
})
return
}
//If the dataset has been deleted, it will not be counted
for _, datasetStar := range datasetStars {
IsExist, repo, dataset, err := IsDatasetStarExist(datasetStar)
if err != nil {
log.Error("IsDatasetStarExist error:", err.Error())
}
if IsExist {
DatasetIsCollaborator := DatasetIsCollaborator(ctx, dataset)
if repo.OwnerID == ctx.User.ID || DatasetIsCollaborator {
IsColDatasetIDs = append(IsColDatasetIDs, datasetStar.DatasetID)
} else {
NotColDatasetIDs = append(NotColDatasetIDs, datasetStar.DatasetID)
}
}
}

NotColDatasets, NotColcount, err := models.Attachments(&models.AttachmentsOptions{
Keyword: keyword,
NeedDatasetIDs: true,
DatasetIDs: NotColDatasetIDs,
NeedIsPrivate: true,
IsPrivate: false,
Type: cloudbrainType,
JustNeedZipFile: true,
NeedRepoInfo: true,
RecommendOnly: ctx.QueryBool("recommend"),
UserId: UserId,
})
if err != nil {
ctx.ServerError("datasets", err)
return
}
//If is collaborator, there is no need to determine whether the dataset is private or public
IsColDatasets, IsColcount, err := models.Attachments(&models.AttachmentsOptions{
Keyword: keyword,
NeedDatasetIDs: true,
DatasetIDs: IsColDatasetIDs,
NeedIsPrivate: false,
Type: cloudbrainType,
JustNeedZipFile: true,
NeedRepoInfo: true,
RecommendOnly: ctx.QueryBool("recommend"),
UserId: UserId,
})
if err != nil {
ctx.ServerError("datasets", err)
return
}
for _, NotColDataset := range NotColDatasets {
IsColDatasets = append(IsColDatasets, NotColDataset)
}
datasets := IsColDatasets
count := NotColcount + IsColcount
sort.Slice(datasets, func(i, j int) bool {
return datasets[i].Attachment.CreatedUnix > datasets[j].Attachment.CreatedUnix
})

page := ctx.QueryInt("page")
if page <= 0 {
page = 1
}
pagesize := ctx.QueryInt("pagesize")
if pagesize <= 0 {
pagesize = 5
}
pageDatasetsInfo := getPageDatasets(datasets, page, pagesize)
if pageDatasetsInfo == nil {
ctx.JSON(200, map[string]string{
"result_code": "0",
"data": "[]",
"count": strconv.FormatInt(count, 10),
})
return
}
data, err := json.Marshal(pageDatasetsInfo)
log.Info("data:", data)
if err != nil {
log.Error("json.Marshal failed:", err.Error())
ctx.JSON(200, map[string]string{
"result_code": "-1",
"error_msg": err.Error(),
"data": "",
})
return
}
ctx.JSON(200, map[string]string{
"result_code": "0",
"data": string(data),
"count": strconv.FormatInt(count, 10),
})

}
func getPageDatasets(AttachmentInfos []*models.AttachmentInfo, page int, pagesize int) []*models.AttachmentInfo {
begin := (page - 1) * pagesize
end := (page) * pagesize

if begin > len(AttachmentInfos)-1 {
return nil
}
if end > len(AttachmentInfos)-1 {
return AttachmentInfos[begin:]
} else {
return AttachmentInfos[begin:end]
}

}
func getTotalPage(total int64, pageSize int) int {

another := 0
if int(total)%pageSize != 0 {
another = 1
}
return int(total)/pageSize + another

}

func GetDatasetStatus(ctx *context.Context) {

var (
@@ -791,55 +527,3 @@ func GetDatasetStatus(ctx *context.Context) {
"AttachmentStatus": fmt.Sprint(attachment.DecompressState),
})
}
func DatasetIsCollaborator(ctx *context.Context, dataset *models.Dataset) bool {
repo, err := models.GetRepositoryByID(dataset.RepoID)
if err != nil {
log.Error("query repo error:", err.Error())
} else {
repo.GetOwner()
if ctx.User != nil {
if repo.Owner.IsOrganization() {
org := repo.Owner
org.Teams, err = org.GetUserTeams(ctx.User.ID)
if err != nil {
log.Error("GetUserTeams error:", err.Error())
return false
}
if org.IsUserPartOfOrg(ctx.User.ID) {
for _, t := range org.Teams {
if t.IsMember(ctx.User.ID) && t.HasRepository(repo.ID) {
return true
}
}
isOwner, _ := models.IsOrganizationOwner(repo.OwnerID, ctx.User.ID)
if isOwner {
return isOwner
}
return false
}
}

isCollaborator, _ := repo.IsCollaborator(ctx.User.ID)
if isCollaborator {
return true
}
}
}

return false
}
func IsDatasetStarExist(datasetStar *models.DatasetStar) (bool, *models.Repository, *models.Dataset, error) {
dataset, err := models.GetDatasetByID(datasetStar.DatasetID)
if err != nil {
log.Error("query dataset error:", err.Error())
return false, nil, nil, err
} else {
repo, err := models.GetRepositoryByID(dataset.RepoID)
if err != nil {
log.Error("GetRepositoryByID error:", err.Error())
return false, nil, nil, err
}
return true, repo, dataset, nil
}

}

+ 3
- 50
routers/repo/grampus.go View File

@@ -474,7 +474,7 @@ func grampusTrainJobGpuCreate(ctx *context.Context, form auth.CreateGrampusTrain

}

err = grampus.GenerateTrainJob(ctx, req)
_, err = grampus.GenerateTrainJob(ctx, req)
if err != nil {
log.Error("GenerateTrainJob failed:%v", err.Error(), ctx.Data["MsgID"])
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeGPU)
@@ -509,28 +509,6 @@ func GrampusTrainJobVersionCreate(ctx *context.Context, form auth.CreateGrampusT

}

func checkSpecialPool(ctx *context.Context, resourceType string) string {
grampus.InitSpecialPool()
if grampus.SpecialPools != nil {
for _, pool := range grampus.SpecialPools.Pools {

if pool.IsExclusive && pool.Type == resourceType {

org, _ := models.GetOrgByName(pool.Org)
if org != nil {
isOrgMember, _ := models.IsOrganizationMember(org.ID, ctx.User.ID)
if !isOrgMember {
return ctx.Tr("repo.grampus.no_operate_right")
}
}
}

}

}
return ""
}

func GrampusTrainJobNpuCreate(ctx *context.Context, form auth.CreateGrampusTrainJobForm) {
ctx.Data["IsCreate"] = true
grampusTrainJobNpuCreate(ctx, form)
@@ -733,7 +711,7 @@ func grampusTrainJobNpuCreate(ctx *context.Context, form auth.CreateGrampusTrain
req.PreTrainModelPath = preTrainModelPath
}

err = grampus.GenerateTrainJob(ctx, req)
_, err = grampus.GenerateTrainJob(ctx, req)
if err != nil {
log.Error("GenerateTrainJob failed:%v", err.Error())
grampusTrainJobNewDataPrepare(ctx, grampus.ProcessorTypeNPU)
@@ -1003,7 +981,7 @@ func generateCommand(repoName, processorType, codeRemotePath, dataRemotePath, bo
if processorType == grampus.ProcessorTypeNPU {
//no need to process
} else if processorType == grampus.ProcessorTypeGPU {
unZipDatasetCommand := generateDatasetUnzipCommand(datasetName)
unZipDatasetCommand := cloudbrainTask.GenerateDatasetUnzipCommand(datasetName)
commandUnzip := "cd " + workDir + "code;unzip -q master.zip;rm -f master.zip;echo \"start to unzip dataset\";cd " + workDir + "dataset;" + unZipDatasetCommand
command += commandUnzip
}
@@ -1077,31 +1055,6 @@ func processPretrainModelParameter(pretrainModelPath string, pretrainModelFileNa
return commandDownloadTemp
}

func generateDatasetUnzipCommand(datasetName string) string {
var unZipDatasetCommand string

datasetNameArray := strings.Split(datasetName, ";")
if len(datasetNameArray) == 1 { //single dataset
unZipDatasetCommand = "unzip -q '" + datasetName + "';"
if strings.HasSuffix(datasetNameArray[0], ".tar.gz") {
unZipDatasetCommand = "tar --strip-components=1 -zxvf '" + datasetName + "';"
}
unZipDatasetCommand += "rm -f '" + datasetName + "';"

} else { //multiple datasets
for _, datasetNameTemp := range datasetNameArray {
if strings.HasSuffix(datasetNameTemp, ".tar.gz") {
unZipDatasetCommand = unZipDatasetCommand + "tar -zxvf '" + datasetNameTemp + "';"
} else {
unZipDatasetCommand = unZipDatasetCommand + "unzip -q '" + datasetNameTemp + "' -d './" + strings.TrimSuffix(datasetNameTemp, ".zip") + "';"
}
unZipDatasetCommand += "rm -f '" + datasetNameTemp + "';"
}

}
return unZipDatasetCommand
}

func downloadZipCode(ctx *context.Context, codePath, branchName string) error {
archiveType := git.ZIP
archivePath := codePath

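Note: generateDatasetUnzipCommand is removed here; the GPU branch above now calls the exported cloudbrainTask.GenerateDatasetUnzipCommand instead, and judging from the deleted body the generated shell fragment should be unchanged. For example (file names hypothetical):

    // Expected results of the dataset-unzip command builder, per the removed logic above:
    // single dataset "mnist.zip"             -> "unzip -q 'mnist.zip';rm -f 'mnist.zip';"
    // single dataset "imagenet.tar.gz"       -> "tar --strip-components=1 -zxvf 'imagenet.tar.gz';rm -f 'imagenet.tar.gz';"
    // multi dataset  "mnist.zip;coco.tar.gz" -> "unzip -q 'mnist.zip' -d './mnist';rm -f 'mnist.zip';tar -zxvf 'coco.tar.gz';rm -f 'coco.tar.gz';"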

+ 2
- 2
routers/repo/modelarts.go View File

@@ -1230,7 +1230,7 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm)
return
}

err = modelarts.GenerateTrainJob(ctx, req)
_, err = modelarts.GenerateTrainJob(ctx, req)
if err != nil {
log.Error("GenerateTrainJob failed:%v", err.Error())
trainJobNewDataPrepare(ctx)
@@ -2205,7 +2205,7 @@ func InferenceJobCreate(ctx *context.Context, form auth.CreateModelArtsInference
req.UserCommand = userCommand
req.UserImageUrl = userImageUrl

err = modelarts.GenerateInferenceJob(ctx, req)
_, err = modelarts.GenerateInferenceJob(ctx, req)
if err != nil {
log.Error("GenerateTrainJob failed:%v", err.Error())
inferenceJobErrorNewDataPrepare(ctx, form)


+ 30
- 0
routers/response/api_response.go View File

@@ -0,0 +1,30 @@
package response

type AiforgeOuterResponse struct {
Code int `json:"code"`
Msg string `json:"msg"`
Data interface{} `json:"data"`
}

func OuterSuccess() *AiforgeOuterResponse {
return &AiforgeOuterResponse{Code: RESPONSE_CODE_SUCCESS, Msg: RESPONSE_MSG_SUCCESS}
}

func OuterError(code int, msg string) *AiforgeOuterResponse {
return &AiforgeOuterResponse{Code: code, Msg: msg}
}

func OuterServerError(msg string) *AiforgeOuterResponse {
return &AiforgeOuterResponse{Code: RESPONSE_CODE_ERROR_DEFAULT, Msg: msg}
}

func OuterBizError(err *BizError) *AiforgeOuterResponse {
return &AiforgeOuterResponse{Code: err.Code, Msg: err.Err}
}

func OuterSuccessWithData(data interface{}) *AiforgeOuterResponse {
return &AiforgeOuterResponse{Code: RESPONSE_CODE_SUCCESS, Msg: RESPONSE_MSG_SUCCESS, Data: data}
}
func OuterErrorWithData(code int, msg string, data interface{}) *AiforgeOuterResponse {
return &AiforgeOuterResponse{Code: code, Msg: msg, Data: data}
}
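Note: AiforgeOuterResponse mirrors the existing AiforgeResponse but, judging by its name, is intended for the outer (openly exposed) API surface. A minimal handler-side sketch using the constructors above (handler name and data loading are illustrative):

    // Hypothetical handler wrapping its payload in the outer response envelope.
    func listModelsOuter(ctx *context.Context) {
        data, err := loadModels(ctx) // placeholder for the handler's actual data access
        if err != nil {
            ctx.JSON(http.StatusOK, response.OuterServerError(err.Error()))
            return
        }
        ctx.JSON(http.StatusOK, response.OuterSuccessWithData(data))
    }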

+ 5
- 1
routers/response/response.go View File

@@ -24,10 +24,14 @@ func ServerError(msg string) *AiforgeResponse {
return &AiforgeResponse{Code: RESPONSE_CODE_ERROR_DEFAULT, Msg: msg}
}

func ResponseError(err *BizError) *AiforgeResponse {
func ResponseBizError(err *BizError) *AiforgeResponse {
return &AiforgeResponse{Code: err.Code, Msg: err.Err}
}

func ResponseError(err error) *AiforgeResponse {
return &AiforgeResponse{Code: RESPONSE_CODE_ERROR_DEFAULT, Msg: err.Error()}
}

func SuccessWithData(data interface{}) *AiforgeResponse {
return &AiforgeResponse{Code: RESPONSE_CODE_SUCCESS, Msg: RESPONSE_MSG_SUCCESS, Data: data}
}
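Note: renaming the typed variant to ResponseBizError frees the ResponseError name for plain errors: the former keeps the code carried by the *BizError, the latter falls back to the default error code with the error text as msg. Sketch (the err value is illustrative):

    // Typed business error: the client receives code 1002.
    ctx.JSON(http.StatusOK, response.ResponseBizError(response.SPECIFICATION_NOT_EXIST))

    // Arbitrary error: the client receives RESPONSE_CODE_ERROR_DEFAULT with err.Error() as msg.
    ctx.JSON(http.StatusOK, response.ResponseError(err))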


+ 2
- 2
routers/response/response_list.go View File

@@ -1,6 +1,7 @@
package response

//repo response
var PARAM_ERROR = &BizError{Code: 9001, Err: "param error"}

var RESOURCE_QUEUE_NOT_AVAILABLE = &BizError{Code: 1001, Err: "resource queue not available"}
var SPECIFICATION_NOT_EXIST = &BizError{Code: 1002, Err: "specification not exist"}
var SPECIFICATION_NOT_AVAILABLE = &BizError{Code: 1003, Err: "specification not available"}
@@ -11,4 +12,3 @@ var BADGES_STILL_HAS_USERS = &BizError{Code: 1005, Err: "Please delete users of
//common response
var SYSTEM_ERROR = &BizError{Code: 9009, Err: "System error.Please try again later"}
var INSUFFICIENT_PERMISSION = &BizError{Code: 9003, Err: "insufficient permissions"}
var PARAM_ERROR = &BizError{Code: 9001, Err: "param error permissions"}

+ 0
- 4
routers/routes/routes.go View File

@@ -1127,10 +1127,6 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Get("/edit/:id", reqRepoDatasetWriter, repo.EditDataset)
m.Post("/reference_datasets", reqRepoDatasetWriterJson, bindIgnErr(auth.ReferenceDatasetForm{}), repo.ReferenceDatasetPost)
m.Post("/edit", reqRepoDatasetWriter, bindIgnErr(auth.EditDatasetForm{}), repo.EditDatasetPost)
m.Get("/current_repo", repo.CurrentRepoDataset)
m.Get("/my_datasets", repo.MyDatasets)
m.Get("/public_datasets", repo.PublicDataset)
m.Get("/my_favorite", repo.MyFavoriteDataset)

m.Get("/current_repo_m", repo.CurrentRepoDatasetMultiple)
m.Get("/my_datasets_m", repo.MyDatasetsMultiple)


+ 12
- 12
services/cloudbrain/cloudbrainTask/count.go View File

@@ -14,28 +14,28 @@ type StatusInfo struct {
ComputeResource string
}

var cloudbrainOneNotFinalStatuses = []string{string(models.JobWaiting), string(models.JobRunning)}
var cloudbrainTwoNotFinalStatuses = []string{string(models.ModelArtsTrainJobInit), string(models.ModelArtsTrainJobImageCreating), string(models.ModelArtsTrainJobSubmitTrying), string(models.ModelArtsTrainJobWaiting), string(models.ModelArtsTrainJobRunning), string(models.ModelArtsTrainJobScaling), string(models.ModelArtsTrainJobCheckInit), string(models.ModelArtsTrainJobCheckRunning), string(models.ModelArtsTrainJobCheckRunningCompleted)}
var grampusTwoNotFinalStatuses = []string{models.GrampusStatusWaiting, models.GrampusStatusRunning}
var CloudbrainOneNotFinalStatuses = []string{string(models.JobWaiting), string(models.JobRunning)}
var CloudbrainTwoNotFinalStatuses = []string{string(models.ModelArtsTrainJobInit), string(models.ModelArtsTrainJobImageCreating), string(models.ModelArtsTrainJobSubmitTrying), string(models.ModelArtsTrainJobWaiting), string(models.ModelArtsTrainJobRunning), string(models.ModelArtsTrainJobScaling), string(models.ModelArtsTrainJobCheckInit), string(models.ModelArtsTrainJobCheckRunning), string(models.ModelArtsTrainJobCheckRunningCompleted)}
var GrampusNotFinalStatuses = []string{models.GrampusStatusWaiting, models.GrampusStatusRunning}
var StatusInfoDict = map[string]StatusInfo{string(models.JobTypeDebug) + "-" + strconv.Itoa(models.TypeCloudBrainOne): {
CloudBrainTypes: []int{models.TypeCloudBrainOne},
JobType: []models.JobType{models.JobTypeDebug},
NotFinalStatuses: cloudbrainOneNotFinalStatuses,
NotFinalStatuses: CloudbrainOneNotFinalStatuses,
ComputeResource: models.GPUResource,
}, string(models.JobTypeTrain) + "-" + strconv.Itoa(models.TypeCloudBrainOne): {
CloudBrainTypes: []int{models.TypeCloudBrainOne},
JobType: []models.JobType{models.JobTypeTrain},
NotFinalStatuses: cloudbrainOneNotFinalStatuses,
NotFinalStatuses: CloudbrainOneNotFinalStatuses,
ComputeResource: models.GPUResource,
}, string(models.JobTypeInference) + "-" + strconv.Itoa(models.TypeCloudBrainOne): {
CloudBrainTypes: []int{models.TypeCloudBrainOne},
JobType: []models.JobType{models.JobTypeInference},
NotFinalStatuses: cloudbrainOneNotFinalStatuses,
NotFinalStatuses: CloudbrainOneNotFinalStatuses,
ComputeResource: models.GPUResource,
}, string(models.JobTypeBenchmark) + "-" + strconv.Itoa(models.TypeCloudBrainOne): {
CloudBrainTypes: []int{models.TypeCloudBrainOne},
JobType: []models.JobType{models.JobTypeBenchmark, models.JobTypeBrainScore, models.JobTypeSnn4imagenet},
NotFinalStatuses: cloudbrainOneNotFinalStatuses,
NotFinalStatuses: CloudbrainOneNotFinalStatuses,
ComputeResource: models.GPUResource,
}, string(models.JobTypeDebug) + "-" + strconv.Itoa(models.TypeCloudBrainTwo): {
CloudBrainTypes: []int{models.TypeCloudBrainTwo, models.TypeCDCenter},
@@ -45,22 +45,22 @@ var StatusInfoDict = map[string]StatusInfo{string(models.JobTypeDebug) + "-" + s
}, string(models.JobTypeTrain) + "-" + strconv.Itoa(models.TypeCloudBrainTwo): {
CloudBrainTypes: []int{models.TypeCloudBrainTwo},
JobType: []models.JobType{models.JobTypeTrain},
NotFinalStatuses: cloudbrainTwoNotFinalStatuses,
NotFinalStatuses: CloudbrainTwoNotFinalStatuses,
ComputeResource: models.NPUResource,
}, string(models.JobTypeInference) + "-" + strconv.Itoa(models.TypeCloudBrainTwo): {
CloudBrainTypes: []int{models.TypeCloudBrainTwo},
JobType: []models.JobType{models.JobTypeInference},
NotFinalStatuses: cloudbrainTwoNotFinalStatuses,
NotFinalStatuses: CloudbrainTwoNotFinalStatuses,
ComputeResource: models.NPUResource,
}, string(models.JobTypeTrain) + "-" + strconv.Itoa(models.TypeC2Net) + "-" + models.GPUResource: {
CloudBrainTypes: []int{models.TypeC2Net},
JobType: []models.JobType{models.JobTypeTrain},
NotFinalStatuses: grampusTwoNotFinalStatuses,
NotFinalStatuses: GrampusNotFinalStatuses,
ComputeResource: models.GPUResource,
}, string(models.JobTypeTrain) + "-" + strconv.Itoa(models.TypeC2Net) + "-" + models.NPUResource: {
CloudBrainTypes: []int{models.TypeC2Net},
JobType: []models.JobType{models.JobTypeTrain},
NotFinalStatuses: grampusTwoNotFinalStatuses,
NotFinalStatuses: GrampusNotFinalStatuses,
ComputeResource: models.NPUResource,
}}

@@ -71,7 +71,7 @@ func GetNotFinalStatusTaskCount(uid int64, cloudbrainType int, jobType string, c
}

key := jobNewType + "-" + strconv.Itoa(cloudbrainType)
if len(computeResource) > 0 {
if len(computeResource) > 0 && cloudbrainType == models.TypeC2Net {
key = key + "-" + computeResource[0]
}
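Note: besides exporting the not-final status slices, the extra TypeC2Net guard changes how the lookup key is built: the compute-resource suffix is now only appended for C2Net (Grampus) tasks, so the other cluster types always resolve to their plain "<jobType>-<clusterType>" entry. Sketch of the resulting keys (mirroring the map literals above):

    // Cloudbrain one / two tasks: "<jobType>-<clusterType>"
    keyCB1 := string(models.JobTypeTrain) + "-" + strconv.Itoa(models.TypeCloudBrainOne)
    _ = keyCB1 // shared by the GPU debug/train/inference/benchmark entries above

    // C2Net tasks additionally carry the compute resource: "<jobType>-<clusterType>-<resource>"
    keyC2Net := string(models.JobTypeTrain) + "-" + strconv.Itoa(models.TypeC2Net) + "-" + models.GPUResource
    info := StatusInfoDict[keyC2Net] // info.NotFinalStatuses == GrampusNotFinalStatuses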



+ 631
- 0
services/cloudbrain/cloudbrainTask/inference.go View File

@@ -0,0 +1,631 @@
package cloudbrainTask

import (
"bufio"
"encoding/json"
"errors"
"fmt"
"io"
"io/ioutil"
"net/http"
"os"
"path"
"strconv"
"strings"
"unicode/utf8"

"code.gitea.io/gitea/modules/modelarts"

"code.gitea.io/gitea/modules/git"

api "code.gitea.io/gitea/modules/structs"

"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/cloudbrain"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/redis/redis_key"
"code.gitea.io/gitea/modules/redis/redis_lock"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/storage"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/services/cloudbrain/resource"
"code.gitea.io/gitea/services/reward/point/account"
)

const CLONE_FILE_PREFIX = "file:///"

func CloudBrainInferenceJobCreate(ctx *context.Context, option api.CreateTrainJobOption) {

displayJobName := option.DisplayJobName
jobName := util.ConvertDisplayJobNameToJobName(displayJobName)
image := strings.TrimSpace(option.Image)
uuid := option.Attachment
jobType := string(models.JobTypeInference)
codePath := setting.JobPath + jobName + cloudbrain.CodeMountPath
branchName := option.BranchName
bootFile := strings.TrimSpace(option.BootFile)
labelName := option.LabelName
repo := ctx.Repo.Repository

lock := redis_lock.NewDistributeLock(redis_key.CloudbrainBindingJobNameKey(fmt.Sprint(repo.ID), jobType, displayJobName))
defer lock.UnLock()
isOk, err := lock.Lock(models.CloudbrainKeyDuration)
if !isOk {
log.Error("lock processed failed:%v", err, ctx.Data["MsgID"])

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.cloudbrain_samejob_err")))
return
}

ckptUrl := setting.Attachment.Minio.RealPath + option.PreTrainModelUrl + option.CkptName
log.Info("ckpt url:" + ckptUrl)
command, err := getInferenceJobCommand(option)
if err != nil {
log.Error("getTrainJobCommand failed: %v", err)
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(err.Error()))
return
}

tasks, err := models.GetCloudbrainsByDisplayJobName(repo.ID, jobType, displayJobName)
if err == nil {
if len(tasks) != 0 {
log.Error("the job name did already exist", ctx.Data["MsgID"])
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("the job name did already exist"))
return
}
} else {
if !models.IsErrJobNotExist(err) {
log.Error("system error, %v", err, ctx.Data["MsgID"])

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("system error"))
return
}
}

if !jobNamePattern.MatchString(displayJobName) {

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.cloudbrain_jobname_err")))
return
}

bootFileExist, err := ctx.Repo.FileExists(bootFile, branchName)
if err != nil || !bootFileExist {
log.Error("Get bootfile error:", err, ctx.Data["MsgID"])
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.cloudbrain_bootfile_err")))
return
}

count, err := GetNotFinalStatusTaskCount(ctx.User.ID, models.TypeCloudBrainOne, jobType)
if err != nil {
log.Error("GetCloudbrainCountByUserID failed:%v", err, ctx.Data["MsgID"])
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("system error"))
return
} else {
if count >= 1 {
log.Error("the user already has running or waiting task", ctx.Data["MsgID"])
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.cloudbrain.morethanonejob")))
return
}
}

if branchName == "" {
branchName = cloudbrain.DefaultBranchName
}
errStr := loadCodeAndMakeModelPath(repo, codePath, branchName, jobName, cloudbrain.ResultPath)
if errStr != "" {

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr(errStr)))
return
}

commitID, _ := ctx.Repo.GitRepo.GetBranchCommitID(branchName)

datasetInfos, datasetNames, err := models.GetDatasetInfo(uuid)
if err != nil {
log.Error("GetDatasetInfo failed: %v", err, ctx.Data["MsgID"])

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("cloudbrain.error.dataset_select")))
return
}
spec, err := resource.GetAndCheckSpec(ctx.User.ID, option.SpecId, models.FindSpecsOptions{
JobType: models.JobTypeInference,
ComputeResource: models.GPU,
Cluster: models.OpenICluster,
AiCenterCode: models.AICenterOfCloudBrainOne})
if err != nil || spec == nil {

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("Resource specification is not available"))
return
}
if !account.IsPointBalanceEnough(ctx.User.ID, spec.UnitPrice) {
log.Error("point balance is not enough,userId=%d specId=%d", ctx.User.ID, spec.ID)
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("points.insufficient_points_balance")))
return
}
req := cloudbrain.GenerateCloudBrainTaskReq{
Ctx: ctx,
DisplayJobName: displayJobName,
JobName: jobName,
Image: image,
Command: command,
Uuids: uuid,
DatasetNames: datasetNames,
DatasetInfos: datasetInfos,
CodePath: storage.GetMinioPath(jobName, cloudbrain.CodeMountPath+"/"),
ModelPath: setting.Attachment.Minio.RealPath + option.PreTrainModelUrl,
BenchmarkPath: storage.GetMinioPath(jobName, cloudbrain.BenchMarkMountPath+"/"),
Snn4ImageNetPath: storage.GetMinioPath(jobName, cloudbrain.Snn4imagenetMountPath+"/"),
BrainScorePath: storage.GetMinioPath(jobName, cloudbrain.BrainScoreMountPath+"/"),
JobType: jobType,
Description: option.Description,
BranchName: branchName,
BootFile: option.BootFile,
Params: option.Params,
CommitID: commitID,
ResultPath: storage.GetMinioPath(jobName, cloudbrain.ResultPath+"/"),
ModelName: option.ModelName,
ModelVersion: option.ModelVersion,
CkptName: option.CkptName,
TrainUrl: option.PreTrainModelUrl,
LabelName: labelName,
Spec: spec,
}

jobId, err := cloudbrain.GenerateTask(req)
if err != nil {

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(err.Error()))
return
}
ctx.JSON(http.StatusOK, models.BaseMessageApi{Code: 0, Message: jobId})
}
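Note: on success this handler answers with models.BaseMessageApi{Code: 0, Message: jobId}, i.e. Message carries the id of the freshly created task rather than an error text; the failure paths use BaseErrorMessageApi, which presumably produces the same code/message pair. A caller-side sketch (assuming BaseMessageApi exposes exactly the two fields used here):

    // Hypothetical caller-side interpretation of the response above.
    func jobIdFromCreateResp(resp models.BaseMessageApi) (string, error) {
        if resp.Code != 0 {
            return "", errors.New(resp.Message) // error description
        }
        return resp.Message, nil // id of the inference task that was just created
    }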

func ModelArtsInferenceJobCreate(ctx *context.Context, option api.CreateTrainJobOption) {
ctx.Data["PageIsTrainJob"] = true
VersionOutputPath := modelarts.GetOutputPathByCount(modelarts.TotalVersionCount)
displayJobName := option.DisplayJobName
jobName := util.ConvertDisplayJobNameToJobName(displayJobName)
uuid := option.Attachment
description := option.Description
workServerNumber := option.WorkServerNumber
engineID, _ := strconv.Atoi(option.ImageID)
bootFile := strings.TrimSpace(option.BootFile)
params := option.Params
repo := ctx.Repo.Repository
codeLocalPath := setting.JobPath + jobName + modelarts.CodePath
codeObsPath := "/" + setting.Bucket + modelarts.JobPath + jobName + modelarts.CodePath
resultObsPath := "/" + setting.Bucket + modelarts.JobPath + jobName + modelarts.ResultPath + VersionOutputPath + "/"
logObsPath := "/" + setting.Bucket + modelarts.JobPath + jobName + modelarts.LogPath + VersionOutputPath + "/"
//dataPath := "/" + setting.Bucket + "/" + setting.BasePath + path.Join(uuid[0:1], uuid[1:2]) + "/" + uuid + uuid + "/"
branchName := option.BranchName
EngineName := option.Image
LabelName := option.LabelName
isLatestVersion := modelarts.IsLatestVersion
VersionCount := modelarts.VersionCountOne
trainUrl := option.PreTrainModelUrl
modelName := option.ModelName
modelVersion := option.ModelVersion
ckptName := option.CkptName
ckptUrl := "/" + option.PreTrainModelUrl + option.CkptName

errStr := checkInferenceJobMultiNode(ctx.User.ID, option.WorkServerNumber)
if errStr != "" {

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr(errStr)))
return
}

lock := redis_lock.NewDistributeLock(redis_key.CloudbrainBindingJobNameKey(fmt.Sprint(repo.ID), string(models.JobTypeInference), displayJobName))
isOk, err := lock.Lock(models.CloudbrainKeyDuration)
if !isOk {
log.Error("lock processed failed:%v", err, ctx.Data["MsgID"])

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.cloudbrain_samejob_err")))
return
}
defer lock.UnLock()

count, err := GetNotFinalStatusTaskCount(ctx.User.ID, models.TypeCloudBrainTwo, string(models.JobTypeInference))
if err != nil {
log.Error("GetCloudbrainInferenceJobCountByUserID failed:%v", err, ctx.Data["MsgID"])

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("system error"))
return
} else {
if count >= 1 {
log.Error("the user already has running or waiting inference task", ctx.Data["MsgID"])

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("you have already a running or waiting inference task, can not create more"))
return
}
}

if err := paramCheckCreateInferenceJob(option); err != nil {
log.Error("paramCheckCreateInferenceJob failed:(%v)", err)

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(err.Error()))
return
}

bootFileExist, err := ctx.Repo.FileExists(bootFile, branchName)
if err != nil || !bootFileExist {
log.Error("Get bootfile error:", err)
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.cloudbrain_bootfile_err")))
return
}

//Determine whether the task name of the task in the project is duplicated
tasks, err := models.GetCloudbrainsByDisplayJobName(repo.ID, string(models.JobTypeInference), displayJobName)
if err == nil {
if len(tasks) != 0 {
log.Error("the job name did already exist", ctx.Data["MsgID"])

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("the job name did already exist"))
return
}
} else {
if !models.IsErrJobNotExist(err) {
log.Error("system error, %v", err, ctx.Data["MsgID"])

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("system error"))
return
}
}

spec, err := resource.GetAndCheckSpec(ctx.User.ID, option.SpecId, models.FindSpecsOptions{
JobType: models.JobTypeInference,
ComputeResource: models.NPU,
Cluster: models.OpenICluster,
AiCenterCode: models.AICenterOfCloudBrainTwo})
if err != nil || spec == nil {

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("Resource specification not available"))
return
}
if !account.IsPointBalanceEnough(ctx.User.ID, spec.UnitPrice) {
log.Error("point balance is not enough,userId=%d specId=%d ", ctx.User.ID, spec.ID)
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("points.insufficient_points_balance")))
return
}

//todo: del the codeLocalPath
_, err = ioutil.ReadDir(codeLocalPath)
if err == nil {
os.RemoveAll(codeLocalPath)
}

gitRepo, _ := git.OpenRepository(repo.RepoPath())
commitID, _ := gitRepo.GetBranchCommitID(branchName)

if err := downloadCode(repo, codeLocalPath, branchName); err != nil {
log.Error("Create task failed, server timed out: %s (%v)", repo.FullName(), err)

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("cloudbrain.load_code_failed")))
return
}

//todo: upload code (send to file_server todo this work?)
if err := obsMkdir(setting.CodePathPrefix + jobName + modelarts.ResultPath + VersionOutputPath + "/"); err != nil {
log.Error("Failed to obsMkdir_result: %s (%v)", repo.FullName(), err)

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("Failed to obsMkdir_result"))
return
}

if err := obsMkdir(setting.CodePathPrefix + jobName + modelarts.LogPath + VersionOutputPath + "/"); err != nil {
log.Error("Failed to obsMkdir_log: %s (%v)", repo.FullName(), err)
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("Failed to obsMkdir_log"))
return
}

if err := uploadCodeToObs(codeLocalPath, jobName, ""); err != nil {
log.Error("Failed to uploadCodeToObs: %s (%v)", repo.FullName(), err)
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("cloudbrain.load_code_failed")))
return
}

var parameters models.Parameters
param := make([]models.Parameter, 0)
param = append(param, models.Parameter{
Label: modelarts.ResultUrl,
Value: "s3:/" + resultObsPath,
}, models.Parameter{
Label: modelarts.CkptUrl,
Value: "s3:/" + ckptUrl,
})

datasUrlList, dataUrl, datasetNames, isMultiDataset, err := getDatasUrlListByUUIDS(uuid)
if err != nil {

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(err.Error()))
return
}
dataPath := dataUrl
jsondatas, err := json.Marshal(datasUrlList)
if err != nil {
log.Error("Failed to Marshal: %v", err)

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("json error:"+err.Error()))
return
}
if isMultiDataset {
param = append(param, models.Parameter{
Label: modelarts.MultiDataUrl,
Value: string(jsondatas),
})
}

existDeviceTarget := false
if len(params) != 0 {
err := json.Unmarshal([]byte(params), &parameters)
if err != nil {
log.Error("Failed to Unmarshal params: %s (%v)", params, err)

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("运行参数错误"))
return
}

for _, parameter := range parameters.Parameter {
if parameter.Label == modelarts.DeviceTarget {
existDeviceTarget = true
}
if parameter.Label != modelarts.TrainUrl && parameter.Label != modelarts.DataUrl {
param = append(param, models.Parameter{
Label: parameter.Label,
Value: parameter.Value,
})
}
}
}
if !existDeviceTarget {
param = append(param, models.Parameter{
Label: modelarts.DeviceTarget,
Value: modelarts.Ascend,
})
}

req := &modelarts.GenerateInferenceJobReq{
JobName: jobName,
DisplayJobName: displayJobName,
DataUrl: dataPath,
Description: description,
CodeObsPath: codeObsPath,
BootFileUrl: codeObsPath + bootFile,
BootFile: bootFile,
TrainUrl: trainUrl,
WorkServerNumber: workServerNumber,
EngineID: int64(engineID),
LogUrl: logObsPath,
PoolID: getPoolId(),
Uuid: uuid,
Parameters: param, //modelarts train parameters
CommitID: commitID,
BranchName: branchName,
Params: option.Params,
EngineName: EngineName,
LabelName: LabelName,
IsLatestVersion: isLatestVersion,
VersionCount: VersionCount,
TotalVersionCount: modelarts.TotalVersionCount,
ModelName: modelName,
ModelVersion: modelVersion,
CkptName: ckptName,
ResultUrl: resultObsPath,
Spec: spec,
DatasetName: datasetNames,
JobType: string(models.JobTypeInference),
}

jobId, err := modelarts.GenerateInferenceJob(ctx, req)
if err != nil {
log.Error("GenerateTrainJob failed:%v", err.Error())

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(err.Error()))
return
}
ctx.JSON(http.StatusOK, models.BaseMessageApi{Code: 0, Message: jobId})
}

func getDatasUrlListByUUIDS(uuidStr string) ([]models.Datasurl, string, string, bool, error) {
var isMultiDataset bool
var dataUrl string
var datasetNames string
var datasUrlList []models.Datasurl
uuids := strings.Split(uuidStr, ";")
if len(uuids) > setting.MaxDatasetNum {
log.Error("the dataset count(%d) exceed the limit", len(uuids))
return datasUrlList, dataUrl, datasetNames, isMultiDataset, errors.New("the dataset count exceed the limit")
}

datasetInfos := make(map[string]models.DatasetInfo)
attachs, err := models.GetAttachmentsByUUIDs(uuids)
if err != nil || len(attachs) != len(uuids) {
log.Error("GetAttachmentsByUUIDs failed: %v", err)
return datasUrlList, dataUrl, datasetNames, isMultiDataset, errors.New("GetAttachmentsByUUIDs failed")
}

for i, tmpUuid := range uuids {
var attach *models.Attachment
for _, tmpAttach := range attachs {
if tmpAttach.UUID == tmpUuid {
attach = tmpAttach
break
}
}
if attach == nil {
log.Error("GetAttachmentsByUUIDs failed: %v", err)
return datasUrlList, dataUrl, datasetNames, isMultiDataset, errors.New("GetAttachmentsByUUIDs failed")
}
fileName := strings.TrimSuffix(strings.TrimSuffix(strings.TrimSuffix(attach.Name, ".zip"), ".tar.gz"), ".tgz")
for _, datasetInfo := range datasetInfos {
if fileName == datasetInfo.Name {
log.Error("the dataset name is same: %v", attach.Name)
return datasUrlList, dataUrl, datasetNames, isMultiDataset, errors.New("the dataset name is same")
}
}
if len(attachs) <= 1 {
dataUrl = "/" + setting.Bucket + "/" + setting.BasePath + path.Join(attach.UUID[0:1], attach.UUID[1:2]) + "/" + attach.UUID + attach.UUID + "/"
isMultiDataset = false
} else {
dataUrl = "/" + setting.Bucket + "/" + setting.BasePath + path.Join(attachs[0].UUID[0:1], attachs[0].UUID[1:2]) + "/" + attachs[0].UUID + attachs[0].UUID + "/"
datasetUrl := "s3://" + setting.Bucket + "/" + setting.BasePath + path.Join(attach.UUID[0:1], attach.UUID[1:2]) + "/" + attach.UUID + attach.UUID + "/"
datasUrlList = append(datasUrlList, models.Datasurl{
DatasetUrl: datasetUrl,
DatasetName: fileName,
})
isMultiDataset = true
}

if i == 0 {
datasetNames = attach.Name
} else {
datasetNames += ";" + attach.Name
}
}

return datasUrlList, dataUrl, datasetNames, isMultiDataset, nil
}
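For orientation, the shapes produced by getDatasUrlListByUUIDS look roughly like this (uuid and file names hypothetical, bucket and base path left symbolic):

    // single uuid "abcd1234" attached as "mnist.zip":
    //   dataUrl      = "/" + setting.Bucket + "/" + setting.BasePath + "a/b/" + "abcd1234abcd1234/"
    //   datasUrlList = nil, isMultiDataset = false, datasetNames = "mnist.zip"
    // two uuids:
    //   dataUrl      = path of the first attachment, built as above
    //   datasUrlList = one {DatasetUrl: "s3://...", DatasetName: "<name without .zip/.tar.gz/.tgz>"} per attachment
    //   datasetNames = "mnist.zip;coco.tar.gz", isMultiDataset = true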
func checkInferenceJobMultiNode(userId int64, serverNum int) string {
if serverNum == 1 {
return ""
}

return "repo.modelarts.no_node_right"

}

func paramCheckCreateInferenceJob(option api.CreateTrainJobOption) error {
if !strings.HasSuffix(strings.TrimSpace(option.BootFile), ".py") {
log.Error("the boot file(%s) must be a python file", strings.TrimSpace(option.BootFile))
return errors.New("启动文件必须是python文件")
}

if option.ModelName == "" {
log.Error("the ModelName(%d) must not be nil", option.ModelName)
return errors.New("模型名称不能为空")
}
if option.ModelVersion == "" {
log.Error("the ModelVersion(%d) must not be nil", option.ModelVersion)
return errors.New("模型版本不能为空")
}
if option.CkptName == "" {
log.Error("the CkptName(%d) must not be nil", option.CkptName)
return errors.New("权重文件不能为空")
}
if option.BranchName == "" {
log.Error("the Branch(%d) must not be nil", option.BranchName)
return errors.New("分支名不能为空")
}

if utf8.RuneCountInString(option.Description) > 255 {
log.Error("the Description length(%d) must not more than 255", option.Description)
return errors.New("描述字符不能超过255个字符")
}

return nil
}

func loadCodeAndMakeModelPath(repo *models.Repository, codePath string, branchName string, jobName string, resultPath string) string {
err := downloadCode(repo, codePath, branchName)
if err != nil {
return "cloudbrain.load_code_failed"
}

err = uploadCodeToMinio(codePath+"/", jobName, cloudbrain.CodeMountPath+"/")
if err != nil {
return "cloudbrain.load_code_failed"
}

modelPath := setting.JobPath + jobName + resultPath + "/"
err = mkModelPath(modelPath)
if err != nil {
return "cloudbrain.load_code_failed"
}
err = uploadCodeToMinio(modelPath, jobName, resultPath+"/")
if err != nil {
return "cloudbrain.load_code_failed"
}

return ""
}

func downloadCode(repo *models.Repository, codePath, branchName string) error {
//add "file:///" prefix to make the depth valid
if err := git.Clone(CLONE_FILE_PREFIX+repo.RepoPath(), codePath, git.CloneRepoOptions{Branch: branchName, Depth: 1}); err != nil {
log.Error("Failed to clone repository: %s (%v)", repo.FullName(), err)
return err
}

configFile, err := os.OpenFile(codePath+"/.git/config", os.O_RDWR, 0666)
if err != nil {
log.Error("open file(%s) failed:%v", codePath+"/,git/config", err)
return err
}

defer configFile.Close()

pos := int64(0)
reader := bufio.NewReader(configFile)
for {
line, err := reader.ReadString('\n')
if err != nil {
if err == io.EOF {
log.Error("not find the remote-url")
return nil
} else {
log.Error("read error: %v", err)
return err
}
}

if strings.Contains(line, "url") && strings.Contains(line, ".git") {
originUrl := "\turl = " + repo.CloneLink().HTTPS + "\n"
if len(line) > len(originUrl) {
originUrl += strings.Repeat(" ", len(line)-len(originUrl))
}
bytes := []byte(originUrl)
_, err := configFile.WriteAt(bytes, pos)
if err != nil {
log.Error("WriteAt failed:%v", err)
return err
}
break
}

pos += int64(len(line))
}

return nil
}
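Note: downloadCode clones from the local file:/// path (so the depth option is honoured, per the comment above) and then patches .git/config in place so the recorded remote points back at the repository's HTTPS clone URL; since WriteAt overwrites bytes rather than inserting them, the replacement is padded with spaces so the file length is preserved. Roughly (paths and host hypothetical):

    // before:  url = file:///data/gitea-repositories/org/repo.git
    // after:   url = https://openi.example.com/org/repo.git        <- padded to the old length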

func getInferenceJobCommand(option api.CreateTrainJobOption) (string, error) {
var command string
bootFile := strings.TrimSpace(option.BootFile)
params := option.Params

if !strings.HasSuffix(bootFile, ".py") {
log.Error("bootFile(%s) format error", bootFile)
return command, errors.New("bootFile format error")
}

var parameters models.Parameters
var param string
if len(params) != 0 {
err := json.Unmarshal([]byte(params), &parameters)
if err != nil {
log.Error("Failed to Unmarshal params: %s (%v)", params, err)
return command, err
}

for _, parameter := range parameters.Parameter {
param += " --" + parameter.Label + "=" + parameter.Value
}
}

param += " --modelname" + "=" + option.CkptName

command += "python /code/" + bootFile + param + " > " + cloudbrain.ResultPath + "/" + option.DisplayJobName + "-" + cloudbrain.LogFile

return command, nil
}
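With an empty Params string the generated command is just the boot file plus the checkpoint name, redirected into the result directory. A sketch (values hypothetical; ResultPath and LogFile are the cloudbrain constants referenced above):

    opt := api.CreateTrainJobOption{
        DisplayJobName: "infer-demo",
        BootFile:       "inference.py",
        CkptName:       "model.ckpt",
    }
    cmd, _ := getInferenceJobCommand(opt) // err is nil here because the boot file ends in .py
    // cmd == "python /code/inference.py --modelname=model.ckpt > " +
    //        cloudbrain.ResultPath + "/infer-demo-" + cloudbrain.LogFile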

+ 1210
- 0
services/cloudbrain/cloudbrainTask/train.go View File

@@ -0,0 +1,1210 @@
package cloudbrainTask

import (
"encoding/json"
"errors"
"fmt"
"io"
"io/ioutil"
"net/http"
"os"
"path"
"regexp"
"strconv"
"strings"

"code.gitea.io/gitea/modules/urfs_client/urchin"

"code.gitea.io/gitea/modules/timeutil"

"code.gitea.io/gitea/modules/notification"

"code.gitea.io/gitea/modules/obs"

"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/storage"
"github.com/unknwon/com"

"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/cloudbrain"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/grampus"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/modelarts"
"code.gitea.io/gitea/modules/redis/redis_key"
"code.gitea.io/gitea/modules/redis/redis_lock"
"code.gitea.io/gitea/modules/setting"
api "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/services/cloudbrain/resource"
"code.gitea.io/gitea/services/reward/point/account"
)

var jobNamePattern = regexp.MustCompile(`^[a-z0-9][a-z0-9-_]{1,34}[a-z0-9-]$`)

const TaskTypeCloudbrainOne = 0
const TaskTypeModelArts = 1
const TaskTypeGrampusGPU = 2
const TaskTypeGrampusNPU = 3
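jobNamePattern is the rule every display job name must satisfy before a task is created: lower-case letters, digits, "-" and "_", 3 to 36 characters, not starting with "-" or "_" and not ending with "_". Inside this package that means, for example:

    jobNamePattern.MatchString("train-demo-01") // true
    jobNamePattern.MatchString("Train Demo")    // false: upper-case letter and space
    jobNamePattern.MatchString("ab")            // false: at least 3 characters are required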

func CloudbrainOneTrainJobCreate(ctx *context.Context, option api.CreateTrainJobOption) {

displayJobName := option.DisplayJobName
jobName := util.ConvertDisplayJobNameToJobName(displayJobName)
image := strings.TrimSpace(option.Image)
uuids := option.Attachment
jobType := string(models.JobTypeTrain)

codePath := setting.JobPath + jobName + cloudbrain.CodeMountPath
branchName := option.BranchName
repo := ctx.Repo.Repository

lock := redis_lock.NewDistributeLock(redis_key.CloudbrainBindingJobNameKey(fmt.Sprint(repo.ID), jobType, displayJobName))
defer lock.UnLock()
spec, datasetInfos, datasetNames, err := checkParameters(ctx, option, lock, repo)
if err != nil {
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(err.Error()))
return
}

command, err := getTrainJobCommand(option)
if err != nil {
log.Error("getTrainJobCommand failed: %v", err)
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(err.Error()))
return
}

errStr := loadCodeAndMakeModelPath(repo, codePath, branchName, jobName, cloudbrain.ModelMountPath)
if errStr != "" {
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr(errStr)))
return
}

commitID, _ := ctx.Repo.GitRepo.GetBranchCommitID(branchName)

req := cloudbrain.GenerateCloudBrainTaskReq{
Ctx: ctx,
DisplayJobName: displayJobName,
JobName: jobName,
Image: image,
Command: command,
Uuids: uuids,
DatasetNames: datasetNames,
DatasetInfos: datasetInfos,
CodePath: storage.GetMinioPath(jobName, cloudbrain.CodeMountPath+"/"),
ModelPath: storage.GetMinioPath(jobName, cloudbrain.ModelMountPath+"/"),
BenchmarkPath: storage.GetMinioPath(jobName, cloudbrain.BenchMarkMountPath+"/"),
Snn4ImageNetPath: storage.GetMinioPath(jobName, cloudbrain.Snn4imagenetMountPath+"/"),
BrainScorePath: storage.GetMinioPath(jobName, cloudbrain.BrainScoreMountPath+"/"),
JobType: jobType,
Description: option.Description,
BranchName: branchName,
BootFile: option.BootFile,
Params: option.Params,
CommitID: commitID,
BenchmarkTypeID: 0,
BenchmarkChildTypeID: 0,
ResultPath: storage.GetMinioPath(jobName, cloudbrain.ResultPath+"/"),
Spec: spec,
}

if option.ModelName != "" { //使用预训练模型训练
req.ModelName = option.ModelName
req.LabelName = option.LabelName
req.CkptName = option.CkptName
req.ModelVersion = option.ModelVersion
req.PreTrainModelPath = setting.Attachment.Minio.RealPath + option.PreTrainModelUrl
req.PreTrainModelUrl = option.PreTrainModelUrl

}

jobId, err := cloudbrain.GenerateTask(req)
if err != nil {
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(err.Error()))
return
}
ctx.JSON(http.StatusOK, models.BaseMessageApi{
Code: 0,
Message: jobId,
})
}
func ModelArtsTrainJobNpuCreate(ctx *context.Context, option api.CreateTrainJobOption) {
VersionOutputPath := modelarts.GetOutputPathByCount(modelarts.TotalVersionCount)
displayJobName := option.DisplayJobName
jobName := util.ConvertDisplayJobNameToJobName(displayJobName)
uuid := option.Attachment
description := option.Description
workServerNumber := option.WorkServerNumber
engineID, _ := strconv.Atoi(option.ImageID)
bootFile := strings.TrimSpace(option.BootFile)
params := option.Params
repo := ctx.Repo.Repository
codeLocalPath := setting.JobPath + jobName + modelarts.CodePath
codeObsPath := "/" + setting.Bucket + modelarts.JobPath + jobName + modelarts.CodePath + VersionOutputPath + "/"
outputObsPath := "/" + setting.Bucket + modelarts.JobPath + jobName + modelarts.OutputPath + VersionOutputPath + "/"
logObsPath := "/" + setting.Bucket + modelarts.JobPath + jobName + modelarts.LogPath + VersionOutputPath + "/"
branchName := option.BranchName
isLatestVersion := modelarts.IsLatestVersion
VersionCount := modelarts.VersionCountOne
EngineName := option.Image

errStr := checkMultiNode(ctx.User.ID, option.WorkServerNumber)
if errStr != "" {
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr(errStr)))
return
}

lock := redis_lock.NewDistributeLock(redis_key.CloudbrainBindingJobNameKey(fmt.Sprint(repo.ID), string(models.JobTypeTrain), displayJobName))
defer lock.UnLock()

spec, _, _, err := checkParameters(ctx, option, lock, repo)
if err != nil {
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(err.Error()))
return
}

//todo: del the codeLocalPath
_, err = ioutil.ReadDir(codeLocalPath)
if err == nil {
os.RemoveAll(codeLocalPath)
}

gitRepo, _ := git.OpenRepository(repo.RepoPath())
commitID, _ := gitRepo.GetBranchCommitID(branchName)

if err := downloadCode(repo, codeLocalPath, branchName); err != nil {
log.Error("downloadCode failed, server timed out: %s (%v)", repo.FullName(), err)
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("cloudbrain.load_code_failed")))
return
}

//todo: upload code (send to file_server todo this work?)
if err := obsMkdir(setting.CodePathPrefix + jobName + modelarts.OutputPath + VersionOutputPath + "/"); err != nil {
log.Error("Failed to obsMkdir_output: %s (%v)", repo.FullName(), err)
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("Failed to obsMkdir_output"))
return
}

if err := obsMkdir(setting.CodePathPrefix + jobName + modelarts.LogPath + VersionOutputPath + "/"); err != nil {
log.Error("Failed to obsMkdir_log: %s (%v)", repo.FullName(), err)
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("Failed to obsMkdir_log"))
return
}

parentDir := VersionOutputPath + "/"
if err := uploadCodeToObs(codeLocalPath, jobName, parentDir); err != nil {
log.Error("Failed to uploadCodeToObs: %s (%v)", repo.FullName(), err)
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("cloudbrain.load_code_failed")))
return
}

var parameters models.Parameters
param := make([]models.Parameter, 0)
existDeviceTarget := false
if len(params) != 0 {
err := json.Unmarshal([]byte(params), &parameters)
if err != nil {
log.Error("Failed to Unmarshal params: %s (%v)", params, err)
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("运行参数错误"))
return
}

for _, parameter := range parameters.Parameter {
if parameter.Label == modelarts.DeviceTarget {
existDeviceTarget = true
}
if parameter.Label != modelarts.TrainUrl && parameter.Label != modelarts.DataUrl {
param = append(param, models.Parameter{
Label: parameter.Label,
Value: parameter.Value,
})
}
}
}
if !existDeviceTarget {
param = append(param, models.Parameter{
Label: modelarts.DeviceTarget,
Value: modelarts.Ascend,
})
}
datasUrlList, dataUrl, datasetNames, isMultiDataset, err := getDatasUrlListByUUIDS(uuid)
if err != nil {
log.Error("Failed to getDatasUrlListByUUIDS: %v", err)
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("Failed to getDatasUrlListByUUIDS:"+err.Error()))
return
}
dataPath := dataUrl
jsondatas, err := json.Marshal(datasUrlList)
if err != nil {
log.Error("Failed to Marshal: %v", err)
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("json error:"+err.Error()))
return
}
if isMultiDataset {
param = append(param, models.Parameter{
Label: modelarts.MultiDataUrl,
Value: string(jsondatas),
})
}
if option.ModelName != "" { //使用预训练模型训练
ckptUrl := "/" + option.PreTrainModelUrl + option.CkptName
param = append(param, models.Parameter{
Label: modelarts.CkptUrl,
Value: "s3:/" + ckptUrl,
})
}

req := &modelarts.GenerateTrainJobReq{
JobName: jobName,
DisplayJobName: displayJobName,
DataUrl: dataPath,
Description: description,
CodeObsPath: codeObsPath,
BootFileUrl: codeObsPath + bootFile,
BootFile: bootFile,
TrainUrl: outputObsPath,
WorkServerNumber: workServerNumber,
EngineID: int64(engineID),
LogUrl: logObsPath,
PoolID: getPoolId(),
Uuid: uuid,
Parameters: param,
CommitID: commitID,
IsLatestVersion: isLatestVersion,
BranchName: branchName,
Params: option.Params,
EngineName: engineName,
VersionCount: versionCount,
TotalVersionCount: modelarts.TotalVersionCount,
DatasetName: datasetNames,
Spec: spec,
}
if option.ModelName != "" { //使用预训练模型训练
req.ModelName = option.ModelName
req.LabelName = option.LabelName
req.CkptName = option.CkptName
req.ModelVersion = option.ModelVersion
req.PreTrainModelUrl = option.PreTrainModelUrl

}

userCommand, userImageUrl := getUserCommand(engineID, req)
req.UserCommand = userCommand
req.UserImageUrl = userImageUrl

//convert params into modelarts.Parameters and return the error to the frontend if parsing fails
var modelartsParams modelarts.Parameters
if err := json.Unmarshal([]byte(params), &modelartsParams); err != nil {
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("json.Unmarshal failed:"+err.Error()))
return
}

jobId, err := modelarts.GenerateTrainJob(ctx, req)
if err != nil {
log.Error("GenerateTrainJob failed:%v", err.Error())
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(err.Error()))
return
}
ctx.JSON(http.StatusOK, models.BaseMessageApi{
Code: 0,
Message: jobId,
})

}

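// GrampusTrainJobGpuCreate creates a Grampus (C2Net) GPU train job from the API options:
// it validates the request, downloads the repository code and uploads it to MinIO, prepares
// the model output path, builds the run command and submits the job to Grampus.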
func GrampusTrainJobGpuCreate(ctx *context.Context, option api.CreateTrainJobOption) {

displayJobName := option.DisplayJobName
jobName := util.ConvertDisplayJobNameToJobName(displayJobName)
uuid := option.Attachment
description := option.Description
bootFile := strings.TrimSpace(option.BootFile)
params := option.Params
repo := ctx.Repo.Repository
codeLocalPath := setting.JobPath + jobName + cloudbrain.CodeMountPath + "/"
codeMinioPath := setting.CBCodePathPrefix + jobName + cloudbrain.CodeMountPath + "/"
branchName := option.BranchName
image := strings.TrimSpace(option.Image)

lock := redis_lock.NewDistributeLock(redis_key.CloudbrainBindingJobNameKey(fmt.Sprint(repo.ID), string(models.JobTypeTrain), displayJobName))
defer lock.UnLock()
spec, datasetInfos, datasetNames, err := checkParameters(ctx, option, lock, repo)
if err != nil {
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(err.Error()))
return
}

//prepare code and out path
_, err = ioutil.ReadDir(codeLocalPath)
if err == nil {
os.RemoveAll(codeLocalPath)
}

if err := downloadZipCode(ctx, codeLocalPath, branchName); err != nil {
log.Error("downloadZipCode failed, server timed out: %s (%v)", repo.FullName(), err, ctx.Data["MsgID"])
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("cloudbrain.load_code_failed")))
return
}

//todo: upload code (send to file_server to do this work?)
//upload code
if err := uploadCodeToMinio(codeLocalPath+"/", jobName, cloudbrain.CodeMountPath+"/"); err != nil {
log.Error("Failed to uploadCodeToMinio: %s (%v)", repo.FullName(), err, ctx.Data["MsgID"])
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("cloudbrain.load_code_failed")))
return
}

modelPath := setting.JobPath + jobName + cloudbrain.ModelMountPath + "/"
if err := mkModelPath(modelPath); err != nil {
log.Error("Failed to mkModelPath: %s (%v)", repo.FullName(), err, ctx.Data["MsgID"])
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("cloudbrain.load_code_failed")))
return
}

//init model readme
if err := uploadCodeToMinio(modelPath, jobName, cloudbrain.ModelMountPath+"/"); err != nil {
log.Error("Failed to uploadCodeToMinio: %s (%v)", repo.FullName(), err, ctx.Data["MsgID"])
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("cloudbrain.load_code_failed")))
return
}

var datasetRemotePath, allFileName string
for _, datasetInfo := range datasetInfos {
if datasetRemotePath == "" {
datasetRemotePath = datasetInfo.DataLocalPath
allFileName = datasetInfo.FullName
} else {
datasetRemotePath = datasetRemotePath + ";" + datasetInfo.DataLocalPath
allFileName = allFileName + ";" + datasetInfo.FullName
}

}

//prepare command
preTrainModelPath := getPreTrainModelPath(option.PreTrainModelUrl, option.CkptName)

command, err := generateCommand(repo.Name, grampus.ProcessorTypeGPU, codeMinioPath+cloudbrain.DefaultBranchName+".zip", datasetRemotePath, bootFile, params, setting.CBCodePathPrefix+jobName+cloudbrain.ModelMountPath+"/", allFileName, preTrainModelPath, option.CkptName, "")
if err != nil {
log.Error("Failed to generateCommand: %s (%v)", displayJobName, err, ctx.Data["MsgID"])
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("Create task failed, internal error"))
return
}

commitID, _ := ctx.Repo.GitRepo.GetBranchCommitID(branchName)

req := &grampus.GenerateTrainJobReq{
JobName: jobName,
DisplayJobName: displayJobName,
ComputeResource: models.GPUResource,
ProcessType: grampus.ProcessorTypeGPU,
Command: command,
ImageUrl: image,
Description: description,
BootFile: bootFile,
Uuid: uuid,
CommitID: commitID,
BranchName: branchName,
Params: option.Params,
EngineName: image,
DatasetNames: datasetNames,
DatasetInfos: datasetInfos,

IsLatestVersion: modelarts.IsLatestVersion,
VersionCount: modelarts.VersionCountOne,
WorkServerNumber: 1,
Spec: spec,
}

if option.ModelName != "" { //使用预训练模型训练
req.ModelName = option.ModelName
req.LabelName = option.LabelName
req.CkptName = option.CkptName
req.ModelVersion = option.ModelVersion
req.PreTrainModelUrl = option.PreTrainModelUrl

}

jobId, err := grampus.GenerateTrainJob(ctx, req)
if err != nil {
log.Error("GenerateTrainJob failed:%v", err.Error(), ctx.Data["MsgID"])
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(err.Error()))
return
}
ctx.JSON(http.StatusOK, models.BaseMessageApi{Code: 0, Message: jobId})
}

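// checkParameters performs the shared pre-creation checks for train jobs: distributed lock,
// job name format, boot file existence, per-user running-task limit, duplicate job name,
// resource specification, point balance and (for non-ModelArts tasks) dataset availability.
// It returns the resolved specification, dataset infos and dataset names.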
func checkParameters(ctx *context.Context, option api.CreateTrainJobOption, lock *redis_lock.DistributeLock, repo *models.Repository) (*models.Specification, map[string]models.DatasetInfo, string, error) {
isOk, err := lock.Lock(models.CloudbrainKeyDuration)
if !isOk {
log.Error("lock processed failed:%v", err, ctx.Data["MsgID"])

return nil, nil, "", fmt.Errorf(ctx.Tr("repo.cloudbrain_samejob_err"))
}

if !jobNamePattern.MatchString(option.DisplayJobName) {
return nil, nil, "", fmt.Errorf(ctx.Tr("repo.cloudbrain_jobname_err"))
}

bootFileExist, err := ctx.Repo.FileExists(option.BootFile, option.BranchName)
if err != nil || !bootFileExist {
log.Error("Get bootfile error:", err, ctx.Data["MsgID"])
return nil, nil, "", fmt.Errorf(ctx.Tr("repo.cloudbrain_bootfile_err"))
}

computeResource := models.GPUResource
if isNpuTask(option) {
computeResource = models.NPUResource
}

//check count limit
taskType := option.Type
if isC2NetTask(option) {
taskType = 2
}

count, err := GetNotFinalStatusTaskCount(ctx.User.ID, taskType, string(models.JobTypeTrain), computeResource)
if err != nil {
log.Error("GetCountByUserID failed:%v", err, ctx.Data["MsgID"])
return nil, nil, "", fmt.Errorf("system error")
} else {
if count >= 1 {
log.Error("the user already has running or waiting task", ctx.Data["MsgID"])
return nil, nil, "", fmt.Errorf("you have already a running or waiting task, can not create more.")
}
}

//check param
if err := paramCheckCreateTrainJob(option.BootFile, option.BranchName); err != nil {
log.Error("paramCheckCreateTrainJob failed:(%v)", err, ctx.Data["MsgID"])
return nil, nil, "", err
}

//check whether the task name in the project is duplicated
tasks, err := models.GetCloudbrainsByDisplayJobName(repo.ID, string(models.JobTypeTrain), option.DisplayJobName)
if err == nil {
if len(tasks) != 0 {
log.Error("the job name did already exist", ctx.Data["MsgID"])
return nil, nil, "", fmt.Errorf("The job name did already exist.")
}
} else {
if !models.IsErrJobNotExist(err) {
log.Error("system error, %v", err, ctx.Data["MsgID"])
return nil, nil, "", fmt.Errorf("system error")
}
}

//check specification
computeType := models.GPU

if isNpuTask(option) {
computeType = models.NPU
}
cluster := models.OpenICluster
if isC2NetTask(option) {
cluster = models.C2NetCluster
}
aiCenterCode := ""
if option.Type == TaskTypeCloudbrainOne {
aiCenterCode = models.AICenterOfCloudBrainOne
} else if option.Type == TaskTypeModelArts {
aiCenterCode = models.AICenterOfCloudBrainTwo
}

spec, err := resource.GetAndCheckSpec(ctx.User.ID, option.SpecId, models.FindSpecsOptions{
JobType: models.JobTypeTrain,
ComputeResource: computeType,
Cluster: cluster,
AiCenterCode: aiCenterCode,
})
if err != nil || spec == nil {
return nil, nil, "", fmt.Errorf("Resource specification is not available.")
}

if !account.IsPointBalanceEnough(ctx.User.ID, spec.UnitPrice) {
log.Error("point balance is not enough,userId=%d specId=%d", ctx.User.ID, spec.ID)
return nil, nil, "", fmt.Errorf(ctx.Tr("points.insufficient_points_balance"))
}

//check dataset
var datasetInfos map[string]models.DatasetInfo
var datasetNames string
if option.Type != TaskTypeModelArts {
if isC2NetTask(option) {
datasetInfos, datasetNames, err = models.GetDatasetInfo(option.Attachment, computeType)
} else {
datasetInfos, datasetNames, err = models.GetDatasetInfo(option.Attachment)
}

if err != nil {
log.Error("GetDatasetInfo failed: %v", err, ctx.Data["MsgID"])
return nil, nil, "", fmt.Errorf(ctx.Tr("cloudbrain.error.dataset_select"))
}
}
return spec, datasetInfos, datasetNames, err
}

func isNpuTask(option api.CreateTrainJobOption) bool {
return option.Type == TaskTypeModelArts || option.Type == TaskTypeGrampusNPU
}

func isC2NetTask(option api.CreateTrainJobOption) bool {
return option.Type == TaskTypeGrampusGPU || option.Type == TaskTypeGrampusNPU
}

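// GrampusTrainJobNpuCreate creates a Grampus (C2Net) NPU train job: it validates the request,
// uploads the repository code to OBS, builds the run command and submits the job to Grampus.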
func GrampusTrainJobNpuCreate(ctx *context.Context, option api.CreateTrainJobOption) {

displayJobName := option.DisplayJobName
jobName := util.ConvertDisplayJobNameToJobName(displayJobName)
uuid := option.Attachment
description := option.Description
bootFile := strings.TrimSpace(option.BootFile)
params := option.Params
repo := ctx.Repo.Repository
codeLocalPath := setting.JobPath + jobName + modelarts.CodePath
codeObsPath := grampus.JobPath + jobName + modelarts.CodePath
branchName := option.BranchName
isLatestVersion := modelarts.IsLatestVersion
versionCount := modelarts.VersionCountOne
engineName := option.Image

lock := redis_lock.NewDistributeLock(redis_key.CloudbrainBindingJobNameKey(fmt.Sprint(repo.ID), string(models.JobTypeTrain), displayJobName))
defer lock.UnLock()
spec, datasetInfos, datasetNames, err := checkParameters(ctx, option, lock, repo)
if err != nil {
ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(err.Error()))
return
}

//prepare code and out path
_, err = ioutil.ReadDir(codeLocalPath)
if err == nil {
os.RemoveAll(codeLocalPath)
}

if err := downloadZipCode(ctx, codeLocalPath, branchName); err != nil {
log.Error("downloadZipCode failed, server timed out: %s (%v)", repo.FullName(), err)

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("cloudbrain.load_code_failed")))
return
}

//todo: upload code (send to file_server to do this work?)
if err := obsMkdir(setting.CodePathPrefix + jobName + modelarts.OutputPath); err != nil {
log.Error("Failed to obsMkdir_output: %s (%v)", repo.FullName(), err)

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("cloudbrain.load_code_failed")))
return
}

if err := uploadCodeToObs(codeLocalPath, jobName, ""); err != nil {
log.Error("Failed to uploadCodeToObs: %s (%v)", repo.FullName(), err)

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("cloudbrain.load_code_failed")))
return
}

var datasetRemotePath, allFileName string
for _, datasetInfo := range datasetInfos {
if datasetRemotePath == "" {
datasetRemotePath = datasetInfo.DataLocalPath + "'" + datasetInfo.FullName + "'"
allFileName = datasetInfo.FullName
} else {
datasetRemotePath = datasetRemotePath + ";" + datasetInfo.DataLocalPath + "'" + datasetInfo.FullName + "'"
allFileName = allFileName + ";" + datasetInfo.FullName
}

}

//prepare command
preTrainModelPath := getPreTrainModelPath(option.PreTrainModelUrl, option.CkptName)
command, err := generateCommand(repo.Name, grampus.ProcessorTypeNPU, codeObsPath+cloudbrain.DefaultBranchName+".zip", datasetRemotePath, bootFile, params, setting.CodePathPrefix+jobName+modelarts.OutputPath, allFileName, preTrainModelPath, option.CkptName, grampus.GetNpuModelRemoteObsUrl(jobName))
if err != nil {
log.Error("Failed to generateCommand: %s (%v)", displayJobName, err, ctx.Data["MsgID"])

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi("Create task failed, internal error"))
return
}

commitID, _ := ctx.Repo.GitRepo.GetBranchCommitID(branchName)

req := &grampus.GenerateTrainJobReq{
JobName: jobName,
DisplayJobName: displayJobName,
ComputeResource: models.NPUResource,
ProcessType: grampus.ProcessorTypeNPU,
Command: command,
ImageId: option.ImageID,
Description: description,
CodeObsPath: codeObsPath,
BootFileUrl: codeObsPath + bootFile,
BootFile: bootFile,
WorkServerNumber: option.WorkServerNumber,
Uuid: uuid,
CommitID: commitID,
IsLatestVersion: isLatestVersion,
BranchName: branchName,
Params: option.Params,
EngineName: engineName,
VersionCount: versionCount,
TotalVersionCount: modelarts.TotalVersionCount,
DatasetNames: datasetNames,
DatasetInfos: datasetInfos,
Spec: spec,
CodeName: strings.ToLower(repo.Name),
}
if option.ModelName != "" { //使用预训练模型训练
req.ModelName = option.ModelName
req.LabelName = option.LabelName
req.CkptName = option.CkptName
req.ModelVersion = option.ModelVersion
req.PreTrainModelUrl = option.PreTrainModelUrl
req.PreTrainModelPath = preTrainModelPath
}

jobId, err := grampus.GenerateTrainJob(ctx, req)
if err != nil {
log.Error("GenerateTrainJob failed:%v", err.Error())

ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(err.Error()))
return
}
ctx.JSON(http.StatusOK, models.BaseMessageApi{Code: 0, Message: jobId})
}

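// obsMkdir creates a zero-byte object with the given key so the directory prefix exists in the OBS bucket.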
func obsMkdir(dir string) error {
input := &obs.PutObjectInput{}
input.Bucket = setting.Bucket
input.Key = dir
_, err := storage.ObsCli.PutObject(input)
if err != nil {
log.Error("PutObject(%s) failed: %s", input.Key, err.Error())
return err
}

return nil
}
func uploadCodeToObs(codePath, jobName, parentDir string) error {
files, err := readDir(codePath)
if err != nil {
log.Error("readDir(%s) failed: %s", codePath, err.Error())
return err
}

for _, file := range files {
if file.IsDir() {
input := &obs.PutObjectInput{}
input.Bucket = setting.Bucket
input.Key = parentDir + file.Name() + "/"
_, err = storage.ObsCli.PutObject(input)
if err != nil {
log.Error("PutObject(%s) failed: %s", input.Key, err.Error())
return err
}

if err = uploadCodeToObs(codePath+file.Name()+"/", jobName, parentDir+file.Name()+"/"); err != nil {
log.Error("uploadCodeToObs(%s) failed: %s", file.Name(), err.Error())
return err
}
} else {
input := &obs.PutFileInput{}
input.Bucket = setting.Bucket
input.Key = setting.CodePathPrefix + jobName + "/code/" + parentDir + file.Name()
input.SourceFile = codePath + file.Name()
_, err = storage.ObsCli.PutFile(input)
if err != nil {
log.Error("PutFile(%s) failed: %s", input.SourceFile, err.Error())
return err
}
}
}

return nil
}

func paramCheckCreateTrainJob(bootFile string, branchName string) error {
if !strings.HasSuffix(strings.TrimSpace(bootFile), ".py") {
log.Error("the boot file(%s) must be a python file", bootFile)
return errors.New("启动文件必须是python文件")
}

if branchName == "" {
log.Error("the branch must not be null!", branchName)
return errors.New("代码分支不能为空!")
}

return nil
}
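// downloadZipCode archives the given branch of the current repository as a zip file
// (grampus.CodeArchiveName) under codePath.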
func downloadZipCode(ctx *context.Context, codePath, branchName string) error {
archiveType := git.ZIP
archivePath := codePath

if !com.IsDir(archivePath) {
if err := os.MkdirAll(archivePath, os.ModePerm); err != nil {
log.Error("MkdirAll failed:" + err.Error())
return err
}
}

// Get corresponding commit of the requested branch.
var (
commit *git.Commit
err error
)

gitRepo := ctx.Repo.GitRepo

if gitRepo.IsBranchExist(branchName) {
commit, err = gitRepo.GetBranchCommit(branchName)
if err != nil {
log.Error("GetBranchCommit failed:" + err.Error())
return err
}
} else {
log.Error("the branch is not exist: " + branchName)
return fmt.Errorf("The branch does not exist.")
}

archivePath = path.Join(archivePath, grampus.CodeArchiveName)
if !com.IsFile(archivePath) {
if err := commit.CreateArchive(archivePath, git.CreateArchiveOpts{
Format: archiveType,
Prefix: setting.Repository.PrefixArchiveFiles,
}); err != nil {
log.Error("CreateArchive failed:" + err.Error())
return err
}
}

return nil
}

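// uploadCodeToMinio walks codePath recursively and uploads every file to MinIO
// below the job's code prefix (setting.CBCodePathPrefix).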
func uploadCodeToMinio(codePath, jobName, parentDir string) error {
files, err := readDir(codePath)
if err != nil {
log.Error("readDir(%s) failed: %s", codePath, err.Error())
return err
}

for _, file := range files {
if file.IsDir() {
if err = uploadCodeToMinio(codePath+file.Name()+"/", jobName, parentDir+file.Name()+"/"); err != nil {
log.Error("uploadCodeToMinio(%s) failed: %s", file.Name(), err.Error())
return err
}
} else {
destObject := setting.CBCodePathPrefix + jobName + parentDir + file.Name()
sourceFile := codePath + file.Name()
err = storage.Attachments.UploadObject(destObject, sourceFile)
if err != nil {
log.Error("UploadObject(%s) failed: %s", file.Name(), err.Error())
return err
}
}
}

return nil
}

func readDir(dirname string) ([]os.FileInfo, error) {
f, err := os.Open(dirname)
if err != nil {
return nil, err
}

list, err := f.Readdir(0)
f.Close()
if err != nil {
//todo: can not upload empty folder
if err == io.EOF {
return nil, nil
}
return nil, err
}

//sort.Slice(list, func(i, j int) bool { return list[i].Name() < list[j].Name() })
return list, nil
}
func mkModelPath(modelPath string) error {
return mkPathAndReadMeFile(modelPath, "You can put the files into this directory and download the files by the web page.")
}

func mkPathAndReadMeFile(path string, text string) error {
err := os.MkdirAll(path, os.ModePerm)
if err != nil {
log.Error("MkdirAll(%s) failed:%v", path, err)
return err
}

fileName := path + "README"
f, err := os.OpenFile(fileName, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, os.ModePerm)
if err != nil {
log.Error("OpenFile failed", err.Error())
return err
}

defer f.Close()

_, err = f.WriteString(text)
if err != nil {
log.Error("WriteString failed", err.Error())
return err
}

return nil
}

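// getPreTrainModelPath drops the first path segment (the bucket name) of the pre-trained model
// directory and appends the checkpoint file name; it returns "" when the directory contains no "/".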
func getPreTrainModelPath(pretrainModelDir string, fileName string) string {
index := strings.Index(pretrainModelDir, "/")
if index > 0 {
filterBucket := pretrainModelDir[index+1:]
return filterBucket + fileName
} else {
return ""
}

}

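// generateCommand assembles the shell command executed inside the Grampus container:
// prepare the work dir, download and unzip code/dataset (GPU only), export the output
// location, run the boot file with the user parameters, then upload the results (GPU only)
// and propagate the exit code.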
func generateCommand(repoName, processorType, codeRemotePath, dataRemotePath, bootFile, paramSrc, outputRemotePath, datasetName, pretrainModelPath, pretrainModelFileName, modelRemoteObsUrl string) (string, error) {
var command string

//prepare
workDir := grampus.NpuWorkDir
if processorType == grampus.ProcessorTypeNPU {
command += "pwd;cd " + workDir + grampus.CommandPrepareScriptNpu
} else if processorType == grampus.ProcessorTypeGPU {
workDir = grampus.GpuWorkDir
command += "pwd;cd " + workDir + fmt.Sprintf(grampus.CommandPrepareScriptGpu, setting.Grampus.SyncScriptProject, setting.Grampus.SyncScriptProject)
}

//download code & dataset
if processorType == grampus.ProcessorTypeNPU {
//no need to download code & dataset by internet
} else if processorType == grampus.ProcessorTypeGPU {
commandDownload := "./downloader_for_minio " + setting.Grampus.Env + " " + codeRemotePath + " " + grampus.CodeArchiveName + " '" + dataRemotePath + "' '" + datasetName + "'"
commandDownload = processPretrainModelParameter(pretrainModelPath, pretrainModelFileName, commandDownload)
command += commandDownload
}

//unzip code & dataset
if processorType == grampus.ProcessorTypeNPU {
//no need to process
} else if processorType == grampus.ProcessorTypeGPU {
unZipDatasetCommand := GenerateDatasetUnzipCommand(datasetName)
commandUnzip := "cd " + workDir + "code;unzip -q master.zip;rm -f master.zip;echo \"start to unzip dataset\";cd " + workDir + "dataset;" + unZipDatasetCommand
command += commandUnzip
}

command += "echo \"unzip finished;start to exec code;\";"

// set export
var commandExport string
if processorType == grampus.ProcessorTypeNPU {
commandExport = "export bucket=" + setting.Bucket + " && export remote_path=" + outputRemotePath + ";"
} else if processorType == grampus.ProcessorTypeGPU {
commandExport = "export env=" + setting.Grampus.Env + " && export remote_path=" + outputRemotePath + ";"
}

command += commandExport

//exec code
var parameters models.Parameters
var paramCode string

if len(paramSrc) != 0 {
err := json.Unmarshal([]byte(paramSrc), &parameters)
if err != nil {
log.Error("Failed to Unmarshal params: %s (%v)", paramSrc, err)
return command, err
}

for _, parameter := range parameters.Parameter {
paramCode += " --" + parameter.Label + "=" + parameter.Value
}
}

var commandCode string
if processorType == grampus.ProcessorTypeNPU {
paramCode += " --model_url=" + modelRemoteObsUrl
commandCode = "/bin/bash /home/work/run_train_for_openi.sh /home/work/openi.py " + grampus.NpuLocalLogUrl + paramCode + ";"
} else if processorType == grampus.ProcessorTypeGPU {
if pretrainModelFileName != "" {
paramCode += " --ckpt_url" + "=" + workDir + "pretrainmodel/" + pretrainModelFileName
}
commandCode = "cd " + workDir + "code/" + strings.ToLower(repoName) + ";python " + bootFile + paramCode + ";"
}

command += commandCode

//get exec result
commandGetRes := "result=$?;"
command += commandGetRes

//upload models
if processorType == grampus.ProcessorTypeNPU {
// no need to upload
} else if processorType == grampus.ProcessorTypeGPU {
commandUpload := "cd " + workDir + setting.Grampus.SyncScriptProject + "/;./uploader_for_gpu " + setting.Grampus.Env + " " + outputRemotePath + " " + workDir + "output/;"
command += commandUpload
}

//check exec result
commandCheckRes := "bash -c \"[[ $result -eq 0 ]] && exit 0 || exit -1\""
command += commandCheckRes

return command, nil
}
func processPretrainModelParameter(pretrainModelPath string, pretrainModelFileName string, commandDownload string) string {
commandDownloadTemp := commandDownload
if pretrainModelPath != "" {
commandDownloadTemp += " '" + pretrainModelPath + "' '" + pretrainModelFileName + "'"
}
commandDownloadTemp += ";"
return commandDownloadTemp
}

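// GenerateDatasetUnzipCommand builds the shell snippet that unpacks the selected dataset
// archives (zip or tar.gz) and removes them afterwards; multiple datasets are separated by ";".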
func GenerateDatasetUnzipCommand(datasetName string) string {
var unZipDatasetCommand string

datasetNameArray := strings.Split(datasetName, ";")
if len(datasetNameArray) == 1 { // single dataset
unZipDatasetCommand = "unzip -q '" + datasetName + "';"
if strings.HasSuffix(datasetNameArray[0], ".tar.gz") {
unZipDatasetCommand = "tar --strip-components=1 -zxvf '" + datasetName + "';"
}
unZipDatasetCommand += "rm -f '" + datasetName + "';"

} else { // multiple datasets
for _, datasetNameTemp := range datasetNameArray {
if strings.HasSuffix(datasetNameTemp, ".tar.gz") {
unZipDatasetCommand = unZipDatasetCommand + "tar -zxvf '" + datasetNameTemp + "';"
} else {
unZipDatasetCommand = unZipDatasetCommand + "unzip -q '" + datasetNameTemp + "' -d './" + strings.TrimSuffix(datasetNameTemp, ".zip") + "';"
}
unZipDatasetCommand += "rm -f '" + datasetNameTemp + "';"
}

}
return unZipDatasetCommand
}

func getPoolId() string {
var resourcePools modelarts.ResourcePool
json.Unmarshal([]byte(setting.ResourcePools), &resourcePools)

return resourcePools.Info[0].ID
}

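// PrepareSpec4Show loads the resource specification bound to the task and attaches it for display.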
func PrepareSpec4Show(task *models.Cloudbrain) {
s, err := resource.GetCloudbrainSpec(task.ID)
if err != nil {
log.Info("error:" + err.Error())
return
}
task.Spec = s
}

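// IsTaskNotStop reports whether the task is still in a non-final (running or waiting) status
// for its cluster type.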
func IsTaskNotStop(task *models.Cloudbrain) bool {
statuses := CloudbrainOneNotFinalStatuses
if task.Type == models.TypeCloudBrainTwo || task.Type == models.TypeCDCenter {
statuses = CloudbrainTwoNotFinalStatuses
} else if task.Type == models.TypeC2Net {
statuses = GrampusNotFinalStatuses
}

for _, status := range statuses {
if task.Status == status {
return true
}
}
return false

}

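// SyncTaskStatus queries the backend (CloudBrain one/two or Grampus) for the latest job state
// and updates the local record, sending a notification when the status changes.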
func SyncTaskStatus(task *models.Cloudbrain) error {
if task.Type == models.TypeCloudBrainOne {
result, err := cloudbrain.GetJob(task.JobID)
if err != nil {
log.Info("error:" + err.Error())
return fmt.Errorf("repo.cloudbrain_query_fail")
}

if result != nil {
jobRes, _ := models.ConvertToJobResultPayload(result.Payload)
taskRoles := jobRes.TaskRoles
taskRes, _ := models.ConvertToTaskPod(taskRoles[cloudbrain.SubTaskName].(map[string]interface{}))

oldStatus := task.Status
task.Status = taskRes.TaskStatuses[0].State

task.ContainerID = taskRes.TaskStatuses[0].ContainerID
models.ParseAndSetDurationFromCloudBrainOne(jobRes, task)

if task.DeletedAt.IsZero() { //normal record
if oldStatus != task.Status {
notification.NotifyChangeCloudbrainStatus(task, oldStatus)
}
err = models.UpdateJob(task)
if err != nil {
return fmt.Errorf("repo.cloudbrain_query_fail")

}
}

} else {
log.Error("SyncTaskStatus: GetJob(%v) returned an empty result", task.JobID)
return fmt.Errorf("repo.cloudbrain_query_fail")
}
} else if task.Type == models.TypeCloudBrainTwo || task.Type == models.TypeCDCenter {
err := modelarts.HandleTrainJobInfo(task)
if err != nil {
return fmt.Errorf("repo.cloudbrain_query_fail")
}

} else if task.Type == models.TypeC2Net {
result, err := grampus.GetJob(task.JobID)
if err != nil {
log.Error("GetJob failed:" + err.Error())
return fmt.Errorf("repo.cloudbrain_query_fail")
}

if result != nil {
if len(result.JobInfo.Tasks[0].CenterID) == 1 && len(result.JobInfo.Tasks[0].CenterName) == 1 {
task.AiCenter = result.JobInfo.Tasks[0].CenterID[0] + "+" + result.JobInfo.Tasks[0].CenterName[0]
}
oldStatus := task.Status
task.Status = grampus.TransTrainJobStatus(result.JobInfo.Status)

if task.Status != oldStatus || task.Status == models.GrampusStatusRunning {
task.Duration = result.JobInfo.RunSec
if task.Duration < 0 {
task.Duration = 0
}
task.TrainJobDuration = models.ConvertDurationToStr(task.Duration)

if task.StartTime == 0 && result.JobInfo.StartedAt > 0 {
task.StartTime = timeutil.TimeStamp(result.JobInfo.StartedAt)
}
if task.EndTime == 0 && models.IsTrainJobTerminal(task.Status) && task.StartTime > 0 {
task.EndTime = task.StartTime.Add(task.Duration)
}
task.CorrectCreateUnix()
if oldStatus != task.Status {
notification.NotifyChangeCloudbrainStatus(task, oldStatus)
if models.IsTrainJobTerminal(task.Status) && task.ComputeResource == models.NPUResource {
if len(result.JobInfo.Tasks[0].CenterID) == 1 {
urchin.GetBackNpuModel(task.ID, grampus.GetRemoteEndPoint(result.JobInfo.Tasks[0].CenterID[0]), grampus.BucketRemote, grampus.GetNpuModelObjectKey(task.JobName), grampus.GetCenterProxy(setting.Grampus.LocalCenterID))
}
}
}
err = models.UpdateJob(task)
if err != nil {
log.Error("UpdateJob failed:" + err.Error())
return fmt.Errorf("repo.cloudbrain_query_fail")
}
}
}
}
return nil

}

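// getTrainJobCommand builds the python start command for a CloudBrain train job, appending the
// user parameters and an optional pre-trained checkpoint and redirecting output to the job log file.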
func getTrainJobCommand(option api.CreateTrainJobOption) (string, error) {
var command string
bootFile := strings.TrimSpace(option.BootFile)
params := option.Params

if !strings.HasSuffix(bootFile, ".py") {
log.Error("bootFile(%s) format error", bootFile)
return command, errors.New("bootFile format error")
}

var parameters models.Parameters
var param string
if len(params) != 0 {
err := json.Unmarshal([]byte(params), &parameters)
if err != nil {
log.Error("Failed to Unmarshal params: %s (%v)", params, err)
return command, err
}

for _, parameter := range parameters.Parameter {
param += " --" + parameter.Label + "=" + parameter.Value
}
}
if option.CkptName != "" {
param += " --ckpt_url" + "=" + "/pretrainmodel/" + option.CkptName
}

command += "python /code/" + bootFile + param + " > " + cloudbrain.ModelMountPath + "/" + option.DisplayJobName + "-" + cloudbrain.LogFile

return command, nil
}

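// checkMultiNode verifies that the user is allowed to request the given number of work servers;
// it returns an i18n key describing the error, or "" when the request is valid.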
func checkMultiNode(userId int64, serverNum int) string {
if serverNum == 1 {
return ""
}
modelarts.InitMultiNode()
var isServerNumValid = false
if modelarts.MultiNodeConfig != nil {
for _, info := range modelarts.MultiNodeConfig.Info {
if isInOrg, _ := models.IsOrganizationMemberByOrgName(info.Org, userId); isInOrg {
if isInNodes(info.Node, serverNum) {
isServerNumValid = true
break
}

}
}
}
if isServerNumValid {
return ""
} else {
return "repo.modelarts.no_node_right"
}
}

func isInNodes(nodes []int, num int) bool {
for _, node := range nodes {
if node == num {
return true
}
}
return false

}

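// getUserCommand builds the custom start command and resolves the image url for ModelArts jobs
// whose engine id is negative (user-defined engines); for other engines it returns empty strings.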
func getUserCommand(engineId int, req *modelarts.GenerateTrainJobReq) (string, string) {
userImageUrl := ""
userCommand := ""
if engineId < 0 {
tmpCodeObsPath := strings.Trim(req.CodeObsPath, "/")
tmpCodeObsPaths := strings.Split(tmpCodeObsPath, "/")
lastCodeDir := "code"
if len(tmpCodeObsPaths) > 0 {
lastCodeDir = tmpCodeObsPaths[len(tmpCodeObsPaths)-1]
}
userCommand = "/bin/bash /home/work/run_train.sh 's3://" + req.CodeObsPath + "' '" + lastCodeDir + "/" + req.BootFile + "' '/tmp/log/train.log' --'data_url'='s3://" + req.DataUrl + "' --'train_url'='s3://" + req.TrainUrl + "'"
var versionInfos modelarts.VersionInfo
if err := json.Unmarshal([]byte(setting.EngineVersions), &versionInfos); err != nil {
log.Info("json parse err." + err.Error())
} else {
for _, engine := range versionInfos.Version {
if engine.ID == engineId {
userImageUrl = engine.Url
break
}
}
}
for _, param := range req.Parameters {
userCommand += " --'" + param.Label + "'='" + param.Value + "'"
}
return userCommand, userImageUrl
}
return userCommand, userImageUrl
}

+ 21
- 6
services/cloudbrain/resource/resource_specification.go View File

@@ -1,20 +1,23 @@
package resource

import (
"encoding/json"
"errors"
"fmt"
"strconv"
"strings"
"time"

"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/cloudbrain"
"code.gitea.io/gitea/modules/convert"
"code.gitea.io/gitea/modules/grampus"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/modelarts"
"code.gitea.io/gitea/modules/setting"
api "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/routers/response"
"code.gitea.io/gitea/services/admin/operate_log"
"encoding/json"
"errors"
"fmt"
"strconv"
"strings"
"time"
)

func AddResourceSpecification(doerId int64, req models.ResourceSpecificationReq) error {
@@ -210,6 +213,18 @@ func FindAvailableSpecs(userId int64, opts models.FindSpecsOptions) ([]*models.S
return specs, err
}

func FindAvailableSpecs4Show(userId int64, opts models.FindSpecsOptions) ([]*api.SpecificationShow, error) {
specs, err := FindAvailableSpecs(userId, opts)
if err != nil {
return nil, err
}
result := make([]*api.SpecificationShow, len(specs))
for i, v := range specs {
result[i] = convert.ToSpecification(v)
}
return result, nil
}

func filterExclusiveSpecs(r []*models.Specification, userId int64) []*models.Specification {
specs := make([]*models.Specification, 0, len(r))
specMap := make(map[int64]string, 0)


+ 2
- 4
templates/repo/cloudbrain/inference/new.tmpl View File

@@ -331,9 +331,7 @@
$('#model_name_version').empty()
let html = ''
nameMap[value].forEach(element => {
let {TrainTaskInfo} = element
TrainTaskInfo = JSON.parse(TrainTaskInfo)
html += `<div class="item" data-label="${element.Label}" data-id="${element.ID}" data-value="${element.Path}">${element.Version}</div>`
html += `<div class="item" data-label="${element.label}" data-id="${element.id}" data-value="${element.path}">${element.version}</div>`
});
$('#model_name_version').append(html)
$("#select_model_version").removeClass("loading")
@@ -387,7 +385,7 @@
}
function loadCheckpointList(value){
return new Promise((resolve,reject)=>{
$.get(`${RepoLink}/modelmanage/query_modelfile_for_predict`,{ID:value}, (data) => {
$.get(`${RepoLink}/modelmanage/query_modelfile_for_predict`,{id:value}, (data) => {
resolve(data)
})
})


+ 19
- 17
templates/repo/cloudbrain/trainjob/show.tmpl View File

@@ -633,24 +633,24 @@

<div class="required inline field">
<label>{{.i18n.Tr "repo.modelarts.train_job"}}</label>
<input type="hidden" class="width83" id="JobId" name="JobId" readonly required>
<input type="hidden" id="VersionName" name="VersionName" value="V0001">
<input type="hidden" class="width83" id="jobId" name="jobId" readonly required>
<input type="hidden" id="versionName" name="versionName" value="V0001">
<input style="width: 45%;" id="JobName" readonly required>
</div>

<div class="required inline field" id="modelname">
<label>{{.i18n.Tr "repo.model.manage.model_name"}}</label>
<input style="width: 45%;" id="name" name="Name" required maxlength="25"
<input style="width: 45%;" id="name" name="name" required maxlength="25"
onkeyup="this.value=this.value.replace(/[, ]/g,'')">
</div>
<div class="required inline field" id="verionname">
<label>{{.i18n.Tr "repo.modelconvert.modelversion"}}</label>
<input style="width: 45%;" id="version" name="Version" value="" readonly required maxlength="255">
<input style="width: 45%;" id="version" name="version" value="" readonly required maxlength="255">
</div>
<div class="unite min_title inline field required">
<label>{{.i18n.Tr "repo.model.manage.engine"}}</label>
<div class="ui dropdown selection search width70" id="choice_Engine">
<input type="hidden" id="Engine" name="Engine" required>
<input type="hidden" id="engine" name="engine" required>
<div class="default text">{{.i18n.Tr "repo.model.manage.select.engine"}}</div>
<i class="dropdown icon"></i>
<div class="menu" id="job-Engine">
@@ -677,12 +677,12 @@
</div>
<div class="inline field">
<label>{{.i18n.Tr "repo.model.manage.modellabel"}}</label>
<input style="width: 83%;margin-left: 7px;" id="label" name="Label" maxlength="255"
<input style="width: 83%;margin-left: 7px;" id="label" name="label" maxlength="255"
placeholder='{{.i18n.Tr "repo.modelarts.train_job.label_place"}}'>
</div>
<div class="inline field">
<label for="description">{{.i18n.Tr "repo.model.manage.modeldesc"}}</label>
<textarea style="width: 83%;margin-left: 7px;" id="Description" name="Description" rows="3"
<textarea style="width: 83%;margin-left: 7px;" id="description" name="description" rows="3"
maxlength="255" placeholder='{{.i18n.Tr "repo.modelarts.train_job.new_place"}}'
onchange="this.value=this.value.substring(0, 255)"
onkeydown="this.value=this.value.substring(0, 255)"
@@ -850,23 +850,19 @@
.modal({
centered: false,
onShow: function () {
$('input[name="Version"]').addClass('model_disabled')
// $('input[name="JobId"]').text(obj.JobName)
$('input[name="version"]').addClass('model_disabled')
$('#JobName').val(obj.DisplayJobName).addClass('model_disabled')
$('input[name="JobId"]').val(obj.JobID)
$('input[name="VersionName"]').val("V0001")
$('input[name="jobId"]').val(obj.JobID)
$('input[name="versionName"]').val("V0001")
$('#choice_Engine .default.text').text("PyTorch");
$('#choice_Engine input[name="Engine"]').val(0)
$('#choice_Engine input[name="engine"]').val(0)
$('#choice_Engine .default.text').css({ "color": "rgb(0, 0, 0,0.87)" })
$('.ui.dimmer').css({ "background-color": "rgb(136, 136, 136,0.7)" })
createModelName();
loadSelectedModelFile(obj);
},
onHide: function () {
var cityObj = $("#modelSelectedFile");
cityObj.attr("value", "");
document.getElementById("formId").reset();
$('.ui.dimmer').css({ "background-color": "" })
$('.ui.error.message').text()
$('.ui.error.message').css('display', 'none')
@@ -887,8 +883,14 @@
type: 'POST',
data: data,
success: function (res) {
$('input[name="Engine_name"]').val("");
$('input[name="Engine"]').val("");
$('input[name="engine_name"]').val("");
$('input[name="engine"]').val("");
$('input[name="jobId"]').val("");
$('input[name="label"]').val("");
$('input[name="description"]').val("");
var cityObj = $("#modelSelectedFile");
cityObj.attr("value", "");
document.getElementById("formId").reset();
location.href = `/${userName}/${repoPath}/modelmanage/show_model`
$('.ui.modal.second').modal('hide')
},


+ 21
- 20
templates/repo/grampus/trainjob/show.tmpl View File

@@ -624,24 +624,24 @@

<div class="required inline field">
<label>{{.i18n.Tr "repo.modelarts.train_job"}}</label>
<input type="hidden" class="width83" id="JobId" name="JobId" readonly required>
<input type="hidden" id="VersionName" name="VersionName" value="V0001">
<input type="hidden" class="width83" id="jobId" name="jobId" readonly required>
<input type="hidden" id="versionName" name="versionName" value="V0001">
<input style="width: 45%;" id="JobName" readonly required>
</div>

<div class="required inline field" id="modelname">
<label>{{.i18n.Tr "repo.model.manage.model_name"}}</label>
<input style="width: 45%;" id="name" name="Name" required maxlength="25"
<input style="width: 45%;" id="name" name="name" required maxlength="25"
onkeyup="this.value=this.value.replace(/[, ]/g,'')">
</div>
<div class="required inline field" id="verionname">
<label>{{.i18n.Tr "repo.modelconvert.modelversion"}}</label>
<input style="width: 45%;" id="version" name="Version" value="" readonly required maxlength="255">
<input style="width: 45%;" id="version" name="version" value="" readonly required maxlength="255">
</div>
<div class="unite min_title inline field required">
<label>{{.i18n.Tr "repo.model.manage.engine"}}</label>
<div class="ui dropdown selection search width70" id="choice_Engine">
<input type="hidden" id="Engine" name="Engine" required>
<input type="hidden" id="engine" name="engine" required>
<div class="default text">{{.i18n.Tr "repo.model.manage.select.engine"}}</div>
<i class="dropdown icon"></i>
<div class="menu" id="job-Engine">
@@ -669,12 +669,12 @@
</div>
<div class="inline field">
<label>{{.i18n.Tr "repo.model.manage.modellabel"}}</label>
<input style="width: 83%;margin-left: 7px;" id="label" name="Label" maxlength="255"
<input style="width: 83%;margin-left: 7px;" id="label" name="label" maxlength="255"
placeholder='{{.i18n.Tr "repo.modelarts.train_job.label_place"}}'>
</div>
<div class="inline field">
<label for="description">{{.i18n.Tr "repo.model.manage.modeldesc"}}</label>
<textarea style="width: 83%;margin-left: 7px;" id="Description" name="Description" rows="3"
<textarea style="width: 83%;margin-left: 7px;" id="description" name="description" rows="3"
maxlength="255" placeholder='{{.i18n.Tr "repo.modelarts.train_job.new_place"}}'
onchange="this.value=this.value.substring(0, 255)"
onkeydown="this.value=this.value.substring(0, 255)"
@@ -762,7 +762,6 @@
function showMenu() {
var cityObj = $("#modelSelectedFile");
var cityOffset = $("#modelSelectedFile").offset();
//$("#menuContent").css({left:cityOffset.left + "px", top:cityOffset.top + cityObj.outerHeight() + "px"}).slideDown("fast");
$("#menuContent").slideDown("fast");
$("body").bind("mousedown", onBodyDown);
}
@@ -861,11 +860,10 @@
.modal({
centered: false,
onShow: function () {
$('input[name="Version"]').addClass('model_disabled')
// $('input[name="JobId"]').text(obj.JobName)
$('input[name="version"]').addClass('model_disabled')
$('#JobName').val(obj.DisplayJobName).addClass('model_disabled')
$('input[name="JobId"]').val(obj.JobID)
$('input[name="VersionName"]').val("V0001")
$('input[name="jobId"]').val(obj.JobID)
$('input[name="versionName"]').val("V0001")
if(obj.ComputeResource=="NPU"){
if (obj.EngineName != null && obj.EngineName != "") {
@@ -873,16 +871,16 @@
srcEngine = srcEngine.trim().toLowerCase();
if (srcEngine == 'tensorflow') {
$('#choice_Engine .default.text').text("TensorFlow");
$('#choice_Engine input[name="Engine"]').val(1)
$('#choice_Engine input[name="engine"]').val(1)
}
if (srcEngine == 'mindspore') {
$('#choice_Engine .default.text').text("MindSpore");
$('#choice_Engine input[name="Engine"]').val(2)
$('#choice_Engine input[name="engine"]').val(2)
}
}
}else{
$('#choice_Engine .default.text').text("PyTorch");
$('#choice_Engine input[name="Engine"]').val(0)
$('#choice_Engine input[name="engine"]').val(0)
}
$('#choice_Engine .default.text').css({ "color": "rgb(0, 0, 0,0.87)" })
$('.ui.dimmer').css({ "background-color": "rgb(136, 136, 136,0.7)" })
@@ -890,9 +888,6 @@
loadSelectedModelFile(obj);
},
onHide: function () {
var cityObj = $("#modelSelectedFile");
cityObj.attr("value", "");
document.getElementById("formId").reset();
$('.ui.dimmer').css({ "background-color": "" })
$('.ui.error.message').text()
$('.ui.error.message').css('display', 'none')
@@ -914,8 +909,14 @@
type: 'POST',
data: data,
success: function (res) {
$('input[name="Engine_name"]').val("");
$('input[name="Engine"]').val("");
$('input[name="engine_name"]').val("");
$('input[name="engine"]').val("");
$('input[name="jobId"]').val("");
$('input[name="label"]').val("");
$('input[name="description"]').val("");
var cityObj = $("#modelSelectedFile");
cityObj.attr("value", "");
document.getElementById("formId").reset();
location.href = `/${userName}/${repoPath}/modelmanage/show_model`
$('.ui.modal.second').modal('hide')
},


+ 2
- 4
templates/repo/modelarts/inferencejob/new.tmpl View File

@@ -362,9 +362,7 @@
$('#model_name_version').empty()
let html = ''
nameMap[value].forEach(element => {
let {TrainTaskInfo} = element
TrainTaskInfo = JSON.parse(TrainTaskInfo)
html += `<div class="item" data-label="${element.Label}" data-id="${element.ID}" data-value="${element.Path}">${element.Version}</div>`
html += `<div class="item" data-label="${element.label}" data-id="${element.id}" data-value="${element.path}">${element.version}</div>`
});
$('#model_name_version').append(html)
$("#select_model_version").removeClass("loading")
@@ -418,7 +416,7 @@
}
function loadCheckpointList(value){
return new Promise((resolve,reject)=>{
$.get(`${RepoLink}/modelmanage/query_modelfile_for_predict`,{ID:value}, (data) => {
$.get(`${RepoLink}/modelmanage/query_modelfile_for_predict`,{id:value}, (data) => {
resolve(data)
})
})


+ 23
- 21
templates/repo/modelarts/trainjob/show.tmpl View File

@@ -662,29 +662,29 @@
<div class="two inline fields ">
<div class="required ten wide field">
<label>{{.i18n.Tr "repo.modelarts.train_job"}}</label>&nbsp;
<input type="hidden" class="width83" id="JobId" name="JobId" readonly required>
<input type="hidden" class="width83" id="jobId" name="jobId" readonly required>
<input class="width83" id="JobName" readonly required>

</div>
<div class="required six widde field">
<label>{{.i18n.Tr "repo.model.manage.version"}}</label>
<input class="width70" id="VersionName" name="VersionName" readonly required>
<input class="width70" id="versionName" name="versionName" readonly required>
</div>
</div>

<div class="required inline field" id="modelname">
<label>{{.i18n.Tr "repo.model.manage.model_name"}}</label>
<input style="width: 45%;" id="name" name="Name" required maxlength="25"
<input style="width: 45%;" id="name" name="name" required maxlength="25"
onkeyup="this.value=this.value.replace(/[, ]/g,'')">
</div>
<div class="required inline field" id="verionname">
<label>{{.i18n.Tr "repo.modelconvert.modelversion"}}</label>
<input style="width: 45%;" id="version" name="Version" value="" readonly required maxlength="255">
<input style="width: 45%;" id="version" name="version" value="" readonly required maxlength="255">
</div>
<div class="unite min_title inline field required">
<label>{{.i18n.Tr "repo.model.manage.engine"}}</label>
<input type="hidden" id="Engine" name="Engine" required>
<input style="width: 45%;" id="Engine_name" name="Engine_name" readonly required maxlength="255">
<input type="hidden" id="engine" name="engine" required>
<input style="width: 45%;" id="engine_name" name="engine_name" readonly required maxlength="255">
</div>
<div class="unite min_title inline fields required">
<div class="field required">
@@ -699,12 +699,12 @@
</div>
<div class="inline field">
<label>{{.i18n.Tr "repo.model.manage.modellabel"}}</label>
<input style="width: 83%;margin-left: 7px;" id="label" name="Label" maxlength="255"
<input style="width: 83%;margin-left: 7px;" id="label" name="label" maxlength="255"
placeholder='{{.i18n.Tr "repo.modelarts.train_job.label_place"}}'>
</div>
<div class="inline field">
<label for="description">{{.i18n.Tr "repo.model.manage.modeldesc"}}</label>
<textarea style="width: 83%;margin-left: 7px;" id="Description" name="Description" rows="3"
<textarea style="width: 83%;margin-left: 7px;" id="description" name="description" rows="3"
maxlength="255" placeholder='{{.i18n.Tr "repo.modelarts.train_job.new_place"}}'
onchange="this.value=this.value.substring(0, 255)"
onkeydown="this.value=this.value.substring(0, 255)"
@@ -899,27 +899,23 @@
.modal({
centered: false,
onShow: function () {
$('input[name="Version"]').addClass('model_disabled')
// $('input[name="JobId"]').text(obj.JobName)
$('input[name="version"]').addClass('model_disabled')
$('#JobName').val(obj.DisplayJobName).addClass('model_disabled')
$('input[name="JobId"]').val(obj.JobID)
$('input[name="VersionName"]').val(obj.VersionName).addClass('model_disabled')
$('input[name="jobId"]').val(obj.JobID)
$('input[name="versionName"]').val(obj.VersionName).addClass('model_disabled')
if(obj.EngineID ==122 || obj.EngineID ==35 || obj.EngineID ==-1 || obj.EngineID ==37){
$('input[name="Engine_name"]').val("MindSpore").addClass('model_disabled');
$('input[name="Engine"]').val(2);
$('input[name="engine_name"]').val("MindSpore").addClass('model_disabled');
$('input[name="engine"]').val(2);
}
if(obj.EngineID ==121 || obj.EngineID ==38){
$('input[name="Engine_name"]').val("TensorFlow").addClass('model_disabled');
$('input[name="Engine"]').val(1);
$('input[name="engine_name"]').val("TensorFlow").addClass('model_disabled');
$('input[name="engine"]').val(1);
}
$('.ui.dimmer').css({ "background-color": "rgb(136, 136, 136,0.7)" })
createModelName();
loadSelectedModelFile(obj);
},
onHide: function () {
var cityObj = $("#modelSelectedFile");
cityObj.attr("value", "");
document.getElementById("formId").reset();
$('.ui.dimmer').css({ "background-color": "" })
$('.ui.error.message').text()
$('.ui.error.message').css('display', 'none')
@@ -940,8 +936,14 @@
type: 'POST',
data: data,
success: function (res) {
$('input[name="Engine_name"]').val("");
$('input[name="Engine"]').val("");
$('input[name="engine_name"]').val("");
$('input[name="engine"]').val("");
$('input[name="jobId"]').val("");
$('input[name="label"]').val("");
$('input[name="description"]').val("");
var cityObj = $("#modelSelectedFile");
cityObj.attr("value", "");
document.getElementById("formId").reset();
location.href = `/${userName}/${repoPath}/modelmanage/show_model`
$('.ui.modal.second').modal('hide')
},


+ 39
- 38
templates/repo/modelmanage/convertIndex.tmpl View File

@@ -93,7 +93,7 @@
<div class="ui grid stackable item">
<div class="row">
<div class="three wide column padding0">
<a class="title" href="{{$.RepoLink}}/modelmanage/show_model_convert_info?ID={{.ID}}" title="{{.Name}}" style="font-size: 14px;">
<a class="title" href="{{$.RepoLink}}/modelmanage/show_model_convert_info?id={{.ID}}" title="{{.Name}}" style="font-size: 14px;">
<span class="fitted" style="width: 90%;vertical-align: middle;">{{.Name}}</span>
</a>
</div>
@@ -141,7 +141,7 @@
</form>

{{if .IsCanOper}}
<a id="ai-download-{{.ID}}" href="{{$.Repository.HTMLURL}}/modelmanage/download_model_convert/{{.ID}}?AllDownload=true&a=1" class='ui basic {{if eq .Status "SUCCEEDED" "COMPLETED"}}blue {{else}}disabled {{end}}button' style="border-radius: .28571429rem;">
<a id="ai-download-{{.ID}}" href="{{$.Repository.HTMLURL}}/modelmanage/download_model_convert/{{.ID}}?allDownload=true&a=1" class='ui basic {{if eq .Status "SUCCEEDED" "COMPLETED"}}blue {{else}}disabled {{end}}button' style="border-radius: .28571429rem;">
{{$.i18n.Tr "repo.modelconvert.download"}}
</a>
{{else}}
@@ -233,7 +233,7 @@
</div>
<div class="ui dropdown selection search eight wide field" id="choice_version">
<input type="hidden" id="ModelVersion" name="ModelVersion" required>
<input type="hidden" id="modelVersion" name="modelVersion" required>
<div class="default text">{{$.i18n.Tr "repo.modelconvert.selectversion"}}</div>
<i class="dropdown icon"></i>
<div class="menu" id="model-version">
@@ -246,7 +246,7 @@
<label for="choice_file">{{$.i18n.Tr "repo.model.manage.modelfile"}}</label>
</div>
<div class="ui dropdown selection search eight wide field" id="choice_file">
<input type="hidden" id="ModelFile" name="ModelFile" required>
<input type="hidden" id="modelFile" name="modelFile" required>
<div class="default text">{{$.i18n.Tr "repo.modelconvert.selectmodelfile"}}</div>
<i class="dropdown icon"></i>
<div class="menu" id="model-file">
@@ -260,10 +260,10 @@
</div>
<div class="unite min_title inline fields required">
<div class="three wide field right aligned">
<label for="SrcEngine">{{$.i18n.Tr "repo.modelconvert.srcengine"}}</label>
<label for="srcEngine">{{$.i18n.Tr "repo.modelconvert.srcengine"}}</label>
</div>
<select id="SrcEngine" class="ui search dropdown eight wide field" placeholder="" style='color:#000000;' name="SrcEngine" onchange="javascript:srcEngineChanged()">
<select id="srcEngine" class="ui search dropdown eight wide field" placeholder="" style='color:#000000;' name="srcEngine" onchange="javascript:srcEngineChanged()">
</select>
</div>
@@ -289,30 +289,30 @@
<div class="unite min_title inline fields required">
<div class="three wide field right aligned">
<label for="DestFormat">{{$.i18n.Tr "repo.modelconvert.outputformat"}}</label>
<label for="destFormat">{{$.i18n.Tr "repo.modelconvert.outputformat"}}</label>
</div>
<select id="DestFormat" class="ui search dropdown eight wide field" placeholder="" style='width:50%' name="DestFormat">
<select id="destFormat" class="ui search dropdown eight wide field" placeholder="" style='width:50%' name="destFormat">
</select>
</div>
<div class="unite min_title inline fields">
<div class="three wide field right aligned">
<label for="NetOutputFormat">{{$.i18n.Tr "repo.modelconvert.netoutputdata"}}&nbsp;&nbsp;</label>
<label for="netOutputFormat">{{$.i18n.Tr "repo.modelconvert.netoutputdata"}}&nbsp;&nbsp;</label>
</div>
<select id="NetOutputFormat" class="ui search dropdown eight wide field" placeholder="" style='width:50%' name="NetOutputFormat">
<select id="netOutputFormat" class="ui search dropdown eight wide field" placeholder="" style='width:50%' name="netOutputFormat">

</select>
</div>
<div class="unite min_title inline fields">
<div class="three wide field right aligned">
<label for="Description">{{$.i18n.Tr "repo.modelconvert.taskdesc"}}&nbsp;&nbsp;</label>
<label for="description">{{$.i18n.Tr "repo.modelconvert.taskdesc"}}&nbsp;&nbsp;</label>
</div>
<div class="twelve wide field">
<textarea id="Description" name="Description" rows="1" maxlength="255" placeholder='{{.i18n.Tr "repo.modelarts.train_job.new_place"}}' onchange="this.value=this.value.substring(0, 255)" onkeydown="this.value=this.value.substring(0, 255)" onkeyup="this.value=this.value.substring(0, 256)"></textarea>
<textarea id="description" name="description" rows="1" maxlength="255" placeholder='{{.i18n.Tr "repo.modelarts.train_job.new_place"}}' onchange="this.value=this.value.substring(0, 255)" onkeydown="this.value=this.value.substring(0, 255)" onkeyup="this.value=this.value.substring(0, 256)"></textarea>
</div>
</div>
<div class="unite min_title inline field">
@@ -364,9 +364,9 @@
$("#task_name").removeClass("error")
}

data['desc']= $('#Description').val()
data['modelId'] = $('#ModelVersion').val()
data['SrcEngine'] = $('#SrcEngine').val();
data['desc']= $('#description').val()
data['modelId'] = $('#modelVersion').val()
data['srcEngine'] = $('#srcEngine').val();
data['inputshape']= $('#inputshape').val();

if(inputshapeNotValid(data['inputshape'])){
@@ -379,10 +379,10 @@
}

data['inputdataformat']= $('#inputdataformat').val();
data['DestFormat'] = $('#DestFormat').val();
data['NetOutputFormat']= $('#NetOutputFormat').val();
data['ModelFile'] = $('#ModelFile').val();
if(data['ModelFile']==""){
data['destFormat'] = $('#destFormat').val();
data['netOutputFormat']= $('#netOutputFormat').val();
data['modelFile'] = $('#modelFile').val();
if(data['modelFile']==""){
$('.ui.error.message').text("{{.i18n.Tr "repo.modelconvert.modelfileempty"}}")
$('.ui.error.message').css('display','block')
$("#ModelFile_Div").addClass("error")
@@ -392,11 +392,11 @@
}
$.post(`${repolink}/modelmanage/create_model_convert`,data,(result) => {
console.log("result=" + result);
if(result.result_code ==0){
if(result.code ==0){
$('.ui.modal.second').modal('hide');
window.location.reload();
}else{
$('.ui.error.message').text(result.message)
$('.ui.error.message').text(result.msg)
$('.ui.error.message').css('display','block')
}
})
@@ -456,7 +456,7 @@
$('#choice_version').dropdown({
onChange:function(value){
console.log("model version:" + value);
$('#choice_version input[name="ModelVersion"]').val(value)
$('#choice_version input[name="modelVersion"]').val(value)
loadModelFile(value);
}
})
@@ -464,26 +464,26 @@
$('#choice_file').dropdown({
onChange:function(value){
console.log("model file:" + value);
$('#choice_file input[name="ModelFile"]').val(value)
$('#choice_file input[name="modelFile"]').val(value)
}
})

})

function srcEngineChanged(){
var ele = window.document.getElementById("SrcEngine");
var ele = window.document.getElementById("srcEngine");
var index=ele.selectedIndex;
var options=ele.options;
var option = options[index];
console.log("SrcEngine value=" + option);
console.log("srcEngine value=" + option);
let destFormatHtml = "<option name=\"ONNX\" value=\"0\">ONNX</option>";
let netOutputFormatHtml = "<option name=\"FP32\" value=\"0\">FP32</option>";
if(option==null || option =="undefined" || option.value == 0){
destFormatHtml += "<option name=\"TensorRT\" value=\"1\">TensorRT</option>"
netOutputFormatHtml += "<option name=\"FP16\" value=\"1\">FP16</option>";
}
$('#DestFormat').html(destFormatHtml);
$('#NetOutputFormat').html(netOutputFormatHtml);
$('#destFormat').html(destFormatHtml);
$('#netOutputFormat').html(netOutputFormatHtml);
}
function loadModelList(){
@@ -509,7 +509,7 @@
if(modelId ==null || modelId ==""){
console.log("modelId is null");
}else{
$.get(`${repolink}/modelmanage/query_modelfile_for_predict?ID=${modelId}`, (data) => {
$.get(`${repolink}/modelmanage/query_modelfile_for_predict?id=${modelId}`, (data) => {
const n_length = data.length
let file_html=''
let firstFileName =''
@@ -526,7 +526,7 @@
}
$("#model-file").append(file_html)
$('#choice_file .default.text').text(firstFileName)
$('#choice_file input[name="ModelFile"]').val(firstFileName)
$('#choice_file input[name="modelFile"]').val(firstFileName)
})

}
@@ -550,19 +550,19 @@
n_length = versionList.length
let train_html=''
for (let i=0;i<n_length;i++){
train_html += `<div class="item" data-value="${versionList[i].ID}">${versionList[i].Version}</div>`
train_html += `<div class="item" data-value="${versionList[i].id}">${versionList[i].version}</div>`
train_html += '</div>'
}
$("#model-version").append(train_html)
$('#choice_version .default.text').text(versionList[0].Version)
$('#choice_version input[name="ModelVersion"]').val(versionList[0].ID)
loadModelFile(versionList[0].ID);
$('#choice_version .default.text').text(versionList[0].version)
$('#choice_version input[name="modelVersion"]').val(versionList[0].id)
loadModelFile(versionList[0].id);
}
setEngineValue(value);
}
function setEngineValue(value){
$('#SrcEngine').dropdown('clear');
$('#srcEngine').dropdown('clear');
console.log("setEngineValue value=" + value);
let html = ""
html +="<option name=\"PyTorch\" " + getSelected(0,value) + " value=\"0\">PyTorch</option>";
@@ -570,7 +570,8 @@
html +="<option name=\"MindSpore\" " + getSelected(2,value) + " value=\"2\">MindSpore</option>";
html +="<option name=\"PaddlePaddle\" " + getSelected(4,value) + " value=\"4\">PaddlePaddle</option>";
html +="<option name=\"MXNet\" " + getSelected(6,value) + " value=\"6\">MXNet</option>";
$('#SrcEngine').html(html);

$('#srcEngine').html(html);
srcEngineChanged();
}
function getSelected(engineOption, modelName){
@@ -580,13 +581,13 @@
let nameMap = modelData.nameMap
let versionList = nameMap[modelName]
if(versionList != null && versionList.length >0){
if(versionList[0].Engine == engineOption){
if(versionList[0].engine == engineOption){
return "selected=\"selected\"";
}else{
if((versionList[0].Engine==122 || versionList[0].Engine==37) && engineOption==2){
if((versionList[0].engine==122 || versionList[0].engine==37) && engineOption==2){
return "selected=\"selected\"";
}
if((versionList[0].Engine==121 || versionList[0].Engine==38) && engineOption==1){
if((versionList[0].engine==121 || versionList[0].engine==38) && engineOption==1){
return "selected=\"selected\"";
}
}


+ 28
- 25
templates/repo/modelmanage/index.tmpl View File

@@ -138,20 +138,20 @@
<input type="hidden" name="_csrf" value="">
<div class="inline fields">
<div class="required two wide field right aligned">
<label for="JobId">{{.i18n.Tr "repo.model.manage.select.trainjob"}}</label>
<label for="jobId">{{.i18n.Tr "repo.model.manage.select.trainjob"}}</label>
</div>
<div class="required thirteen wide inline field">
<div class="ui dropdown selection search loading" id="choice_model">
<input type="hidden" id="JobId" name="JobId" required>
<input type="hidden" id="jobId" name="jobId" required>
<div class="default text">{{.i18n.Tr "repo.model.manage.select.trainjob"}}</div>
<i class="dropdown icon"></i>
<div class="menu" id="job-name">
</div>
</div>
<label for="VersionName">{{.i18n.Tr "repo.model.manage.version"}}</label>
<label for="versionName">{{.i18n.Tr "repo.model.manage.version"}}</label>
<span>&nbsp;</span>
<div class="ui dropdown selection search" id="choice_version">
<input type="hidden" id="VersionName" name="VersionName" required>
<input type="hidden" id="versionName" name="versionName" required>
<div class="default text">{{.i18n.Tr "repo.model.manage.select.version"}}</div>
<i class="dropdown icon"></i>
<div class="menu" id="job-version">
@@ -162,18 +162,18 @@
</div>
<div class="required inline fields" id="modelname">
<div class="two wide field right aligned">
<label for="Name">{{.i18n.Tr "repo.model.manage.model_name"}}</label>
<label for="name">{{.i18n.Tr "repo.model.manage.model_name"}}</label>
</div>
<div class="eight wide field">
<input id="name" name="Name" required maxlength="25" onkeyup="this.value=this.value.replace(/[, ]/g,'')">
<input id="name" name="name" required maxlength="25" onkeyup="this.value=this.value.replace(/[, ]/g,'')">
</div>
</div>
<div class="required inline fields" id="verionname">
<div class="required inline fields" id="verionName">
<div class="two wide field right aligned">
<label for="Version">{{.i18n.Tr "repo.model.manage.version"}}</label>
<label for="version">{{.i18n.Tr "repo.model.manage.version"}}</label>
</div>
<div class="eight wide field">
<input id="version" name="Version" value="" readonly required maxlength="255">
<input id="version" name="version" value="" readonly required maxlength="255">
</div>
</div>

@@ -182,7 +182,7 @@
<label for="Engine">{{.i18n.Tr "repo.model.manage.engine"}}</label>
</div>
<div class="ui ten wide field dropdown selection search" id="choice_Engine">
<input type="hidden" id="Engine" name="Engine" required>
<input type="hidden" id="engine" name="engine" required>
<div class="default text newtext">{{.i18n.Tr "repo.model.manage.select.engine"}}</div>
<i class="dropdown icon"></i>
<div class="menu" id="job-Engine">
@@ -209,7 +209,7 @@
<label for="Label">{{.i18n.Tr "repo.model.manage.modellabel"}} &nbsp</label>
</div>
<div class="thirteen wide field">
<input id="label" name="Label" maxlength="255" placeholder='{{.i18n.Tr "repo.modelarts.train_job.label_place"}}'>
<input id="label" name="label" maxlength="255" placeholder='{{.i18n.Tr "repo.modelarts.train_job.label_place"}}'>
</div>
</div>
<div class="inline fields">
@@ -217,7 +217,7 @@
<label for="description">{{.i18n.Tr "repo.model.manage.modeldesc"}} &nbsp</label>
</div>
<div class="thirteen wide field">
<textarea id="Description" name="Description" rows="3"
<textarea id="description" name="description" rows="3"
maxlength="255" placeholder='{{.i18n.Tr "repo.modelarts.train_job.new_place"}}'
onchange="this.value=this.value.substring(0, 255)"
onkeydown="this.value=this.value.substring(0, 255)"
@@ -331,7 +331,7 @@
centered: false,
onShow: function () {
$('#model_header').text({{.i18n.Tr "repo.model.manage.import_new_model"}})
$('input[name="Version"]').addClass('model_disabled')
$('input[name="version"]').addClass('model_disabled')
$('.ui.dimmer').css({ "background-color": "rgb(136, 136, 136,0.7)" })
$("#job-name").empty()
createModelName()
@@ -368,7 +368,7 @@
console.log("model version:" + value);
if (modelData != null) {
for (var i = 0; i < modelData.length; i++) {
if (modelData[i].VersionName == value) {
if (modelData[i].versionName == value) {
setEngine(modelData[i])
loadModelFile(modelData[i])
break;
@@ -406,7 +406,7 @@
$("#job-name").append(train_html)
$("#choice_model").removeClass("loading")
$('#choice_model .default.text').text(data[0].DisplayJobName)
$('#choice_model input[name="JobId"]').val(data[0].JobID)
$('#choice_model input[name="jobId"]').val(data[0].JobID)
loadTrainVersion()
}else{
$("#choice_model").removeClass("loading")
@@ -414,8 +414,9 @@
})
}
function loadTrainVersion(value) {
let JobID = !value ? $('#choice_model input[name="JobId"]').val() : value
$.get(`${repolink}/modelmanage/query_train_job_version?JobID=${JobID}`, (data) => {
let tmp = $('#choice_model input[name="jobId"]').val();
let jobId = !value ? $('#choice_model input[name="jobId"]').val() : value
$.get(`${repolink}/modelmanage/query_train_job_version?jobId=${jobId}`, (data) => {
const n_length = data.length
let train_html = '';
modelData = data;
@@ -431,7 +432,7 @@
versionName = "V0001";
}
$('#choice_version .default.text').text(versionName)
$('#choice_version input[name="VersionName"]').val(versionName)
$('#choice_version input[name="versionName"]').val(versionName)
setEngine(data[0])
loadModelFile(data[0])
}
@@ -453,7 +454,9 @@
type=0;
}
}
$.get(`${repolink}/modelmanage/query_train_model?jobName=${trainJob.JobName}&type=${type}&VersionName=${trainJob.VersionName}`, (data) => {
$.get(`${repolink}/modelmanage/query_train_model?jobName=${trainJob.JobName}&type=${type}&versionName=${trainJob.VersionName}`, (data) => {
var cityObj = $("#modelSelectedFile");
cityObj.attr("value", "");
const n_length = data.length
let file_html=''
let firstFileName =''
@@ -518,12 +521,12 @@
}
}
}
function setEngine(modelVersion) {
console.log("modelVersion=" + modelVersion);
function setEngine(trainJob) {
console.log("trainJob=" + trainJob);
$('#choice_Engine').dropdown('clear')
$("#job-Engine").empty()
if (modelVersion.EngineName != null && modelVersion.EngineName != "") {
srcEngine = modelVersion.EngineName.split('-')[0]
if (trainJob.EngineName != null && trainJob.EngineName != "") {
srcEngine = trainJob.EngineName.split('-')[0]
srcEngine = srcEngine.trim().toLowerCase();
let selectedText = "PyTorch";
let selectedValue = 0;
@@ -548,7 +551,7 @@
itemHtml += "<option class=\"item\" data-value=\"3\">Other</option>"

$('#choice_Engine .default.text').text(selectedText)
$('#choice_Engine input[name="Engine"]').val(selectedValue)
$('#choice_Engine input[name="engine"]').val(selectedValue)
$("#job-Engine").append(itemHtml);
$("#choice_Engine").removeClass('disabled');
} else {
@@ -560,7 +563,7 @@
itemHtml += "<option class=\"item\" data-value=\"6\">MXNet</option>"
itemHtml += "<option class=\"item\" data-value=\"3\">Other</option>"
$('#choice_Engine .default.text').text("PyTorch");
$('#choice_Engine input[name="Engine"]').val(0)
$('#choice_Engine input[name="engine"]').val(0)
$("#job-Engine").append(itemHtml);
$("#choice_Engine").removeClass('disabled');
}
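
The import-model form above likewise renames its input names (JobId → jobId, VersionName → versionName, Name → name, Version → version, Engine → engine, Label → label, Description → description), so the server side presumably looks up camelCase form keys. A small runnable sketch of the renamed keys only; it assumes nothing about the repository's actual binding code.

package main

import (
	"fmt"
	"net/url"
)

// Illustrative only: the form above now submits camelCase keys, so any
// server-side lookup would use "jobId", "versionName", "name", "version",
// "engine", "label" and "description" instead of the old PascalCase names.
func main() {
	form := url.Values{}
	form.Set("jobId", "job-42")
	form.Set("versionName", "V0001")
	form.Set("name", "resnet50")
	fmt.Println(form.Get("jobId"), form.Get("versionName"), form.Get("name")) // job-42 V0001 resnet50
}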


+ 69
- 71
templates/repo/modelmanage/showinfo.tmpl View File

@@ -91,16 +91,16 @@
<tbody>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.model.manage.model_name"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="ModelName" title=""></span></td>
<td class="ti-text-form-content word-elipsis"><span id="modelName" title=""></span></td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.model.manage.version"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="Version" title=""></span></td>
<td class="ti-text-form-content word-elipsis"><span id="version" title=""></span></td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.migrate_items_labels"}}</td>
<td class="ti-text-form-content">
<div id="Label" style="overflow: hidden;width: 95%;">
<div id="label" style="overflow: hidden;width: 95%;">
</div>
@@ -109,17 +109,17 @@
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.modelarts.model_size"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="Size" title=""></span></td>
<td class="ti-text-form-content word-elipsis"><span id="size" title=""></span></td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.modelarts.createtime"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="CreateTime" title=""></span></td>
<td class="ti-text-form-content word-elipsis"><span id="createTime" title=""></span></td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.model.manage.description"}}</td>
<td class="ti-text-form-content" >
<div id="edit-td" style="display:flex">
<span id="Description" title="" class="iword-elipsis"></span>
<span id="description" title="" class="iword-elipsis"></span>
<i id="edit-pencil" data-id="" data-desc="" class="pencil alternate icon" style="cursor:pointer;vertical-align: top;" id="editor" onclick="editorFn(this)"></i>
</div>
</td>
@@ -127,38 +127,38 @@
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.modelarts.train_job"}}</td>
<td class="ti-text-form-content word-elipsis">
<a id="DisplayJobNameHref" class="title" style="font-size: 14px;" target="_blank">
<span id="DisplayJobName" class="fitted" style="width: 90%;vertical-align: middle;"></span>
<a id="displayJobNameHref" class="title" style="font-size: 14px;" target="_blank">
<span id="displayJobName" class="fitted" style="width: 90%;vertical-align: middle;"></span>
</a>
</td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.modelarts.code_version"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="CodeBranch" title=""></span></td>
<td class="ti-text-form-content word-elipsis"><span id="codeBranch" title=""></span></td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.modelarts.train_job.start_file"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="BootFile" title=""></span></td>
<td class="ti-text-form-content word-elipsis"><span id="bootFile" title=""></span></td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.modelarts.train_job.train_dataset"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="DatasetName" title=""></span></td>
<td class="ti-text-form-content word-elipsis"><span id="datasetName" title=""></span></td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.modelarts.train_job.run_parameter"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="Parameters" title=""></span></td>
<td class="ti-text-form-content word-elipsis"><span id="parameters" title=""></span></td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.modelarts.train_job.AI_Engine"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="EngineName" title=""></span></td>
<td class="ti-text-form-content word-elipsis"><span id="engineName" title=""></span></td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.modelarts.train_job.standard"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="FlavorName" title=""></span></td>
<td class="ti-text-form-content word-elipsis"><span id="flavorName" title=""></span></td>
</tr>
<tr>
<td class="ti-text-form-label text-width80">{{$.i18n.Tr "repo.modelarts.train_job.compute_node"}}</td>
<td class="ti-text-form-content word-elipsis"><span id="WorkServerNumber" title=""></span></td>
<td class="ti-text-form-content word-elipsis"><span id="workServerNumber" title=""></span></td>
</tr>
</tbody>
</table>
@@ -214,47 +214,47 @@ $(document).ready(loadInfo);
function changeInfo(version){
$.get(`${url}show_model_info_api?name=${ID}`,(data)=>{
let versionData = data.filter((item)=>{
return item.Version === version
return item.version === version
})
let returnArray = []
returnArray = transObj(versionData)
let [initObj,initModelAcc,id] = returnArray
editorCancel('','')
renderInfo(initObj,initModelAcc,id)
loadModelFile(versionData[0].ID,versionData[0].Version,'','','init')
loadModelFile(versionData[0].id,versionData[0].version,'','','init')
})
}
function loadInfo(){
$.get(`${url}show_model_info_api?name=${ID}`,(data)=>{
let html = ''
for (let i=0;i<data.length;i++){
if(!data[i].IsCanOper){
if(!data[i].isCanOper){
$("#edit-pencil").css("display","none")
}
html += `<option value="${data[i].Version}">${data[i].Version}</option>`
html += `<option value="${data[i].version}">${data[i].version}</option>`
}
$('#dropdown').append(html)
let returnArray = []
returnArray = transObj(data)
let [initObj,initModelAcc,id] = returnArray
renderInfo(initObj,initModelAcc,id)
loadModelFile(data[0].ID,data[0].Version,'','','init')
loadModelFile(data[0].id,data[0].version,'','','init')
})
}
function getEngineName(model){
if(model.Engine == 0){
if(model.engine == 0){
return "PyTorch";
}else if(model.Engine == 1 || model.Engine == 121 || model.Engine == 38){
}else if(model.engine == 1 || model.engine == 121 || model.engine == 38){
return "TensorFlow";
}else if(model.Engine == 2 || model.Engine == 122 || model.Engine == 35 || model.Engine == 37){
}else if(model.engine == 2 || model.engine == 122 || model.engine == 35 || model.engine == 37){
return "MindSpore";
}else if(model.Engine == 3){
}else if(model.engine == 3){
return "Other";
}else if(model.Engine == 4){
}else if(model.engine == 4){
return "PaddlePaddle";
}else if(model.Engine == 5){
}else if(model.engine == 5){
return "OneFlow";
}else if(model.Engine == 6){
}else if(model.engine == 6){
return "MXNet";
}
else{
@@ -262,36 +262,34 @@ function loadInfo(){
}
}
function transObj(data){
let {ID,Name,Version,Label,Size,Description,CreatedUnix,Accuracy,CodeBranch,CodeCommitID,TrainTaskInfo} = data[0]
let modelAcc = JSON.parse(Accuracy)
TrainTaskInfo = JSON.parse(TrainTaskInfo)
// Parameters = JSON.parse(Parameters)
let {Parameters} = TrainTaskInfo
let EngineName = getEngineName(data[0])
Parameters = JSON.parse(Parameters)
Parameters = Parameters.parameter.length === 0 ? '--':Parameters.parameter
let size = tranSize(Size)
let time = transTime(CreatedUnix)
let {id,name,version,label,size,description,createdUnix,accuracy,codeBranch,codeCommitID,trainTaskInfo} = data[0]
let modelAcc = JSON.parse(accuracy)
trainTaskInfo = JSON.parse(trainTaskInfo)
let engineName = getEngineName(data[0])
parameters = JSON.parse(trainTaskInfo.Parameters)
parameters = parameters.parameter.length === 0 ? '--':parameters.parameter
size = tranSize(size)
let time = transTime(createdUnix)
let initObj = {
ModelName:Name || '--',
Version:Version,
Label:Label || '--',
Size:size,
CreateTime:time,
Description:Description || '--',
CodeBranch:CodeBranch || '--',
CodeCommitID:CodeCommitID || '--',
BootFile:TrainTaskInfo.BootFile || '--',
DatasetName:TrainTaskInfo.DatasetName || '--',
Parameters:TrainTaskInfo.Parameters || '--',
FlavorName:TrainTaskInfo.FlavorName || '--',
WorkServerNumber:TrainTaskInfo.WorkServerNumber || '1',
Parameters:Parameters,
EngineName:EngineName,
DisplayJobName:TrainTaskInfo.DisplayJobName || '--',
TrainJobVersionName:TrainTaskInfo.VersionName || '',
CloudBrainJobID:TrainTaskInfo.JobID|| '',
CloudBrainType:TrainTaskInfo.Type,
modelName:name || '--',
version:version,
label:label || '--',
size:size,
createTime:time,
description:description || '--',
codeBranch:codeBranch || '--',
codeCommitID:codeCommitID || '--',
bootFile:trainTaskInfo.BootFile || '--',
datasetName:trainTaskInfo.DatasetName || '--',
parameters:trainTaskInfo.Parameters || '--',
flavorName:trainTaskInfo.FlavorName || '--',
workServerNumber:trainTaskInfo.WorkServerNumber || '--',
parameters:parameters,
engineName:engineName,
displayJobName:trainTaskInfo.DisplayJobName || '--',
trainJobVersionName:trainTaskInfo.VersionName || '',
cloudBrainJobID:trainTaskInfo.JobID|| '',
cloudBrainType:trainTaskInfo.Type,
}
let initModelAcc = {
Accuracy: modelAcc.Accuracy || '--',
@@ -299,7 +297,7 @@ function transObj(data){
Precision:modelAcc.Precision || '--',
Recall: modelAcc.Recall || '--'
}
return [initObj,initModelAcc,ID]
return [initObj,initModelAcc,id]
}
function transTime(time){
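
One detail worth noting in the transObj rewrite above: only the outer payload keys become camelCase; trainTaskInfo is still a raw JSON string whose inner keys (BootFile, DatasetName, FlavorName, ...) keep their PascalCase, which is why the new code continues to read trainTaskInfo.BootFile. A hypothetical Go sketch of that shape; the struct name and sample data are illustrative.

package main

import (
	"encoding/json"
	"fmt"
)

// Illustrative only: the outer object uses camelCase keys, while the value of
// trainTaskInfo is itself a JSON-encoded string whose inner keys are unchanged.
type modelDetail struct {
	ID            string `json:"id"`
	Version       string `json:"version"`
	TrainTaskInfo string `json:"trainTaskInfo"`
}

func main() {
	raw := `{"id":"abc","version":"V0001","trainTaskInfo":"{\"BootFile\":\"train.py\",\"DatasetName\":\"mnist\"}"}`
	var m modelDetail
	if err := json.Unmarshal([]byte(raw), &m); err != nil {
		panic(err)
	}
	inner := map[string]string{}
	_ = json.Unmarshal([]byte(m.TrainTaskInfo), &inner)
	fmt.Println(inner["BootFile"], inner["DatasetName"]) // train.py mnist
}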
@@ -338,8 +336,8 @@ function editorSure(text,id){
let description=$('#textarea-value').val()
let sourcetext = $('#textarea-value').val().replace(/\n/g,'enter;')
let data = {
ID:id,
Description:description
id:id,
description:description
}
$.ajax({
url:`${url}modify_model`,
@@ -351,14 +349,14 @@ function editorSure(text,id){
}
function renderInfo(obj,accObj,id){
for(let key in obj){
if(key==="Description"){
if(key==="description"){
let descriptionText=obj[key].replace(/\r\n|\n/g,'enter;')
$(`#${key}`).text(obj[key])
$(`#${key}`).attr("title",obj[key])
$('#edit-pencil').attr("data-id",id)
$('#edit-pencil').attr("data-desc",descriptionText)
}
else if(key==="Label"){
else if(key==="label"){
$('#Label').empty()
if(obj[key]==='--'){
$('#Label').text(obj[key])
@@ -372,33 +370,33 @@ function renderInfo(obj,accObj,id){
$('#Label').append(html)
}
}
else if(key==="CodeCommitID"){
else if(key==="codeCommitID"){
let codeCommit = obj[key].slice(0,10)
let html = `<a style="margin-left:1rem" class="ui label" title="${codeCommit}">${codeCommit}</a>`
$('#CodeBranch').append(html)

}
else if(key==="DisplayJobName"){
let type=obj["CloudBrainType"]
else if(key==="displayJobName"){
let type=obj["cloudBrainType"]
let href=""
if(type==1){
href=trainJobUrl + "modelarts/train-job/" + obj["CloudBrainJobID"]
href=trainJobUrl + "modelarts/train-job/" + obj["cloudBrainJobID"]
}else if(type==0){
href=trainJobUrl + "cloudbrain/train-job/" + obj["CloudBrainJobID"]
href=trainJobUrl + "cloudbrain/train-job/" + obj["cloudBrainJobID"]
}else if(type==2){
href=trainJobUrl + "grampus/train-job/" + obj["CloudBrainJobID"]
}
$(`#DisplayJobNameHref`).attr("href",href)
$(`#DisplayJobNameHref`).attr("title",obj[key])
$(`#displayJobNameHref`).attr("href",href)
$(`#displayJobNameHref`).attr("title",obj[key])
$(`#${key}`).text(obj[key])

let versionName = obj["TrainJobVersionName"]
let versionName = obj["trainJobVersionName"]
if(versionName!=""){
let html = `<span style="margin-left:1rem" class="ui label">${versionName}</span>`
$('#DisplayJobName').append(html)
$('#displayJobName').append(html)
}
}
else if(key==="Parameters"){
else if(key==="parameters"){
if(obj[key]==='--'){
$(`#${key}`).text(obj[key])
}else{


+ 78
- 78
web_src/js/components/Model.vue View File

@@ -13,7 +13,7 @@
:header-cell-style="tableHeaderStyle"
>
<el-table-column
prop="Name"
prop="name"
:label="i18n.model_name"
align="left"
min-width="17%"
@@ -25,105 +25,105 @@
<!-- <i class="el-icon-time"></i> -->
<a
class="text-over"
:href="showinfoHref + scope.row.Name"
:title="scope.row.Name"
>{{ scope.row.Name }}</a
:href="showinfoHref + scope.row.name"
:title="scope.row.name"
>{{ scope.row.name }}</a
>
</template>
</el-table-column>
<el-table-column
prop="Status"
prop="status"
:label="i18n.model_status"
align="center"
min-width="6.5%"
>
<template slot-scope="scope">
<span class="text-over" :title="scope.row.Status_title">
<i style="vertical-align: middle" :class="scope.row.Status"></i
<span class="text-over" :title="scope.row.status_title">
<i style="vertical-align: middle" :class="scope.row.status"></i
></span>
</template>
</el-table-column>
<el-table-column
prop="Version"
prop="version"
:label="i18n.model_version"
align="center"
min-width="6%"
>
<template slot-scope="scope">
<span class="text-over" :title="scope.row.Version">{{
scope.row.Version
<span class="text-over" :title="scope.row.version">{{
scope.row.version
}}</span>
</template>
</el-table-column>
<el-table-column
prop="VersionCount"
prop="versionCount"
:label="i18n.model_version_num"
align="center"
min-width="7%"
>
<template slot-scope="scope">
<span class="text-over" :title="scope.row.VersionCount">{{
scope.row.VersionCount
<span class="text-over" :title="scope.row.versionCount">{{
scope.row.versionCount
}}</span>
</template>
</el-table-column>

<el-table-column
prop="Size"
prop="size"
:label="i18n.model_size"
align="center"
min-width="10%"
>
<template slot-scope="scope">
<span class="text-over">{{ renderSize(scope.row.Size) }}</span>
<span class="text-over">{{ renderSize(scope.row.size) }}</span>
</template>
</el-table-column>
<el-table-column
prop="EngineName"
prop="engineName"
:label="i18n.model_egine"
align="center"
min-width="8%"
>
<template slot-scope="scope">
<span class="text-over" :title="scope.row.EngineName">{{
scope.row.EngineName
<span class="text-over" :title="scope.row.engineName">{{
scope.row.engineName
}}</span>
</template>
</el-table-column>
<el-table-column
prop="ComputeResource"
prop="computeResource"
:label="i18n.model_compute_resource"
align="center"
min-width="8%"
>
<template slot-scope="scope">
<span class="text-over">{{ scope.row.ComputeResource }}</span>
<span class="text-over">{{ scope.row.computeResource }}</span>
</template>
</el-table-column>
<el-table-column
prop="CreatedUnix"
prop="createdUnix"
:label="i18n.model_create_time"
align="center"
min-width="13.75%"
>
<template slot-scope="scope">
{{ transTime(scope.row.CreatedUnix) }}
{{ transTime(scope.row.createdUnix) }}
</template>
</el-table-column>
<el-table-column
prop="UserName"
prop="userName"
:label="i18n.model_creator"
align="center"
min-width="6.75%"
>
<template slot-scope="scope">
<a
:href="!scope.row.UserName ? '#' : '/' + scope.row.UserName"
:title="scope.row.UserName || defaultAvatarName"
:href="!scope.row.userName ? '#' : '/' + scope.row.userName"
:title="scope.row.userName || defaultAvatarName"
>
<img
class="ui avatar image"
:src="scope.row.UserRelAvatarLink || defaultAvatar"
:src="scope.row.userRelAvatarLink || defaultAvatar"
/>
</a>
</template>
@@ -140,25 +140,25 @@
:style="{
visibility: !scope.row.Children ? 'visible' : 'hidden',
}"
:class="{ disabled: !scope.row.IsCanOper }"
:class="{ disabled: !scope.row.isCanOper }"
@click="
showcreateVue(
scope.row.Name,
scope.row.Version,
scope.row.Label
scope.row.name,
scope.row.version,
scope.row.label
)
"
>{{ i18n.model_create_new_ver }}</a
>
<a
:href="loadhref + scope.row.ID"
:class="{ disabled: !scope.row.IsCanOper }"
:href="loadhref + scope.row.id"
:class="{ disabled: !scope.row.isCanOper }"
>{{ i18n.model_download }}</a
>
<a
:class="{ disabled: !scope.row.IsCanDelete }"
:class="{ disabled: !scope.row.isCanDelete }"
@click="
deleteModel(scope.row.ID, scope.row.cName, scope.row.rowKey)
deleteModel(scope.row.id, scope.row.cName, scope.row.rowKey)
"
>{{ i18n.model_delete }}</a
>
@@ -219,17 +219,17 @@ export default {
},
})
.then((res) => {
let TrainTaskInfo;
let trainTaskInfo;
let tableData;
tableData = res.data;
for (let i = 0; i < tableData.length; i++) {
TrainTaskInfo = JSON.parse(tableData[i].TrainTaskInfo);
tableData[i].EngineName = this.getEngineName(tableData[i]);
tableData[i].ComputeResource = TrainTaskInfo.ComputeResource;
tableData[i].cName = tableData[i].Name;
tableData[i].rowKey = tableData[i].ID + Math.random();
tableData[i].Name = "";
tableData[i].VersionCount = "";
trainTaskInfo = JSON.parse(tableData[i].trainTaskInfo);
tableData[i].engineName = this.getEngineName(tableData[i]);
tableData[i].computeResource = trainTaskInfo.ComputeResource;
tableData[i].cName = tableData[i].name;
tableData[i].rowKey = tableData[i].id + Math.random();
tableData[i].name = "";
tableData[i].versionCount = "";
tableData[i].Children = true;
}
resolve(tableData || []);
@@ -258,10 +258,10 @@ export default {
centered: false,
onShow: function () {
$("#model_header").text(title);
$('input[name="Name"]').addClass("model_disabled");
$('input[name="Name"]').attr("readonly", "readonly");
$('input[name="name"]').addClass("model_disabled");
$('input[name="name"]').attr("readonly", "readonly");
$('input[name="modelSelectedFile"]').attr("readonly", "readonly");
$('input[name="Version"]').addClass("model_disabled");
$('input[name="version"]').addClass("model_disabled");
$(".ui.dimmer").css({
"background-color": "rgb(136, 136, 136,0.7)",
});
@@ -274,8 +274,8 @@ export default {
},
onHide: function () {
document.getElementById("formId").reset();
$('input[name="Name"]').removeClass("model_disabled");
$('input[name="Name"]').removeAttr("readonly");
$('input[name="name"]').removeClass("model_disabled");
$('input[name="name"]').removeAttr("readonly");
$('input[name="modelSelectedFile"]').removeAttr("readonly");
var cityObj = $("#modelSelectedFile");
cityObj.attr("value", "");
@@ -290,8 +290,8 @@ export default {
.modal("show");
},
check() {
let jobid = document.getElementById("JobId").value;
let versionname = document.getElementById("VersionName").value;
let jobid = document.getElementById("jobId").value;
let versionname = document.getElementById("versionName").value;
let name = document.getElementById("name").value;
let version = document.getElementById("version").value;
let modelSelectedFile =
@@ -333,8 +333,8 @@ export default {
let context = this;
let flag = this.check();
if (flag) {
let cName = $("input[name='Name']").val();
let version = $("input[name='Version']").val();
let cName = $("input[name='name']").val();
let version = $("input[name='version']").val();
let data = $("#formId").serialize();
const initModel = $("input[name='initModel']").val();
let url_href =
@@ -387,7 +387,7 @@ export default {
let childrenIndex = store.states.lazyTreeNodeMap[
parentRow.rowKey
].findIndex((child) => child.rowKey == row.rowKey);
parentRow.VersionCount = parentRow.VersionCount - 1;
parentRow.versionCount = parentRow.versionCount - 1;
const parent = store.states.lazyTreeNodeMap[parentRow.rowKey];
if (parent.length === 1) {
this.getModelList();
@@ -398,7 +398,7 @@ export default {
}
},
deleteModel(id, name, rowKey) {
let row = { cName: name, ID: id, rowKey: rowKey };
let row = { cName: name, id: id, rowKey: rowKey };
let _this = this;
let flag = 1;
$(".ui.basic.modal.first")
@@ -410,7 +410,7 @@ export default {
_this.$axios
.delete(_this.url + "delete_model", {
params: {
ID: id,
id: id,
},
})
.then((res) => {
@@ -442,21 +442,21 @@ export default {
.modal("show");
},
getEngineName(model) {
if (model.Engine == 0) {
if (model.engine == 0) {
return "PyTorch";
} else if (model.Engine == 1 || model.Engine == 121) {
} else if (model.engine == 1 || model.engine == 121) {
return "TensorFlow";
} else if (
model.Engine == 2 ||
model.Engine == 122 ||
model.Engine == 35
model.engine == 2 ||
model.engine == 122 ||
model.engine == 35
) {
return "MindSpore";
} else if (model.Engine == 4) {
} else if (model.engine == 4) {
return "PaddlePaddle";
} else if (model.Engine == 5) {
} else if (model.engine == 5) {
return "OneFlow";
} else if (model.Engine == 6) {
} else if (model.engine == 6) {
return "MXNet";
} else {
return "Other";
@@ -480,34 +480,34 @@ export default {
.then((res) => {
$(".ui.grid").removeAttr("style");
$("#loadContainer").removeClass("loader");
let TrainTaskInfo;
let trainTaskInfo;
this.tableData = res.data.data;
for (let i = 0; i < this.tableData.length; i++) {
TrainTaskInfo = JSON.parse(this.tableData[i].TrainTaskInfo);
this.tableData[i].cName = this.tableData[i].Name;
this.tableData[i].rowKey = this.tableData[i].ID + Math.random();
this.tableData[i].EngineName = this.getEngineName(
trainTaskInfo = JSON.parse(this.tableData[i].trainTaskInfo);
this.tableData[i].cName = this.tableData[i].name;
this.tableData[i].rowKey = this.tableData[i].id + Math.random();
this.tableData[i].engineName = this.getEngineName(
this.tableData[i]
);
this.tableData[i].ComputeResource = TrainTaskInfo.ComputeResource;
this.tableData[i].computeResource = trainTaskInfo.ComputeResource;
this.tableData[i].hasChildren =
res.data.data[i].VersionCount === 1 ? false : true;
if (this.tableData[i].Status !== 1) {
res.data.data[i].versionCount === 1 ? false : true;
if (this.tableData[i].status !== 1) {
countStatus++;
}

switch (this.tableData[i].Status) {
switch (this.tableData[i].status) {
case 1:
this.tableData[i].Status = "WAITING";
this.tableData[i].Status_title = this.i18n.model_wait;
this.tableData[i].status = "WAITING";
this.tableData[i].status_title = this.i18n.model_wait;
break;
case 2:
this.tableData[i].Status = "FAILED";
this.tableData[i].Status_title = this.tableData[i].StatusDesc;
this.tableData[i].status = "FAILED";
this.tableData[i].status_title = this.tableData[i].statusDesc;
break;
default:
this.tableData[i].Status = "SUCCEEDED";
this.tableData[i].Status_title = this.i18n.model_success;
this.tableData[i].status = "SUCCEEDED";
this.tableData[i].status_title = this.i18n.model_success;
break;
}
}
@@ -531,7 +531,7 @@ export default {
},
computed: {
loadhref() {
return this.url + "downloadall?ID=";
return this.url + "downloadall?id=";
},
showinfoHref() {
return this.url + "show_model_info?name=";


+ 4
- 4
web_src/js/features/cloudbrainShow.js View File

@@ -799,9 +799,9 @@ export default async function initCloudrainSow() {
if (value) {
let html = "";
nameMap[value].forEach((element) => {
let { TrainTaskInfo } = element;
TrainTaskInfo = JSON.parse(TrainTaskInfo);
html += `<div class="item" data-label="${element.Label}" data-id="${element.ID}" data-value="${element.Path}">${element.Version}</div>`;
//let { trainTaskInfo } = element;
//trainTaskInfo = JSON.parse(trainTaskInfo);
html += `<div class="item" data-label="${element.label}" data-id="${element.id}" data-value="${element.path}">${element.version}</div>`;
});
$("#model_name_version").append(html);
const initVersionText = $(
@@ -937,7 +937,7 @@ export default async function initCloudrainSow() {
return new Promise((resolve, reject) => {
$.get(
`${RepoLink}/modelmanage/query_modelfile_for_predict`,
{ ID: value },
{ id: value },
(data) => {
resolve(data);
}
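
Both call sites above now pass the query parameter as lowercase id rather than ID; since query keys are case-sensitive, the handler behind query_modelfile_for_predict is expected to read the lowercase key. A stand-alone illustrative handler follows; the route path and function name are assumptions, not the repository's router code.

package main

import (
	"fmt"
	"net/http"
)

// Illustrative only: reads the renamed lowercase query key sent by the
// updated frontend calls (previously ?ID=...).
func queryModelFileForPredict(w http.ResponseWriter, r *http.Request) {
	id := r.URL.Query().Get("id")
	fmt.Fprintf(w, "model id = %s\n", id)
}

func main() {
	http.HandleFunc("/modelmanage/query_modelfile_for_predict", queryModelFileForPredict)
	_ = http.ListenAndServe(":8080", nil)
}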

