#5220 V20240116

Merged
ychao_1983 merged 241 commits from V20240116 into develop 2 months ago
  1. +58 -50 entity/ai_task.go
  2. +1 -0 entity/cluster.go
  3. +1 -0 entity/container.go
  4. +3 -0 entity/creation.go
  5. +38 -2 manager/client/grampus/grampus.go
  6. +40 -1 models/action.go
  7. +25 -0 models/ai_model_manage.go
  8. +149 -23 models/cloudbrain.go
  9. +1 -1 models/model_migrate_record.go
  10. +1 -0 models/resource_specification.go
  11. +16 -1 models/reward_operate_record.go
  12. +5 -0 models/task_config.go
  13. +2 -2 modules/git/repo_compare.go
  14. +1 -1 modules/grampus/grampus.go
  15. +13 -0 modules/notification/action/action.go
  16. +1 -0 modules/notification/base/notifier.go
  17. +4 -0 modules/notification/base/null.go
  18. +7 -0 modules/notification/notification.go
  19. +7 -0 modules/redis/redis_key/ai_model_redis_key.go
  20. +4 -0 modules/setting/setting.go
  21. +40 -0 modules/storage/obs.go
  22. +8 -3 options/locale/locale_en-US.ini
  23. +8 -3 options/locale/locale_zh-CN.ini
  24. +9 -0 routers/ai_task/ai_task.go
  25. +5 -1 routers/api/v1/api.go
  26. +1 -1 routers/repo/ai_model_convert.go
  27. +55 -4 routers/repo/ai_model_manage.go
  28. +32 -23 routers/repo/ai_model_square.go
  29. +4 -0 routers/repo/aisafety.go
  30. +5 -2 routers/repo/attachment_model.go
  31. +63 -41 routers/repo/modelarts.go
  32. +1 -15 routers/repo/view.go
  33. +3 -1 routers/response/response_list.go
  34. +7 -3 routers/routes/routes.go
  35. +9 -6 routers/user/Invitation.go
  36. +9 -1 routers/user/auth.go
  37. +74 -0 services/ai_model/model_version.go
  38. +66 -233 services/ai_task_service/cluster/c2net.go
  39. +14 -13 services/ai_task_service/cluster/cloudbrain_one.go
  40. +62 -12 services/ai_task_service/cluster/cloudbrain_two.go
  41. +1 -1 services/ai_task_service/cluster/common.go
  42. +2 -1 services/ai_task_service/container_builder/code_builder.go
  43. +6 -6 services/ai_task_service/container_builder/common.go
  44. +25 -21 services/ai_task_service/container_builder/dataset_builder.go
  45. +1 -1 services/ai_task_service/container_builder/log_path_builder.go
  46. +1 -1 services/ai_task_service/container_builder/output_path_builder.go
  47. +86 -104 services/ai_task_service/container_builder/pre_model_builder.go
  48. +4 -2 services/ai_task_service/context/context.go
  49. +12 -2 services/ai_task_service/storage_helper/client.go
  50. +14 -2 services/ai_task_service/storage_helper/minio.go
  51. +13 -1 services/ai_task_service/storage_helper/obs.go
  52. +67 -5 services/ai_task_service/storage_helper/repo.go
  53. +5 -1 services/ai_task_service/task/cloudbrain_one_notebook_task.go
  54. +1 -1 services/ai_task_service/task/cloudbrain_one_train_task.go
  55. +1 -1 services/ai_task_service/task/cloudbrain_two_inference_task.go
  56. +14 -5 services/ai_task_service/task/cloudbrain_two_notebook_task.go
  57. +1 -1 services/ai_task_service/task/cloudbrain_two_train_task.go
  58. +1 -1 services/ai_task_service/task/grampus_inference_task.go
  59. +13 -9 services/ai_task_service/task/grampus_notebook_task.go
  60. +18 -6 services/ai_task_service/task/grampus_online_infer_task.go
  61. +3 -2 services/ai_task_service/task/grampus_train_task.go
  62. +49 -31 services/ai_task_service/task/opt_handler.go
  63. +138 -0 services/ai_task_service/task/sdk_util.go
  64. +1 -1 services/ai_task_service/task/super_compute_task.go
  65. +1 -1 services/ai_task_service/task/task_config.go
  66. +6 -1 services/ai_task_service/task/task_creation_info.go
  67. +33 -47 services/ai_task_service/task/task_extend.go
  68. +80 -55 services/ai_task_service/task/task_service.go
  69. +1 -1 templates/admin/cloudbrain/imagecommit.tmpl
  70. +12 -7 templates/repo/datasets/index.tmpl
  71. +0 -1 templates/repo/debugjob/index.tmpl
  72. +5 -15 templates/repo/home.tmpl
  73. +13 -1 templates/reward/point/rule.tmpl
  74. +1 -0 templates/user/auth/activate.tmpl
  75. +1 -0 templates/user/auth/bind_phone.tmpl
  76. +1 -1 templates/user/dashboard/dashboard.tmpl
  77. +3 -3 web_src/js/features/clipboard.js
  78. +198 -0 web_src/js/features/globalModalDlg.js
  79. +1 -1 web_src/js/features/highlight.js
  80. +10 -0 web_src/js/features/i18nVue.js
  81. +47 -13 web_src/js/index.js
  82. +1 -1 web_src/less/_home.less
  83. +44 -0 web_src/vuepages/apis/modules/common.js
  84. +161 -0 web_src/vuepages/components/CommonTipsDialog.vue
  85. +11 -8 web_src/vuepages/components/cloudbrain/AIEngineSelect.vue
  86. +16 -13 web_src/vuepages/components/cloudbrain/AlgBechmarkType.vue
  87. +13 -40 web_src/vuepages/components/cloudbrain/BootFile.vue
  88. +10 -7 web_src/vuepages/components/cloudbrain/BranchName.vue
  89. +11 -14 web_src/vuepages/components/cloudbrain/DatasetSelect.vue
  90. +44 -34 web_src/vuepages/components/cloudbrain/FormTop.vue
  91. +8 -12 web_src/vuepages/components/cloudbrain/ImageSelectV1.vue
  92. +11 -16 web_src/vuepages/components/cloudbrain/ImageSelectV2.vue
  93. +12 -14 web_src/vuepages/components/cloudbrain/ModelSelect.vue
  94. +69 -83 web_src/vuepages/components/cloudbrain/ModelSelectV2.vue
  95. +18 -15 web_src/vuepages/components/cloudbrain/NetworkType.vue
  96. +28 -22 web_src/vuepages/components/cloudbrain/RunParameters.vue
  97. +129 -0 web_src/vuepages/components/cloudbrain/SDKCode.vue
  98. +33 -34 web_src/vuepages/components/cloudbrain/SpecSelect.vue
  99. +11 -8 web_src/vuepages/components/cloudbrain/TaskDescr.vue
  100. +12 -8 web_src/vuepages/components/cloudbrain/TaskName.vue

+ 58
- 50
entity/ai_task.go View File

@@ -31,14 +31,11 @@ type CreateReq struct {
Cluster ClusterType `json:"cluster" binding:"Required"`
WorkServerNumber int `json:"work_server_number"`
BranchName string `json:"branch_name"`
PreTrainModelUrl string `json:"pretrain_model_url"`
PretrainModelCkptName string `json:"pretrain_model_ckpt_name"`
ImageUrl string `json:"image_url"`
ImageID string `json:"image_id"`
ImageName string `json:"image_name"`
PretrainModelName string `json:"pretrain_model_name"`
PretrainModelVersion string `json:"pretrain_model_version"`
PretrainModelId string `json:"pretrain_model_id"`
PretrainModelId string `json:"pretrain_model_id_str"`
Description string `json:"description"`
LabelName string `json:"label_names"`
DatasetUUIDStr string `json:"dataset_uuid_str"`
@@ -57,6 +54,7 @@ type CreateReq struct {
FileBranchName string
IsRestartRequest bool
DatasetNames string
ModelNames string
}

type CreationResponse struct {
@@ -83,12 +81,13 @@ func (r *QueryAITaskRes) TryToRemoveDatasetAndModelInfo(currentUser *models.User
if r.Task != nil {
r.Task.TryToRemoveDatasets(currentUser)
r.Task.TryToRemovePretrainModelList(currentUser)
r.Task.TryToRemoveSDKCode(currentUser)
}
if r.EarlyVersionList != nil {
for _, t := range r.EarlyVersionList {
t.TryToRemoveDatasets(currentUser)
t.TryToRemovePretrainModelList(currentUser)
t.TryToRemoveSDKCode(currentUser)
}
}
}
@@ -104,49 +103,48 @@ func (r *QueryAITaskRes) Tr(language string) {
}

type AITaskDetailInfo struct {
ID int64 `json:"id"`
JobID string `json:"job_id"`
Status string `json:"status"`
DetailedStatus string `json:"detailed_status"`
JobType string `json:"job_type"`
Cluster string `json:"cluster"`
DisplayJobName string `json:"display_job_name"`
FormattedDuration string `json:"formatted_duration"`
ComputeSource string `json:"compute_source"`
AICenter string `json:"ai_center"`
BootFile string `json:"boot_file"`
PreVersionName string `json:"pre_version_name"`
CurrentVersionName string `json:"current_version_name"`
WorkServerNumber int `json:"work_server_number"`
Spec *structs.SpecificationShow `json:"spec"`
DatasetList []*models.DatasetDownload `json:"dataset_list"`
PretrainModelList []*models.ModelDownload `json:"pretrain_model_list"`
Parameters *models.Parameters `json:"parameters"`
CreatedUnix timeutil.TimeStamp `json:"created_unix"`
CodePath string `json:"code_path"`
DatasetPath string `json:"dataset_path"`
PretrainModelPath string `json:"pretrain_model_path"`
PretrainModelUrl string `json:"pretrain_model_url"`
OutputPath string `json:"output_path"`
CodeUrl string `json:"code_url"`
PretrainModelName string `json:"pretrain_model_name"`
PretrainModelVersion string `json:"pretrain_model_version"`
PretrainCkptName string `json:"pretrain_model_ckpt_name"`
PretrainModelId string `json:"pretrain_model_id"`
StartTime timeutil.TimeStamp `json:"start_time"`
EndTime timeutil.TimeStamp `json:"end_time"`
Description string `json:"description"`
CommitID string `json:"commit_id"`
BranchName string `json:"branch_name"`
ImageUrl string `json:"image_url"`
ImageID string `json:"image_id"`
ImageName string `json:"image_name"`
CreatorName string `json:"creator_name"`
EngineName string `json:"engine_name"`
FailedReason string `json:"failed_reason"`
UserId int64 `json:"-"`
AppName string `json:"app_name"`
HasInternet int `json:"has_internet"`
ID int64 `json:"id"`
JobID string `json:"job_id"`
Status string `json:"status"`
DetailedStatus string `json:"detailed_status"`
JobType string `json:"job_type"`
Cluster string `json:"cluster"`
DisplayJobName string `json:"display_job_name"`
FormattedDuration string `json:"formatted_duration"`
ComputeSource string `json:"compute_source"`
AICenter string `json:"ai_center"`
BootFile string `json:"boot_file"`
PreVersionName string `json:"pre_version_name"`
CurrentVersionName string `json:"current_version_name"`
WorkServerNumber int `json:"work_server_number"`
Spec *structs.SpecificationShow `json:"spec"`
DatasetList []*models.DatasetDownload `json:"dataset_list"`
PretrainModelList []*models.Model4Show `json:"pretrain_model_list"`
SDKCode string `json:"sdk_code"`
Parameters *models.Parameters `json:"parameters"`
CreatedUnix timeutil.TimeStamp `json:"created_unix"`
CodePath string `json:"code_path"`
DatasetPath string `json:"dataset_path"`
PretrainModelPath string `json:"pretrain_model_path"`
PretrainModelUrl string `json:"pretrain_model_url"`
OutputPath string `json:"output_path"`
CodeUrl string `json:"code_url"`
PretrainModelName string `json:"pretrain_model_name"`
PretrainModelId string `json:"pretrain_model_id"`
StartTime timeutil.TimeStamp `json:"start_time"`
EndTime timeutil.TimeStamp `json:"end_time"`
Description string `json:"description"`
CommitID string `json:"commit_id"`
BranchName string `json:"branch_name"`
ImageUrl string `json:"image_url"`
ImageID string `json:"image_id"`
ImageName string `json:"image_name"`
CreatorName string `json:"creator_name"`
EngineName string `json:"engine_name"`
FailedReason string `json:"failed_reason"`
UserId int64 `json:"-"`
AppName string `json:"app_name"`
HasInternet int `json:"has_internet"`
}

func (a *AITaskDetailInfo) Tr(language string) {
@@ -160,7 +158,12 @@ func (a *AITaskDetailInfo) TryToRemoveDatasets(currentUser *models.User) {
}
func (a *AITaskDetailInfo) TryToRemovePretrainModelList(currentUser *models.User) {
if currentUser == nil || a.UserId == 0 || (!currentUser.IsAdmin && currentUser.ID != a.UserId) {
a.PretrainModelList = []*models.ModelDownload{}
a.PretrainModelList = []*models.Model4Show{}
}
}
func (a *AITaskDetailInfo) TryToRemoveSDKCode(currentUser *models.User) {
if currentUser == nil || a.UserId == 0 || (!currentUser.IsAdmin && currentUser.ID != a.UserId) {
a.SDKCode = ""
}
}

@@ -267,7 +270,10 @@ func ConvertCloudbrainToAITaskBriefInfo(task *models.Cloudbrain) *AITaskBriefInf
}

type NotebookDataset struct {
DatasetUrl string `json:"dataset_url"`
DatasetUrl string `json:"dataset_url"`
DatasetName string `json:"dataset_name"`
ContainerPath string `json:"containerPath"`
ReadOnly bool `json:"readOnly"`
}

type QueryLogOpts struct {
@@ -340,6 +346,8 @@ type AITaskBaseConfig struct {
IsActionUseJobId bool `json:"is_action_use_job_id"`
DatasetsLimitSizeGB int
DatasetsMaxNum int
ModelMaxNum int
ModelLimitSizeGB int
}

func GetAITaskConfigFromCloudbrainConfig(config *models.CloudbrainConfig) *AITaskBaseConfig {
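
A minimal sketch (not part of this diff) of how the new scrubbing hook is meant to be driven before a task detail leaves the server. The helper name sanitizeForViewer is hypothetical, and the import paths are assumed from the module layout.

import (
	"code.gitea.io/gitea/entity"
	"code.gitea.io/gitea/models"
)

// Hedged sketch: TryToRemoveDatasetAndModelInfo now also blanks the new SDKCode
// field (alongside DatasetList and PretrainModelList) for viewers who are
// neither the task owner nor an admin.
func sanitizeForViewer(res *entity.QueryAITaskRes, viewer *models.User) *entity.QueryAITaskRes {
	res.TryToRemoveDatasetAndModelInfo(viewer)
	return res
}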


+ 1
- 0
entity/cluster.go View File

@@ -35,6 +35,7 @@ type NoteBookTask struct {
ResourceSpecId string
BootFile string
Spec *models.Specification
EnvVariables models.GrampusEnvVarReq
}

type CreateNoteBookTaskResponse struct {


+ 1
- 0
entity/container.go View File

@@ -20,6 +20,7 @@ type ContainerData struct {
S3DownloadUrl string `json:"s3DownloadUrl"`
Size int64 `json:"size"`
IsOverwrite bool `json:"isOverwrite"`
IsNeedUnzip bool `json:"isNeedUnzip"`
StorageType StorageType
}



+ 3
- 0
entity/creation.go View File

@@ -28,6 +28,9 @@ type ImageRequiredInfo struct {

type AITaskCreationConfig struct {
DatasetMaxSize int `json:"dataset_max_size"`
DatasetsMaxNum int `json:"dataset_max_num"`
ModelMaxSize int `json:"model_max_size"`
ModelMaxNum int `json:"model_max_num"`
}

type SpecificationInfo struct {


+ 38
- 2
manager/client/grampus/grampus.go View File

@@ -30,6 +30,7 @@ const (
urlGetAiCenter = urlOpenApiV1 + "sharescreen/aicenter"
urlGetImages = urlOpenApiV1 + "image"
urlNotebookJob = urlOpenApiV1 + "notebook"
urlInferenceJob = urlOpenApiV1 + "inference"

errorIllegalToken = 1005
errorCannotStopCreatingJob = 5008
@@ -93,6 +94,41 @@ func getToken() error {
return nil
}

func CreateInferenceJob(req models.CreateGrampusInferenceRequest) (*models.GrampusNotebookResponse, error) {
checkSetting()
client := getRestyClient()
var result models.GrampusNotebookResponse
reqJson, _ := json.Marshal(req)
log.Info("Online infer REQ:" + string(reqJson))
retry := 0

sendjob:
_, err := client.R().
SetHeader("Content-Type", "application/json").
SetAuthToken(TOKEN).
SetBody(req).
SetResult(&result).
Post(HOST + urlInferenceJob)

if err != nil {
log.Error("resty CreateInferenceJob: %v", err)
return nil, models.NetworkError{}
}

if result.ErrorCode == errorIllegalToken && retry < 1 {
retry++
_ = getToken()
goto sendjob
}

if result.ErrorCode != 0 {
log.Error("CreateInferenceJob failed(%d): %s", result.ErrorCode, result.ErrorMsg)
return &result, fmt.Errorf("CreateInferenceJob failed(%d): %s", result.ErrorCode, result.ErrorMsg)
}
log.Info("CreateInferenceJob success.req.JobName = %s ,result=%+v", req.Name, result)
return &result, nil
}

func CreateNotebookJob(req models.CreateGrampusNotebookRequest) (*models.GrampusNotebookResponse, error) {
checkSetting()
client := getRestyClient()
@@ -169,7 +205,7 @@ func GetNotebookJob(jobID string) (*models.GrampusNotebookResponse, error) {
retry := 0

sendjob:
_, err := client.R().
body, err := client.R().
SetAuthToken(TOKEN).
SetResult(&result).
Get(HOST + urlNotebookJob + "/" + jobID)
@@ -177,7 +213,7 @@ sendjob:
if err != nil {
return nil, fmt.Errorf("resty GetNotebookJob: %v", err)
}
log.Info("%+v", body)
if result.ErrorCode == errorIllegalToken && retry < 1 {
retry++
log.Info("retry get token")


+ 40
- 1
models/action.go View File

@@ -78,6 +78,7 @@ const (
ActionCreateGrampusMETAXDebugTask //49
ActionCreateGrampusGPUInferenceTask //50
ActionCreateGrampusILUVATARInferenceTask //51
ActionInviteFriendRegister //52
)

// Action represents user operation type and other information to
@@ -112,6 +113,15 @@ type ActionShow struct {
IssueInfos []string
CommentLink string
Cloudbrain *CloudbrainShow4Action
Data map[string]interface{}
}

func (a *ActionShow) AddData(key string, val interface{}) {
if a.Data == nil {
a.Data = map[string]interface{}{key: val}
} else {
a.Data[key] = val
}
}

// GetOpType gets the ActionType of this action.
@@ -293,7 +303,28 @@ func (a *Action) ToShow() *ActionShow {
if strings.Contains(a.Content, "|") && a.IsIssueAction() {
actionShow.IssueInfos = a.GetIssueInfos()
}

if strings.Contains(a.Content, "|") && a.IsInviteAction() {
ids := strings.Split(a.Content, "|")
if len(ids) >= 2 {
var invitedId int64
var invitedName string
if len(ids) >= 4 {
invitedName = ids[3]
}
invitedId, _ = strconv.ParseInt(ids[1], 10, 64)
if invitedId > 0 {
invitedUser, _ := GetUserByID(invitedId)
if invitedUser != nil {
actionShow.AddData("InvitedUserName", invitedUser.Name)
actionShow.AddData("InvitedUserNotExists", false)
} else {
actionShow.AddData("InvitedUserName", invitedName)
actionShow.AddData("InvitedUserNotExists", true)
}
}
}
actionShow.IssueInfos = a.GetIssueInfos()
}
if a.Repo != nil {
actionShow.RepoLink = a.GetRepoLink()
actionShow.ShortRepoFullDisplayName = a.ShortRepoFullDisplayName()
@@ -459,6 +490,14 @@ func (a *Action) IsIssueAction() bool {
return false
}

func (a *Action) IsInviteAction() bool {
switch a.OpType {
case ActionInviteFriendRegister:
return true
}
return false
}

// GetFeedsOptions options for retrieving feeds
type GetFeedsOptions struct {
RequestedUser *User // the user we want activity for
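
For context, a hedged sketch of how a feed consumer could read the values attached with AddData; the template side that actually renders this is not in the excerpt, and inviteSummary is hypothetical.

import "code.gitea.io/gitea/models"

// Hedged sketch: pull the invited user's name out of ActionShow.Data as filled by ToShow().
func inviteSummary(show *models.ActionShow) string {
	name, _ := show.Data["InvitedUserName"].(string)
	if gone, _ := show.Data["InvitedUserNotExists"].(bool); gone {
		return name + " (account no longer exists)"
	}
	return name
}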


+ 25
- 0
models/ai_model_manage.go View File

@@ -872,3 +872,28 @@ func QueryModelRepoByModelID(modelId string) (*Repository, error) {
}
return r, nil
}

func QueryModelMapsByIds(ids []string) (map[string]*AiModelManage, error) {
sess := x.NewSession()
defer sess.Close()
re := make([]*AiModelManage, 0)
err := sess.Table(new(AiModelManage)).In("id", ids).Find(&re)
if err != nil {
return nil, err
}
resultMap := make(map[string]*AiModelManage, 0)
for _, m := range re {
resultMap[m.ID] = m
}
return resultMap, nil
}

//created_unix
func QueryModelIdsByPaging(pageSize, pageNum int, sort string) ([]string, error) {
sess := x.NewSession()
defer sess.Close()
re := make([]string, 0)
start := (pageNum - 1) * pageSize
err := sess.Table("ai_model_manage").Cols("id").OrderBy(sort).Limit(pageSize, start).Find(&re)
return re, err
}
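
A hedged sketch that combines the two new helpers: page through every model id in creation order and resolve each page with one batched query. This mirrors the loop UpdateAllModelMeta uses later in this PR; walkAllModels itself is hypothetical.

import "code.gitea.io/gitea/models"

// Hedged sketch: visit every model, pageSize ids at a time, ordered by created_unix.
func walkAllModels(visit func(*models.AiModelManage)) error {
	pageSize, pageNum := 100, 1
	for {
		ids, err := models.QueryModelIdsByPaging(pageSize, pageNum, "created_unix")
		if err != nil {
			return err
		}
		byID, err := models.QueryModelMapsByIds(ids)
		if err != nil {
			return err
		}
		for _, m := range byID {
			visit(m)
		}
		if len(ids) < pageSize {
			return nil
		}
		pageNum++
	}
}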

+ 149
- 23
models/cloudbrain.go View File

@@ -149,6 +149,24 @@ const (
OpenICluster = "OpenI"
C2NetCluster = "C2Net"

//cloudbrain two sdk PathValue
LocalCodePath = "/home/ma-user/work/code"
LocalDatasetPath = "/home/ma-user/work/dataset"
LocalPretrainModelPath = "/home/ma-user/work/pretrainmodel"
LocalOutputPath = "/home/ma-user/work/output"

DataDownloadMethodMount = "MOUNT"
DataDownloadMethodMoxing = "MOXING"

CodeNeedUnzipTrue = "true"
CodeNeedUnzipFalse = "false"

DatasetNeedUnzipTrue = "true"
DatasetNeedUnzipFalse = "false"

PretrainModelNeedUnzipTrue = "true"
PretrainModelNeedUnzipFalse = "false"

//AI center
AICenterOfCloudBrainOne = "OpenIOne"
AICenterOfCloudBrainTwo = "OpenITwo"
@@ -378,6 +396,45 @@ func (task *Cloudbrain) ToShow() *CloudbrainShow {
return c
}

func (task *Cloudbrain) IsRestartTask() bool {
n, _ := x.Where("display_job_name = ?", task.DisplayJobName).Unscoped().Count(&Cloudbrain{})
if n > 1 {
return true
}
return false
}

func (task *Cloudbrain) HasUseModel(modelId string) bool {
modelIDArray := task.GetModelIdArray()
if modelIDArray == nil || len(modelIDArray) == 0 {
return false
}
for _, id := range modelIDArray {
if id == modelId {
return true
}
}
return false
}

func (task *Cloudbrain) GetModelIdArray() []string {
if task.ModelId == "" {
return []string{}
}
modelIdStr := strings.TrimSuffix(task.ModelId, ";")
modelIDArray := strings.Split(modelIdStr, ";")
return modelIDArray
}

func (task *Cloudbrain) GetModelNameArray() []string {
if task.ModelName == "" {
return []string{}
}
modelNameStr := strings.TrimSuffix(task.ModelName, ";")
modelNameArray := strings.Split(modelNameStr, ";")
return modelNameArray
}

func (task *Cloudbrain) GetStandardComputeSource() string {
return GetComputeSourceStandardFormat(task.ComputeResource)
}
@@ -1163,15 +1220,16 @@ type CloudBrainResult struct {
}

type CreateNotebook2Params struct {
JobName string `json:"name"`
Description string `json:"description"`
Duration int64 `json:"duration"` //ms
Feature string `json:"feature"`
PoolID string `json:"pool_id"`
Flavor string `json:"flavor"`
ImageID string `json:"image_id"`
WorkspaceID string `json:"workspace_id"`
Volume VolumeReq `json:"volume"`
JobName string `json:"name"`
Description string `json:"description"`
Duration int64 `json:"duration"` //ms
Feature string `json:"feature"`
PoolID string `json:"pool_id"`
Flavor string `json:"flavor"`
ImageID string `json:"image_id"`
WorkspaceID string `json:"workspace_id"`
Volume VolumeReq `json:"volume"`
EnvVariables CloudBrain2EnvVarReq `json:"env_variables"`
}

type CreateNotebookWithoutPoolParams struct {
@@ -1185,6 +1243,20 @@ type CreateNotebookWithoutPoolParams struct {
Volume VolumeReq `json:"volume"`
}

type CloudBrain2EnvVarReq struct {
CodeObsUrl string `json:"CODE_URL"`
DatasetObsUrl string `json:"DATASET_URL"`
PretrainedModelObsUrl string `json:"PRETRAIN_MODEL_URL"`
OutputObsUrl string `json:"OUTPUT_URL"`
LocalCodePath string `json:"LOCAL_CODE_PATH"`
LocalDatasetPath string `json:"LOCAL_DATASET_PATH"`
LocalPretrainModelPath string `json:"LOCAL_PRETRAIN_MODEL_PATH"`
LocalOutputPath string `json:"LOCAL_OUTPUT_PATH"`
DataDownloadMethod string `json:"DATA_DOWNLOAD_METHOD"`
CodeNeedUnzip string `json:"CODE_NEED_UNZIP"`
DatasetNeedUnzip string `json:"DATASET_NEED_UNZIP"`
PretrainModelNeedUnzip string `json:"PRETRAIN_MODEL_NEED_UNZIP"`
}
type VolumeReq struct {
Capacity int `json:"capacity"`
Category string `json:"category"`
@@ -1606,6 +1678,15 @@ type ModelDownload struct {
IsDelete bool `json:"is_delete"`
}

type Model4Show struct {
ID string `json:"id"`
Name string `json:"name"`
RepositoryLink string `json:"repository_link"`
IsDelete bool `json:"is_delete"`
//DownloadLink string `json:"download_link"`
Size int64 `json:"size"`
}

type DataSource struct {
DatasetID string `json:"dataset_id"`
DatasetVersion string `json:"dataset_version"`
@@ -2035,22 +2116,46 @@ type GetGrampusDebugJobEventsResponse struct {
}

type GrampusTasks struct {
Command string `json:"command"`
Command string `json:"command"`
Name string `json:"name"`
ImageId string `json:"imageId"`
ResourceSpecId string `json:"resourceSpecId"`
ImageUrl string `json:"imageUrl"`
CenterID []string `json:"centerID"`
CenterName []string `json:"centerName"`
ReplicaNum int `json:"replicaNum"`
Datasets []GrampusDataset `json:"datasets"`
Models []GrampusDataset `json:"models"`
Code GrampusDataset `json:"code"`
BootFile string `json:"bootFile"`
OutPut GrampusDataset `json:"output"`
WorkServerNumber int `json:"nodeCount"`
RunParams map[string]interface{} `json:"runParams"`
}
type GrampusNotebookTask struct {
AutoStopDuration int64 `json:"autoStopDuration"`
Name string `json:"name"`
ImageId string `json:"imageId"`
ResourceSpecId string `json:"resourceSpecId"`
ImageUrl string `json:"imageUrl"`
Capacity int `json:"capacity"`
CenterID []string `json:"centerID"`
CenterName []string `json:"centerName"`
ReplicaNum int `json:"replicaNum"`
Datasets []GrampusDataset `json:"datasets"`
Models []GrampusDataset `json:"models"`
PoolId string `json:"poolId"`
Code GrampusDataset `json:"code"`
BootFile string `json:"bootFile"`
Datasets []GrampusDataset `json:"datasets"`
PreTrainModel []GrampusDataset `json:"models"`
OutPut GrampusDataset `json:"output"`
WorkServerNumber int `json:"nodeCount"`
CodeUrl string `json:"codeUrl"`
DataUrl string `json:"dataUrl"`
ImageId string `json:"imageId"`
ImageUrl string `json:"imageUrl"`
ResourceSpecId string `json:"resourceSpecId"`
Token string `json:"token"`
Url string `json:"url"`
Status string `json:"status"`
Command string `json:"command"`
EnvVariables GrampusEnvVarReq `json:"envVariables"`
}
type GrampusNotebookTask struct {

type GrampusInferenceTask struct {
AutoStopDuration int64 `json:"autoStopDuration"`
Name string `json:"name"`
Capacity int `json:"capacity"`
@@ -2059,6 +2164,7 @@ type GrampusNotebookTask struct {
PoolId string `json:"poolId"`
Code GrampusDataset `json:"code"`
Datasets []GrampusDataset `json:"datasets"`
PreTrainModel []GrampusDataset `json:"models"`
OutPut GrampusDataset `json:"output"`
CodeUrl string `json:"codeUrl"`
DataUrl string `json:"dataUrl"`
@@ -2069,6 +2175,8 @@ type GrampusNotebookTask struct {
Url string `json:"url"`
Status string `json:"status"`
Command string `json:"command"`
EnvVariables GrampusEnvVarReq `json:"envVariables"`
BootFile string `json:"bootFile"`
}

type GrampusDataset struct {
@@ -2081,6 +2189,18 @@ type GrampusDataset struct {
GetBackEndpoint string `json:"getBackEndpoint"`
Size int64 `json:"size"`
IsOverwrite bool `json:"isOverwrite"`
IsNeedUnzip bool `json:"isNeedUnzip"`
}

type GrampusEnvVarReq struct {
MoxingRequired string `json:"MOXING_REQUIRED"`
UploadOpeniRequired string `json:"UPLOAD_OPENI_REQUIRED"`
UnzipRequired string `json:"UNZIP_REQUIRED"`
CodePathValue string `json:"CODE_PATH"`
DatasetPathValue string `json:"DATASET_PATH"`
PretrainedModelPathValue string `json:"PRETRAIN_MODEL_PATH"`
OutputPathValue string `json:"OUTPUT_PATH"`
OutputObsUrl string `json:"OUTPUT_URL"`
}

type CreateGrampusJobRequest struct {
@@ -2093,6 +2213,11 @@ type CreateGrampusNotebookRequest struct {
Tasks []GrampusNotebookTask `json:"tasks"`
}

type CreateGrampusInferenceRequest struct {
Name string `json:"name"`
Tasks []GrampusInferenceTask `json:"tasks"`
}

type GetTrainJobMetricStatisticResult struct {
TrainJobResult
Interval int `json:"interval"` //查询的时间间隔,单位为分钟
@@ -2459,8 +2584,9 @@ func CreateCloudbrain(cloudbrain *Cloudbrain) (err error) {
func updateReferenceCount(cloudbrain *Cloudbrain) {
increaseDatasetUseCount(cloudbrain.Uuid)
increaseImageUseCount(cloudbrain.Image)
increaseModelReference(cloudbrain.ModelId)

for _, id := range cloudbrain.GetModelIdArray() {
increaseModelReference(id)
}
}

func increaseImageUseCount(image string) {
@@ -3641,13 +3767,13 @@ func LoadSpecs4CloudbrainInfo(tasks []*CloudbrainInfo) error {

func GetCloudBrainByModelId(modelId string) ([]*Cloudbrain, error) {
cloudBrains := make([]*Cloudbrain, 0)
err := x.AllCols().Where("model_id=?", modelId).OrderBy("created_unix desc").Find(&cloudBrains)
err := x.AllCols().Where("model_id like ?", "%"+modelId+"%").OrderBy("created_unix desc").Find(&cloudBrains)
return cloudBrains, err
}

func GetCloudBrainByRepoIdAndModelName(repoId int64, modelName string) ([]*Cloudbrain, error) {
cloudBrains := make([]*Cloudbrain, 0)
err := x.AllCols().Where("model_name=? and repo_id=?", modelName, repoId).OrderBy("created_unix asc").Find(&cloudBrains)
err := x.AllCols().Where("model_name like ? and repo_id=?", "%"+modelName+"%", repoId).OrderBy("created_unix asc").Find(&cloudBrains)
return cloudBrains, err
}
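
A hedged sketch of how the new env-variable block might be assembled from the constants above before it is attached to CreateNotebook2Params. The four OBS URLs are placeholders supplied by the caller (producing them is the storage helpers' job, not shown here), and notebookEnv is hypothetical.

import "code.gitea.io/gitea/models"

// Hedged sketch: env variables handed to a Cloudbrain-two notebook so the in-container
// SDK knows where code, dataset, pretrained model and output live and whether to unzip them.
func notebookEnv(codeURL, datasetURL, modelURL, outputURL string) models.CloudBrain2EnvVarReq {
	return models.CloudBrain2EnvVarReq{
		CodeObsUrl:             codeURL,
		DatasetObsUrl:          datasetURL,
		PretrainedModelObsUrl:  modelURL,
		OutputObsUrl:           outputURL,
		LocalCodePath:          models.LocalCodePath,
		LocalDatasetPath:       models.LocalDatasetPath,
		LocalPretrainModelPath: models.LocalPretrainModelPath,
		LocalOutputPath:        models.LocalOutputPath,
		DataDownloadMethod:     models.DataDownloadMethodMount,
		CodeNeedUnzip:          models.CodeNeedUnzipTrue,
		DatasetNeedUnzip:       models.DatasetNeedUnzipFalse,
		PretrainModelNeedUnzip: models.PretrainModelNeedUnzipFalse,
	}
}

Which of the *NeedUnzip flags are actually set to true is decided by the container builders; the values above are only illustrative.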



+ 1
- 1
models/model_migrate_record.go View File

@@ -89,7 +89,7 @@ type ModelMigrateRecord struct {
CreatedUnix timeutil.TimeStamp `xorm:"created"`
UpdatedUnix timeutil.TimeStamp `xorm:"updated"`
DeletedAt time.Time `xorm:"deleted"`
Remark string
Remark string `xorm:"TEXT"`
}

func (r *ModelMigrateRecord) IsFinished() bool {


+ 1
- 0
models/resource_specification.go View File

@@ -357,6 +357,7 @@ func (s *Specification) GetAvailableQueues(opts GetAvailableCenterIdOpts) []Reso
relatedSpecs := s.findRelatedSpecs(specOpts, opts.UserId)

if len(relatedSpecs) == 0 {
log.Info("check centerIds opt%+v relatedSpecs is empty", opts)
return make([]ResourceQueue, 0)
}



+ 16
- 1
models/reward_operate_record.go View File

@@ -524,7 +524,14 @@ func GetRewardRecordShowList(opts *RewardRecordListOpts) (RewardRecordShowList,
return nil, 0, err
}
RewardRecordShowList(r).loadAttribute(false)
return r, count, nil
newShowList := make([]*RewardOperateRecordShow, 0)
for i := 0; i < len(r); i++ {
if r[i].SourceType == SourceTypeAccomplishTask.Name() && r[i].Action == nil {
continue
}
newShowList = append(newShowList, r[i])
}
return newShowList, count, nil
}

func GetAdminRewardRecordShowList(opts *RewardRecordListOpts) (RewardRecordShowList, int64, error) {
@@ -559,6 +566,14 @@ func GetAdminRewardRecordShowList(opts *RewardRecordListOpts) (RewardRecordShowL
return nil, 0, err
}
RewardRecordShowList(r).loadAttribute(true)
newShowList := make([]*RewardOperateRecordShow, 0)
for i := 0; i < len(r); i++ {
if r[i].SourceType == SourceTypeAccomplishTask.Name() && r[i].Action == nil {
continue
}
newShowList = append(newShowList, r[i])
}
return newShowList, count, nil
return r, count, nil
}



+ 5
- 0
models/task_config.go View File

@@ -27,6 +27,7 @@ const (
TaskImageRecommend TaskType = "ImageRecommend"
TaskChangeUserAvatar TaskType = "ChangeUserAvatar"
TaskPushCommits TaskType = "PushCommits"
TaskInviteFriendRegister TaskType = "TaskInviteFriendRegister"
)

func (t TaskType) ChineseName() string {
@@ -57,6 +58,8 @@ func (t TaskType) ChineseName() string {
return "首次更换头像"
case TaskPushCommits:
return "每日commit"
case TaskInviteFriendRegister:
return "邀请好友"
}
return "--"
}
@@ -111,6 +114,8 @@ func GetTaskTypeFromAction(a ActionType) TaskType {
return TaskPushCommits
case ActionCreateIssue:
return TaskCreateIssue
case ActionInviteFriendRegister:
return TaskInviteFriendRegister
}
return ""
}
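
A small hedged sketch of the reward hookup this enables: the new action type resolves to a task type, so the points system can treat an invitation like any other configured task (the fmt calls are only for illustration).

import (
	"fmt"

	"code.gitea.io/gitea/models"
)

// Hedged sketch: ActionInviteFriendRegister now maps onto the TaskInviteFriendRegister task type.
func demoInviteTaskType() {
	t := models.GetTaskTypeFromAction(models.ActionInviteFriendRegister)
	fmt.Println(t)               // TaskInviteFriendRegister
	fmt.Println(t.ChineseName()) // 邀请好友
}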


+ 2
- 2
modules/git/repo_compare.go View File

@@ -32,7 +32,7 @@ func (repo *Repository) GetMergeBase(tmpRemote string, base, head string) (strin
if tmpRemote != "origin" {
tmpBaseName := "refs/remotes/" + tmpRemote + "/tmp_" + base
// Fetch commit into a temporary branch in order to be able to handle commits and tags
_, err := NewCommand("fetch", tmpRemote, base+":"+tmpBaseName,"--no-tags").RunInDir(repo.Path)
_, err := NewCommand("fetch", tmpRemote, base+":"+tmpBaseName, "--no-tags").RunInDir(repo.Path)
if err == nil {
base = tmpBaseName
}
@@ -58,7 +58,7 @@ func (repo *Repository) GetCompareInfo(basePath, baseBranch, headBranch string)
}
defer func() {
if err := repo.RemoveRemote(tmpRemote); err != nil {
logger.Error("GetPullRequestInfo: RemoveRemote: %v", err)
logger.Error("GetPullRequestInfo: RemoveRemote: RepoPath=%s: %v", basePath, err)
}
}()
}


+ 1
- 1
modules/grampus/grampus.go View File

@@ -36,7 +36,7 @@ const (
BucketRemote = "grampus"
RemoteModelPath = "/output"
autoStopDurationMs = 4 * 60 * 60 * 1000
CommandGpuDebug = "! [ -x \"$(command -v jupyter)\" ] && pip install jupyterlab==3 -i https://pypi.tuna.tsinghua.edu.cn/simple;jupyter lab --ServerApp.shutdown_no_activity_timeout=%s --TerminalManager.cull_inactive_timeout=%s --TerminalManager.cull_interval=%s --MappingKernelManager.cull_idle_timeout=%s --MappingKernelManager.cull_interval=%s --MappingKernelManager.cull_connected=True --MappingKernelManager.cull_busy=True --no-browser --ip=0.0.0.0 --allow-root --notebook-dir='%s' --port=$OCTOPUS_NOTEBOOK_PORT --LabApp.token='' --LabApp.allow_origin='*' --LabApp.base_url=$OCTOPUS_NOTEBOOK_BASE_URL;"
CommandGpuDebug = "jupyter lab --ServerApp.shutdown_no_activity_timeout=%s --TerminalManager.cull_inactive_timeout=%s --TerminalManager.cull_interval=%s --MappingKernelManager.cull_idle_timeout=%s --MappingKernelManager.cull_interval=%s --MappingKernelManager.cull_connected=True --MappingKernelManager.cull_busy=True --no-browser --ip=0.0.0.0 --allow-root --notebook-dir='%s' --port=$OCTOPUS_NOTEBOOK_PORT --LabApp.token='' --LabApp.allow_origin='*' --LabApp.base_url=$OCTOPUS_NOTEBOOK_BASE_URL;"
)

var (


+ 13
- 0
modules/notification/action/action.go View File

@@ -421,3 +421,16 @@ func (t *actionNotifier) NotifyChangeUserAvatar(user *models.User, form auth.Ava
log.Error("notifyWatchers: %v", err)
}
}

func (t *actionNotifier) NotifyInviteFriendRegister(inviter, invited *models.User) {
act := &models.Action{
ActUserID: inviter.ID,
ActUser: inviter,
OpType: models.ActionInviteFriendRegister,
IsPrivate: true,
Content: fmt.Sprintf("%d|%d|%s|%s", inviter.ID, invited.ID, inviter.Name, invited.Name),
}
if err := models.NotifyWatchers(act); err != nil {
log.Error("notifyWatchers: %v", err)
}
}

+ 1
- 0
modules/notification/base/notifier.go View File

@@ -67,4 +67,5 @@ type Notifier interface {
NotifyChangeCloudbrainStatus(cloudbrain *models.Cloudbrain, oldStatus string)
NotifyCloudbrainTaskComingToFinished(cloudbrain *models.Cloudbrain, endTime timeutil.TimeStamp, account *models.PointAccount)
NotifyChangeFinetuneStatus(deployment *models.ModelartsDeploy)
NotifyInviteFriendRegister(inviter, invited *models.User)
}

+ 4
- 0
modules/notification/base/null.go View File

@@ -189,3 +189,7 @@ func (*NullNotifier) NotifyCloudbrainTaskComingToFinished(cloudbrain *models.Clo
func (*NullNotifier) NotifyChangeFinetuneStatus(deployment *models.ModelartsDeploy) {

}

func (*NullNotifier) NotifyInviteFriendRegister(inviter, invited *models.User) {

}

+ 7
- 0
modules/notification/notification.go View File

@@ -333,3 +333,10 @@ func NotifyChangeFinetuneStatus(deployment *models.ModelartsDeploy) {
notifier.NotifyChangeFinetuneStatus(deployment)
}
}

// NotifyInviteFriendRegister
func NotifyInviteFriendRegister(inviter, invited *models.User) {
for _, notifier := range notifiers {
notifier.NotifyInviteFriendRegister(inviter, invited)
}
}

+ 7
- 0
modules/redis/redis_key/ai_model_redis_key.go View File

@@ -0,0 +1,7 @@
package redis_key

const AI_MODEL_REDIS_PREFIX = "ai_model"

func AIModelMetaUpdateLock(modelId string) string {
return KeyJoin(AI_MODEL_REDIS_PREFIX, modelId, "meta", "lock")
}

+ 4
- 0
modules/setting/setting.go View File

@@ -743,6 +743,8 @@ var (
OUTPUT_SHOW_MAX_KEY int
OUTPUT_DOWNLOAD_MAX_KEY int
SPECIFICATION_SPECIAL_QUEUE string
DEBUG_MODEL_NUM_LIMIT int
DEBUG_MODEL_SIZE_LIMIT_GB int

//wenxin url
BaiduWenXin = struct {
@@ -1617,6 +1619,8 @@ func NewContext() {
OUTPUT_SHOW_MAX_KEY = sec.Key("OUTPUT_SHOW_MAX_KEY").MustInt(100)
OUTPUT_DOWNLOAD_MAX_KEY = sec.Key("OUTPUT_DOWNLOAD_MAX_KEY").MustInt(1000)
SPECIFICATION_SPECIAL_QUEUE = sec.Key("SPECIFICATION_SPECIAL_QUEUE").MustString("{}")
DEBUG_MODEL_NUM_LIMIT = sec.Key("DEBUG_MODEL_NUM_LIMIT").MustInt(5)
DEBUG_MODEL_SIZE_LIMIT_GB = sec.Key("DEBUG_MODEL_SIZE_LIMIT_GB").MustInt(20)

sec = Cfg.Section("benchmark")
IsBenchmarkEnabled = sec.Key("ENABLED").MustBool(false)


+ 40
- 0
modules/storage/obs.go View File

@@ -902,3 +902,43 @@ func GetDirsSomeFile(bucket string, prefixRootPath string, prefix string, marker
}
return fileInfos, marker, nil
}

func SetObsObjectMetaData(bucket, key string, metaData map[string]string) error {
log.Info("SetObsObjectMetaData bucket=%s key=%s metaData=%v", bucket, key, metaData)
input := &obs.SetObjectMetadataInput{}
input.Bucket = bucket
input.Key = key
input.Metadata = metaData
_, err := ObsCli.SetObjectMetadata(input)
if err != nil {
if obsError, ok := err.(obs.ObsError); ok {
log.Info("Message:%s\n", obsError.Message)
if obsError.StatusCode == 404 {
putInput := &obs.PutObjectInput{}
putInput.Bucket = bucket
putInput.Key = key
ObsCli.PutObject(putInput)
_, err = ObsCli.SetObjectMetadata(input)
}
}
}
return err
}

func GetObsObjectMetaData(bucket, key string) (map[string]string, error) {
log.Info("GetObsObjectMetaData bucket=%s key=%s", bucket, key)
input := &obs.GetObjectMetadataInput{}
input.Bucket = bucket
input.Key = key
output, err := ObsCli.GetObjectMetadata(input)
if err != nil {
if obsError, ok := err.(obs.ObsError); ok {
log.Info("Message:%s\n", obsError.Message)
}
return nil, err
}
if output == nil {
return map[string]string{}, nil
}
return output.Metadata, err
}
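
A hedged round-trip sketch for the two new helpers; bucket, key and the version value are placeholders, and stampModelMeta is hypothetical. Note that SetObsObjectMetaData creates an empty object first when the key does not exist yet (the 404 branch above).

import "code.gitea.io/gitea/modules/storage"

// Hedged sketch: persist a model's meta fields on its OBS object, then read them back.
func stampModelMeta(bucket, key, version string) (map[string]string, error) {
	if err := storage.SetObsObjectMetaData(bucket, key, map[string]string{"version": version}); err != nil {
		return nil, err
	}
	return storage.GetObsObjectMetaData(bucket, key)
}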

+ 8
- 3
options/locale/locale_en-US.ini View File

@@ -418,7 +418,7 @@ new_email_address = New email address
openi_community_really_awesome = OpenI, Really Awesome!
protocol_header=Please read the following content carefully:
protocol_title=Dear OpenI User
protocol_context=Thank you for your continuous support to the Openl Qizhi Community AI Collaboration Platform. In order to protect your usage rights and ensure network security, we updated the Openl Qizhi Community AI Collaboration Platform Usage Agreement in January 2024. The updated agreement specifies that users are prohibited from using intranet penetration tools. After you check and agree, you can continue to use our services. Thank you for your cooperation and understanding.
protocol_context=Thank you for your continuous support to the Openl Qizhi Community AI Collaboration Platform. In order to protect your usage rights and ensure network security, we updated the Openl Qizhi Community AI Collaboration Platform Usage Agreement in January 2024. The updated agreement specifies that users are prohibited from using intranet penetration tools. After you click "Agree and continue", you can continue to use our services. Thank you for your cooperation and understanding.
protocol_context_sub=For more agreement content, please refer to the<u><font color="# 3291f8"><a href="/home/term" target="_blank">《Openl Qizhi Community AI Collaboration Platform Usage Agreement》</a></font></u>
protocol_confirm=Agree and continue
protocol_cancel=Disagree, exit
@@ -568,6 +568,7 @@ all = All
form.name_reserved = The username '%s' is reserved.
form.name_pattern_not_allowed = The pattern '%s' is not allowed in a username.
form.name_chars_not_allowed = User name '%s' contains invalid characters.
form.username_and_invited_code_duplicated = The username and referrer cannot be the same.

static.invitationdetailsheetname=User Invitation Detail
static.invitationNum=User Invitation Count
@@ -990,6 +991,7 @@ dataset_available_clusters = Available Clusters
dataset_upload_time = Upload Time
download = Download
modify_description = Modify Description
copy_code = Copy code
set_public = Set Public
set_private = Set Private
annotation = Image Annotation
@@ -1084,6 +1086,7 @@ language_other = Other
datasets = Datasets
datasets.desc = Enable Dataset
cloudbrain_helper=Use GPU/NPU resources to open notebooks, model training tasks, etc.
code_use_resource=How to access data resources in code
cloudbrain.exitinfo=Exit Information
cloudbrain.platform=Platform
cloudbrain.endtime=End Time
@@ -1151,7 +1154,7 @@ images.name_placerholder = Please enter the image name
images.descr_placerholder = The description should not exceed 1000 characters
image.label_tooltips = Example Python 3.7, Tensorflow 2.0, cuda 10, pytorch 1.6
images.public_tooltips = After the image is set to public, it can be seen by other users.
images.submit_tooltips = The code directory /code, dataset directory /dataset will not be submitted with the image, and other directories will be packaged into the image.
images.submit_tooltips = The code directory /tmp/code, dataset directory /tmp/dataset will not be submitted with the image, and other directories will be packaged into the image.
images.name_format_err=The format of image tag is wrong.
images.name_rule50 = Please enter letters, numbers, _ and - up to 50 characters and starts with a letter.
images.name_rule100 = Please enter letters, numbers, _ and - up to 100 characters and cannot end with a dash (-).
@@ -3491,15 +3494,17 @@ boot_file_must_python = The boot file must be a python file
stop_failed = Fail to stop the job, please try again later.
can_not_restart = The task was not scheduled successfully before, so it cannot be restart.
dataset_size_over_limit = The size of dataset exceeds limitation (%dGB)
model_size_over_limit = The size of model exceeds limitation (%dGB)
boot_file_must_python = The boot file must be a python file
boot_file_not_exist = The boot file is not exists.
branch_not_exists = The branch does not exist. Please refresh and select again.
dataset_number_over_limit = The dataset count exceed the limit
model_number_over_limit = The model count exceed the limit
result_cleared=The files of the task have been cleared, can not restart or retrain any more, please create a new task instead
model_not_exist=The model in the task does not exist or has been deleted
too_many_notebook=A user can have up to 5 debug tasks, please try again after delete some debug tasks.
can_not_stop_creating_job=AI task is creating, can not be stopped.
no_center_match=Can not match a AI center, please select other specification.
no_center_match=Can not match an AI center, please select other specification.

[common_error]
system_error = System error.Please try again later


+ 8
- 3
options/locale/locale_zh-CN.ini View File

@@ -420,8 +420,8 @@ change_email_address=修改邮箱地址
new_email_address=新邮箱地址
openi_community_really_awesome=启智社区 确实给力
protocol_header=请仔细阅读下方内容:
protocol_title=尊敬的启智用户
protocol_context=感谢您一直以来对Openl启智社区AI协作平台的支持。为了保障您的使用权益和确保网络安全,我们于2024年1月份更新了《Openl启智社区AI协作平台使用协议》。更新后的协议明确了用户<font color="#ff2525">禁止使用内网穿透工具</font>的条例。在您勾选同意后,便可以继续使用我们的服务。感谢您的合作与理解。
protocol_title=尊敬的启智用户
protocol_context=感谢您一直以来对Openl启智社区AI协作平台的支持。为了保障您的使用权益和确保网络安全,我们于2024年1月份更新了《Openl启智社区AI协作平台使用协议》。更新后的协议明确了用户<font color="#ff2525">禁止使用内网穿透工具</font>的条例。您单击“同意并继续”后,便可以继续使用我们的服务。感谢您的合作与理解。
protocol_context_sub=更多协议内容,请参考<u><font color="#3291f8"><a href="/home/term" target="_blank">《Openl启智社区AI协作平台使用协议》</a></font></u>
protocol_confirm=同意并继续
protocol_cancel=不同意,退出
@@ -572,6 +572,7 @@ all = 所有
form.name_reserved='%s' 用户名被保留。
form.name_pattern_not_allowed=用户名中不允许使用 "%s"。
form.name_chars_not_allowed=用户名 '%s' 包含无效字符。
form.username_and_invited_code_duplicated = 注册用户名和推荐人不能相同。

static.invitationdetailsheetname=用户邀请详细数据
static.invitationNum=邀请用户数
@@ -994,6 +995,7 @@ dataset_available_clusters = 可用集群
dataset_upload_time = 上传时间
download = 下载
modify_description = 修改描述
copy_code = 复制代码
set_public = 设为公开
set_private = 设为私有
annotation = 图片标注
@@ -1088,6 +1090,7 @@ language_other=其它
datasets=数据集
datasets.desc=数据集功能
cloudbrain_helper=使用GPU/NPU资源,开启Notebook、模型训练任务等
code_use_resource=代码中如何访问数据资源

model_manager = 模型
model_base = 大模型基地
@@ -1150,7 +1153,7 @@ images.name_placerholder = 请输入镜像Tag
images.descr_placerholder = 描述字数不超过1000个字符
image.label_tooltips = 如Python 3.7, Tensorflow 2.0, cuda 10, pytorch 1.6
images.public_tooltips = 镜像设置为公开后,可被其他用户看到。
images.submit_tooltips = 代码目录/code,数据集目录/dataset不会随镜像提交,其他目录都会打包到镜像中。
images.submit_tooltips = 代码目录/tmp/code,数据集目录/tmp/dataset不会随镜像提交,其他目录都会打包到镜像中。
images.name_format_err=镜像Tag格式错误。
images.name_rule50 = 请输入字母、数字、_和-,最长50个字符,且以字母开头。
images.name_rule100 = 请输入字母、数字、_和-,最长100个字符,且不能以中划线(-)结尾。
@@ -3514,10 +3517,12 @@ boot_file_must_python = 启动文件必须是python文件
stop_failed = 任务停止失败,请稍后再试
can_not_restart = 这个任务之前没有调度成功,不能再次调试。
dataset_size_over_limit = 数据集大小超过限制(%dGB)
model_size_over_limit = 模型大小超过限制(%dGB)
boot_file_must_python = 启动文件必须是python文件
boot_file_not_exist =启动文件不存在
branch_not_exists = 代码分支不存在,请刷新后重试
dataset_number_over_limit = 选择的数据集文件数量超出限制
model_number_over_limit = 选择的模型数量超出限制
result_cleared=源任务的文件已被清理,无法再次调试或复用训练结果,请新建任务。
model_not_exist=选择的预训练模型不存在或者已被删除
too_many_notebook=每个用户最多只能创建5个调试任务,请删除历史任务再新建。


+ 9
- 0
routers/ai_task/ai_task.go View File

@@ -567,3 +567,12 @@ func handCreateReq(req *entity.CreateReq) {
req.WorkServerNumber = 1
}
}

func GenerateSDKCode(ctx *context.Context) {
datasetNames := ctx.QueryStrings("dataset_name")
pretrainModelNames := ctx.QueryStrings("pretrain_model_name")
parameterKeys := ctx.QueryStrings("param_key")
jobType := ctx.Query("job_type")
code := task.GenerateSDKCode(datasetNames, pretrainModelNames, parameterKeys, models.JobType(jobType))
ctx.JSON(http.StatusOK, response.OuterSuccessWithData(map[string]string{"code": code}))
}

+ 5
- 1
routers/api/v1/api.go View File

@@ -660,7 +660,6 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Get("/creation/required", reqWeChatStandard(), reqRepoWriter(models.UnitTypeCloudBrain), ai_task.GetCreationRequiredInfo)
m.Get("/creation/image_by_spec", reqWeChatStandard(), reqRepoWriter(models.UnitTypeCloudBrain), ai_task.GetImageInfoBySelectedSpec)
m.Post("/output/reschedule", reqRepoWriter(models.UnitTypeCloudBrain), ai_task.RetryModelSchedule)

}, reqToken(), context.RepoRef())
m.Group("/ai_task", func() {
m.Get("", reqRepoReader(models.UnitTypeCloudBrain), ai_task.GetAITaskInfo)
@@ -677,6 +676,11 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Get("/resource_usage", reqRepoReader(models.UnitTypeCloudBrain), reqAITaskInRepo(), ai_task.GetAITaskResourceUsage)
})
}, repoAssignment())

m.Group("/ai_task", func() {
m.Get("/generate_sdk_code", ai_task.GenerateSDKCode)
})

// Miscellaneous
if setting.API.EnableSwagger {
m.Get("/swagger", misc.Swagger)


+ 1
- 1
routers/repo/ai_model_convert.go View File

@@ -420,7 +420,7 @@ func createGpuTrainJob(modelConvert *models.AiModelConvert, ctx *context.Context
log.Info("command=" + command)
codePath := setting.JobPath + modelConvert.ID + CodeMountPath
codeTmpPath := setting.JobPath + modelConvert.ID + CodeMountPath + "tmp"
uploader := storage_helper.SelectUploaderFromStorageType(entity.MINIO)
uploader := storage_helper.SelectStorageHelperFromStorageType(entity.MINIO)
codeRemoteDir := path.Join(uploader.GetJobDefaultObjectKeyPrefix(modelConvert.ID), "code")
log.Info("codePath=" + codePath)
log.Info("codeTmpPath=" + codeTmpPath)


+ 55
- 4
routers/repo/ai_model_manage.go View File

@@ -2,6 +2,10 @@ package repo

import (
"archive/zip"
"code.gitea.io/gitea/routers/response"
"code.gitea.io/gitea/services/ai_model"
"code.gitea.io/gitea/services/cloudbrain/modelmanage"
"code.gitea.io/gitea/services/cloudbrain/resource"
"encoding/json"
"errors"
"fmt"
@@ -12,10 +16,6 @@ import (
"regexp"
"strings"

"code.gitea.io/gitea/services/cloudbrain/resource"

"code.gitea.io/gitea/services/cloudbrain/modelmanage"

"code.gitea.io/gitea/services/repository"

"code.gitea.io/gitea/models"
@@ -167,6 +167,7 @@ func asyncToCopyModel(aiTask *models.Cloudbrain, id string, modelSelectedFile st
insertModelFile(id)
}
}
ai_model.UpdateModelMeta(id)
}

func insertModelFile(id string) {
@@ -555,6 +556,7 @@ func DeleteModelFile(ctx *context.Context) {
ModelID: id,
}
models.DeleteModelFile(modelFile)
ai_model.UpdateModelMeta(id)
}
}

@@ -585,6 +587,55 @@ func DeleteModel(ctx *context.Context) {
}
}

func UpdateAllModelMeta(ctx *context.Context) {
log.Info("Start to update all model meta")
ids := ctx.QueryStrings("model_id")
updateAll := ctx.QueryBool("update_all")
if !updateAll && len(ids) == 0 {
ctx.JSON(http.StatusOK, response.OuterSuccessWithData(map[string]int{"count": 0}))
return
}
count := 0
if updateAll {
pageSize := 100
pageNum := 1
for {
ids, err := models.QueryModelIdsByPaging(pageSize, pageNum, "created_unix")
if err != nil {
log.Error("UpdateAllModelMeta QueryModelIdsByPaging err.%v", err)
ctx.JSON(http.StatusOK, response.OuterResponseError(err))
return
}
for _, id := range ids {
ai_model.UpdateModelMeta(id)
count++
}
if len(ids) < pageSize {
break
}
pageNum++
}

} else {
for _, id := range ids {
ai_model.UpdateModelMeta(id)
count++
}
}
ctx.JSON(http.StatusOK, response.OuterSuccessWithData(map[string]int{"count": count}))
}

func QueryModelMetaById(ctx *context.Context) {
log.Info("Start to query model meta")
id := ctx.Query("model_id")
data, err := ai_model.QueryModelMeta(id)
if err != nil {
ctx.JSON(http.StatusOK, response.OuterResponseError(err))
return
}
ctx.JSON(http.StatusOK, response.OuterSuccessWithData(map[string]interface{}{"meta_data": data}))
}

func deleteModelByID(ctx *context.Context, id string) error {
log.Info("delete model start. id=" + id)
model, err := models.QueryModelById(id)


+ 32
- 23
routers/repo/ai_model_square.go View File

@@ -39,6 +39,7 @@ type ModelMap struct {
RepoDisplayName string
RepoId int64
Model *models.AiModelManage
Models4Parent []*models.AiModelManage
Next []*ModelMap
}

@@ -317,36 +318,44 @@ func findParent(model *models.AiModelManage) *ModelMap {
} else {
log.Info("find parent model name." + task.ModelName)
if task.ModelName != "" {
result := &ModelMap{
Type: 1,
IsParent: true,
}
modelsArray := make([]*models.AiModelManage, 0)
if task.ModelId != "" {
parentModel, err := models.QueryModelById(task.ModelId)
setModelRepo(parentModel)
setModelUser(parentModel)
setModelDataSet(parentModel)
if err == nil {
re := &ModelMap{
Type: 1,
IsParent: true,
Model: parentModel,
modelIdArray := task.GetModelIdArray()
for i := 0; i < len(modelIdArray); i++ {
modelId := modelIdArray[i]
parentModel, err := models.QueryModelById(modelId)
setModelRepo(parentModel)
setModelUser(parentModel)
setModelDataSet(parentModel)
if err != nil {
return nil
}
return re
modelsArray = append(modelsArray, parentModel)
}
result.Models4Parent = modelsArray
return result
} else {
modelList := models.QueryModelByName(task.ModelName, task.RepoID)
if modelList != nil && len(modelList) > 0 {
for _, parentModel := range modelList {
setModelUser(parentModel)
setModelRepo(parentModel)
setModelDataSet(parentModel)
if parentModel.Version == task.ModelVersion {
re := &ModelMap{
Type: 1,
IsParent: true,
Model: parentModel,
modelNameArray := task.GetModelNameArray()
for i := 0; i < len(modelNameArray); i++ {
modelName := modelNameArray[i]
modelList := models.QueryModelByName(modelName, task.RepoID)
if modelList != nil && len(modelList) > 0 {
for _, parentModel := range modelList {
setModelUser(parentModel)
setModelRepo(parentModel)
setModelDataSet(parentModel)
if parentModel.Version == task.ModelVersion {
modelsArray = append(modelsArray, parentModel)
}
return re
}
}
}
result.Models4Parent = modelsArray
return result
}
}
}
@@ -383,7 +392,7 @@ func findChild(currentNode *ModelMap) {
log.Info("error=" + err.Error())
} else {
log.Info("task.ModelId=%v,currentModel.ID=%v", task.ModelId, currentModel.ID)
if task.ModelId != "" && task.ModelId == currentModel.ID {
if task.ModelId != "" && task.HasUseModel(currentModel.ID) {
setModelUser(childModel)
setModelDataSet(childModel)
modelMap := &ModelMap{


+ 4
- 0
routers/repo/aisafety.go View File

@@ -823,6 +823,10 @@ func createForNPU(ctx *context.Context, jobName string) error {
JobType: string(models.JobTypeModelSafety),
}

userCommand, userImageUrl := getInfJobUserCommand(engineID, req)
req.UserCommand = userCommand
req.UserImageUrl = userImageUrl

_, err = modelarts.GenerateInferenceJob(ctx, req)
if err != nil {
log.Error("GenerateTrainJob failed:%v", err.Error())


+ 5
- 2
routers/repo/attachment_model.go View File

@@ -1,6 +1,7 @@
package repo

import (
"code.gitea.io/gitea/services/ai_model"
"errors"
"fmt"
"path"
@@ -20,7 +21,7 @@ import (
func GetModelChunks(ctx *context.Context) {
fileMD5 := ctx.Query("md5")
typeCloudBrain := ctx.QueryInt("type")
fileName := ctx.Query("file_name")
//fileName := ctx.Query("file_name")
//scene := ctx.Query("scene")
modeluuid := ctx.Query("modeluuid")
log.Info(" typeCloudBrain=" + fmt.Sprint(typeCloudBrain))
@@ -85,6 +86,7 @@ func GetModelChunks(ctx *context.Context) {
if err == nil && model != nil {
modelname = model.Name
}
fileNameUploaded := strings.Split(fileChunk.ObjectName, modeluuid+"/")[1]
ctx.JSON(200, map[string]string{
"uuid": fileChunk.UUID,
"uploaded": strconv.Itoa(fileChunk.IsUploaded),
@@ -92,7 +94,7 @@ func GetModelChunks(ctx *context.Context) {
"chunks": string(chunks),
"attachID": "0",
"modeluuid": modeluuid,
"fileName": fileName,
"fileName": fileNameUploaded,
"modelName": modelname,
})
} else {
@@ -323,6 +325,7 @@ func CompleteModelMultipart(ctx *context.Context) {
}
//更新模型大小信息
UpdateModelSize(modeluuid, fileChunk.ObjectName)
ai_model.UpdateModelMeta(modeluuid)

ctx.JSON(200, map[string]string{
"result_code": "0",


+ 63
- 41
routers/repo/modelarts.go View File

@@ -68,53 +68,52 @@ func DebugJobIndex(ctx *context.Context) {
if listType == "" {
listType = models.AllResource
}
ctx.Data["ListType"] = listType
MustEnableCloudbrain(ctx)
repo := ctx.Repo.Repository
page := ctx.QueryInt("page")
if page <= 0 {
page = 1
}

jobTypeNot := false
var computeResource string
if listType != models.AllResource {
computeResource = listType
}

var jobTypes []string
jobTypes = append(jobTypes, string(models.JobTypeDebug))
ciTasks, count, err := models.Cloudbrains(&models.CloudbrainsOptions{
ListOptions: models.ListOptions{
Page: page,
PageSize: setting.UI.IssuePagingNum,
},
RepoID: repo.ID,
ComputeResource: computeResource,
Type: models.TypeCloudBrainAll,
JobTypeNot: jobTypeNot,
JobTypes: jobTypes,
})
if err != nil {
ctx.ServerError("Get debugjob faild:", err)
return
}

for i, task := range ciTasks {
ciTasks[i].CanDebug = cloudbrain.CanModifyJob(ctx, &task.Cloudbrain)
ciTasks[i].CanDel = cloudbrain.CanDeleteJob(ctx, &task.Cloudbrain)
ciTasks[i].Cloudbrain.ComputeResource = task.ComputeResource
}

pager := context.NewPagination(int(count), setting.UI.IssuePagingNum, page, 5)
pager.AddParam(ctx, "debugListType", "ListType")
ctx.Data["Page"] = pager
ctx.Data["ListType"] = listType
ctx.Data["PageIsCloudBrain"] = true
ctx.Data["Tasks"] = ciTasks
ctx.Data["CanCreate"] = cloudbrain.CanCreateOrDebugJob(ctx)
ctx.Data["RepoIsEmpty"] = repo.IsEmpty
ctx.Data["debugListType"] = listType
ctx.HTML(200, tplDebugJobIndex)

// page := ctx.QueryInt("page")
// if page <= 0 {
// page = 1
// }

// jobTypeNot := false
// var computeResource string
// if listType != models.AllResource {
// computeResource = listType
// }

// var jobTypes []string
// jobTypes = append(jobTypes, string(models.JobTypeDebug))
// ciTasks, count, err := models.Cloudbrains(&models.CloudbrainsOptions{
// ListOptions: models.ListOptions{
// Page: page,
// PageSize: setting.UI.IssuePagingNum,
// },
// RepoID: repo.ID,
// ComputeResource: computeResource,
// Type: models.TypeCloudBrainAll,
// JobTypeNot: jobTypeNot,
// JobTypes: jobTypes,
// })
// if err != nil {
// ctx.ServerError("Get debugjob faild:", err)
// return
// }

// for i, task := range ciTasks {
// ciTasks[i].CanDebug = cloudbrain.CanModifyJob(ctx, &task.Cloudbrain)
// ciTasks[i].CanDel = cloudbrain.CanDeleteJob(ctx, &task.Cloudbrain)
// ciTasks[i].Cloudbrain.ComputeResource = task.ComputeResource
// }

// pager := context.NewPagination(int(count), setting.UI.IssuePagingNum, page, 5)
// pager.AddParam(ctx, "debugListType", "ListType")
}

// MustEnableDataset check if repository enable internal cb
@@ -502,7 +501,30 @@ func NotebookRestart(ctx *context.Context) {
var spec *models.Specification

task := ctx.Cloudbrain

if task.IsNewAITask() {
res, bizErr := ai_task.RestartAITask(task.ID, ctx.Repo.GitRepo, ctx.Repo.Repository, ctx.User)
if bizErr != nil {
log.Error("lRestartAITask failed:task.ID=%d err=%v", task.ID, bizErr.DefaultMsg)
errorMsg = ctx.Tr(bizErr.TrCode)
ctx.JSON(200, map[string]string{
"result_code": resultCode,
"error_msg": errorMsg,
"status": status,
"id": id,
})
return
}
id = strconv.FormatInt(res.ID, 10)
status = res.Status
resultCode = "0"
ctx.JSON(200, map[string]string{
"result_code": resultCode,
"error_msg": errorMsg,
"status": status,
"id": id,
})
return
}
lockOperator, errMsg := cloudbrainService.Lock4CloudbrainRestart(&lock.LockContext{Repo: ctx.Repo.Repository, Task: &models.Cloudbrain{JobType: task.JobType}, User: ctx.User})
defer func() {
if lockOperator != nil {


+ 1
- 15
routers/repo/view.go View File

@@ -846,31 +846,17 @@ func renderCode(ctx *context.Context) {
*/
baseGitRepo, err := git.OpenRepository(ctx.Repo.Repository.BaseRepo.RepoPath())
defer baseGitRepo.Close()
var compareInfo *git.CompareInfo
//var compareInfo *git.CompareInfo
if err != nil {
log.Error("error open baseRepo:%s", ctx.Repo.Repository.BaseRepo.RepoPath())
ctx.Data["FetchUpstreamCnt"] = -1 // minus value indicates error
} else {
if _, error := baseGitRepo.GetBranch(ctx.Repo.BranchName); error == nil {
//base repo has the same branch, then compare between current repo branch and base repo's branch
compareInfo, err = baseGitRepo.GetCompareInfo(ctx.Repo.Repository.RepoPath(), ctx.Repo.BranchName, ctx.Repo.BranchName)
ctx.Data["UpstreamSameBranchName"] = true
} else {
//else, compare between current repo branch and base repo's default branch
compareInfo, err = baseGitRepo.GetCompareInfo(ctx.Repo.Repository.RepoPath(), ctx.Repo.BranchName, ctx.Repo.Repository.BaseRepo.DefaultBranch)
ctx.Data["UpstreamSameBranchName"] = false
}
if err == nil && compareInfo != nil {
if compareInfo.Commits != nil {
log.Info("compareInfoCommits数量:%d", compareInfo.Commits.Len())
ctx.Data["FetchUpstreamCnt"] = compareInfo.Commits.Len()
} else {
log.Info("compareInfo nothing different")
ctx.Data["FetchUpstreamCnt"] = 0
}
} else {
ctx.Data["FetchUpstreamCnt"] = -1 // minus value indicates error
}
}
}
ctx.Data["Paths"] = paths


+ 3
- 1
routers/response/response_list.go View File

@@ -38,4 +38,6 @@ var MODEL_NUM_OVER_LIMIT = &BizError{Code: 2021, DefaultMsg: "The number of mode
var DATASET_NUMBER_OVER_LIMIT = &BizError{Code: 2022, DefaultMsg: "The dataset count exceed the limit", TrCode: "ai_task.dataset_number_over_limit"}
var NOTEBOOK_EXCEED_MAX_NUM = &BizError{Code: 2023, DefaultMsg: "You can have up to 5 Debug Tasks, please try again after delete some tasks. ", TrCode: "ai_task.too_many_notebook"}
var CAN_NOT_STOP_CREATING_JOB = &BizError{Code: 2024, DefaultMsg: "AI task is creating, can not be stopped", TrCode: "ai_task.can_not_stop_creating_job"}
var NO_CENTER_MATCH = &BizError{Code: 2024, DefaultMsg: "", TrCode: "ai_task.no_center_match"}
var NO_CENTER_MATCH = &BizError{Code: 2024, DefaultMsg: "Can not match an AI center, please select other specification.", TrCode: "ai_task.no_center_match"}
var MODEL_NUMBER_OVER_LIMIT = &BizError{Code: 2025, DefaultMsg: "The model count exceeds the limit", TrCode: "ai_task.model_number_over_limit"}
var MODEL_SIZE_OVER_LIMIT = &BizError{Code: 2026, DefaultMsg: "The size of the model exceeds the limitation", TrCode: "ai_task.model_size_over_limit"}

+ 7
- 3
routers/routes/routes.go View File

@@ -377,11 +377,11 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Get("/main_query_data", repo.ModelSquareData)
m.Put("/modify_model_collect", repo.ModelCollect)
m.Get("/main_query_label", repo.QueryModelLabel)
})
}, ignSignIn)

m.Group("/extension", func() {
// m.Get("", modelapp.ModelMainPage)
m.Get("/mind", modelapp.MindPage)
m.Get("/mind", ignSignIn, modelapp.MindPage)
m.Get("/tuomin/upload", modelapp.ProcessImageUI)
m.Post("/tuomin/upload", reqSignIn, modelapp.ProcessImage)
m.Get("/wenxin", modelapp.WenXinPage)
@@ -399,7 +399,7 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Get("/create", reqSignIn, reqWechatBind, modelapp.PanguFinetuneCreateUI)
m.Get("/inference", reqSignIn, modelapp.PanguInferenceUI)
})
})
}, ignSignIn)
})

m.Group("/computingpower", func() {
@@ -741,6 +741,10 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Post("/update/:id", binding.BindIgnErr(models.ResourceSceneReq{}), admin.UpdateResourceScene)
})
})
m.Group("/ai_model", func() {
m.Post("/update_version", repo.UpdateAllModelMeta)
m.Get("/query_meta", repo.QueryModelMetaById)
})
}, adminReq)
// ***** END: Admin *****



+ 9
- 6
routers/user/Invitation.go View File

@@ -1,6 +1,7 @@
package user

import (
"code.gitea.io/gitea/modules/notification"
"errors"
"strconv"
"strings"
@@ -63,14 +64,14 @@ func InviationTpl(ctx *context.Context) {
ctx.HTML(200, tplInvitation)
}

func RegisteUserByInvitaionCode(invitationcode string, newUserId int64, newPhoneNumber string, email string) error {
func RegisteUserByInvitaionCode(invitationcode string, newUser *models.User) error {
user := parseInvitaionCode(invitationcode)
if user == nil {
return errors.New("The invitated user not existed.")
}

if newPhoneNumber != "" {
re := models.QueryInvitaionByPhone(newPhoneNumber)
if newUser.PhoneNumber != "" {
re := models.QueryInvitaionByPhone(newUser.PhoneNumber)
if re != nil {
if len(re) > 0 {
log.Info("The phone has been invitated. so ingore it.")
@@ -83,14 +84,16 @@ func RegisteUserByInvitaionCode(invitationcode string, newUserId int64, newPhone

invitation := &models.Invitation{
SrcUserID: user.ID,
UserID: newUserId,
Phone: newPhoneNumber,
Email: email,
UserID: newUser.ID,
Phone: newUser.PhoneNumber,
Email: newUser.Email,
}

err := models.InsertInvitaion(invitation)
if err != nil {
log.Info("insert error," + err.Error())
} else {
notification.NotifyInviteFriendRegister(user, newUser)
}
return err
}


+ 9
- 1
routers/user/auth.go View File

@@ -1434,6 +1434,11 @@ func SignUpPost(ctx *context.Context, cpt *captcha.Captcha, form auth.RegisterFo
}
}

if strings.ToLower(invitationCode) == strings.ToLower(form.UserName) {
ctx.RenderWithErr(ctx.Tr("user.form.username_and_invited_code_duplicated"), tplSignUp, &form)
return
}

if !form.IsEmailDomainWhitelisted() {
ctx.RenderWithErr(ctx.Tr("auth.email_domain_blacklisted"), tplSignUp, &form)
return
@@ -1499,7 +1504,10 @@ func SignUpPost(ctx *context.Context, cpt *captcha.Captcha, form auth.RegisterFo

log.Info("enter here, and form.InvitaionCode =" + invitationCode)
if invitationCode != "" {
RegisteUserByInvitaionCode(invitationCode, u.ID, u.PhoneNumber, u.Email)
tmpErr := RegisteUserByInvitaionCode(invitationCode, u)
if tmpErr != nil {
log.Error("RegisteUserByInvitaionCode err.u=%+v invitationCode=%s", u, invitationCode)
}
}

err := models.AddEmailAddress(&models.EmailAddress{


+ 74
- 0
services/ai_model/model_version.go View File

@@ -0,0 +1,74 @@
package ai_model

import (
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/obs"
"code.gitea.io/gitea/modules/redis/redis_key"
"code.gitea.io/gitea/modules/redis/redis_lock"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/storage"
"encoding/json"
"errors"
"io"
"strings"
"time"
)

const versionKey = "data_version"
const versionFileName = "openi_resource.version"

// UpdateModelMeta updates the version info of the model
func UpdateModelMeta(modelId string) error {
lock := redis_lock.NewDistributeLock(redis_key.AIModelMetaUpdateLock(modelId))
success, err := lock.LockWithWait(3*time.Second, 3*time.Second)
if err != nil {
return err
}
if !success {
return errors.New("InitModelMeta err")
}
defer lock.UnLock()
m, err := models.QueryModelById(modelId)
if err != nil {
return err
}
info := map[string]interface{}{versionKey: time.Now().Unix()}
metaInfo, _ := json.Marshal(info)
input := &obs.PutObjectInput{}
input.Bucket = setting.Bucket
input.Key = m.Path[len(setting.Bucket)+1:] + versionFileName
input.Body = strings.NewReader(string(metaInfo))
_, err = storage.ObsCli.PutObject(input)
return err
}

func QueryModelMeta(modelId string) (string, error) {
m, err := models.QueryModelById(modelId)
if err != nil {
log.Error("QueryModelMeta QueryModelById err.%v")
return "", err
}
input := &obs.GetObjectInput{}
input.Bucket = setting.Bucket
input.Key = m.Path[len(setting.Bucket)+1:] + versionFileName
data, err := storage.ObsCli.GetObject(input)

if err != nil {
log.Error("QueryModelMeta GetObject err.%v")
return "", err
}
if data == nil {
return "", nil
}
defer data.Body.Close()
s, _ := io.ReadAll(data.Body)
return string(s), nil
}

func InitModelMeta(modelId string) {
data, _ := QueryModelMeta(modelId)
if data != "" {
return
}
UpdateModelMeta(modelId)
}
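
A minimal usage sketch (hypothetical caller, not part of this diff) showing how these helpers fit together: InitModelMeta writes openi_resource.version only when it is missing, and QueryModelMeta returns the stored JSON so callers can compare data_version values.

// Hypothetical caller inside the same package; assumes the OBS settings used above are already initialized.
func ensureAndReadModelVersion(modelId string) (string, error) {
	// Writes {"data_version": <unix timestamp>} under the model path if no version file exists yet.
	InitModelMeta(modelId)
	// Returns the raw JSON content of openi_resource.version (empty string if nothing is stored).
	return QueryModelMeta(modelId)
}
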

+ 66
- 233
services/ai_task_service/cluster/c2net.go View File

@@ -11,10 +11,8 @@ import (
"code.gitea.io/gitea/entity"
"code.gitea.io/gitea/manager/client/grampus"
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/cloudbrain"
model_grampus "code.gitea.io/gitea/modules/grampus"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/modelarts"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/storage"
"code.gitea.io/gitea/modules/timeutil"
@@ -57,7 +55,7 @@ func (c C2NetClusterAdapter) CreateOnlineInfer(req entity.CreateNoteBookTaskRequ
log.Error("CreateOnlineInfer err.req=%+v err=%v", req, err)
return nil, err
}
jobResult, err := grampus.CreateNotebookJob(newReq)
jobResult, err := grampus.CreateInferenceJob(newReq)
if err != nil {
log.Error("CreateNoteBook failed: %v", err.Error())
return nil, err
@@ -127,10 +125,11 @@ func convertNoteBookReq2Grampus(req entity.CreateNoteBookTaskRequest) (models.Cr
codePath = codePath[0:strings.LastIndex(codePath, "/")]
}
}
var commandGpuDebug = "mkdir -p /dataset;! [ -x \"$(command -v jupyter)\" ] && pip install jupyterlab==3 -i https://pypi.tuna.tsinghua.edu.cn/simple;jupyter lab --ServerApp.shutdown_no_activity_timeout=%s --TerminalManager.cull_inactive_timeout=%s --TerminalManager.cull_interval=%s --MappingKernelManager.cull_idle_timeout=%s --MappingKernelManager.cull_interval=%s --MappingKernelManager.cull_connected=True --MappingKernelManager.cull_busy=True --no-browser --ip=0.0.0.0 --allow-root --notebook-dir='%s' --port=$OCTOPUS_NOTEBOOK_PORT --LabApp.token='' --LabApp.allow_origin='*' --LabApp.base_url=$OCTOPUS_NOTEBOOK_BASE_URL;"

var commandGpuDebug = "mkdir -p /tmp/dataset;jupyter lab --ServerApp.shutdown_no_activity_timeout=%s --TerminalManager.cull_inactive_timeout=%s --TerminalManager.cull_interval=%s --MappingKernelManager.cull_idle_timeout=%s --MappingKernelManager.cull_interval=%s --MappingKernelManager.cull_connected=True --MappingKernelManager.cull_busy=True --no-browser --ip=0.0.0.0 --allow-root --notebook-dir='%s' --port=$OCTOPUS_NOTEBOOK_PORT --LabApp.token='' --LabApp.allow_origin='*' --LabApp.base_url=$OCTOPUS_NOTEBOOK_BASE_URL;"
command := fmt.Sprintf(commandGpuDebug, setting.CullIdleTimeout, setting.CullIdleTimeout, setting.CullInterval, setting.CullIdleTimeout, setting.CullInterval, codePath)
if models.DCU == req.Tasks[0].Spec.ComputeResource {
command = "cp -r /code /tmp;cp -r /dataset /tmp;cp -r /pretrainmodel /tmp;"
command = ""
}
if models.NPU == req.Tasks[0].Spec.ComputeResource {
command = ""
@@ -149,21 +148,22 @@ func convertNoteBookReq2Grampus(req entity.CreateNoteBookTaskRequest) (models.Cr
return models.CreateGrampusNotebookRequest{Name: req.Name, Tasks: tasks}, nil
}

func convertOnlineInfer2Grampus(req entity.CreateNoteBookTaskRequest) (models.CreateGrampusNotebookRequest, error) {
func convertOnlineInfer2Grampus(req entity.CreateNoteBookTaskRequest) (models.CreateGrampusInferenceRequest, error) {

command := generateCommand(req.RepoName, req.Tasks[0].BootFile, req.PrimitiveDatasetName)
command := ""
//:= generateCommand(req.RepoName, req.Tasks[0].BootFile, req.PrimitiveDatasetName)

tasks := make([]models.GrampusNotebookTask, len(req.Tasks))
tasks := make([]models.GrampusInferenceTask, len(req.Tasks))
for i := 0; i < len(req.Tasks); i++ {
t := req.Tasks[i]
task, err := convertNoteBookTask2Grampus(t, command)
task, err := convertOnlineInference2Grampus(t, command)
if err != nil {
return models.CreateGrampusNotebookRequest{}, nil
return models.CreateGrampusInferenceRequest{}, err
}
tasks[i] = task
}

return models.CreateGrampusNotebookRequest{Name: req.Name, Tasks: tasks}, nil
return models.CreateGrampusInferenceRequest{Name: req.Name, Tasks: tasks}, nil
}

func generateCommand(repoName, bootFile, datasetName string) string {
@@ -191,7 +191,8 @@ func generateCommand(repoName, bootFile, datasetName string) string {
copyDatasetPath := "/code/" + strings.ToLower(repoName) + "/" + bootfilepath
commandUnzip := "export OPENI_GRADIO_URL=$OCTOPUS_NOTEBOOK_BASE_URL;" + "cd " + workDir + "code;echo \"start unzip code\";unzip -q master.zip; " + copyDatasetCmd + " echo \"start to unzip dataset\";cd " + copyDatasetPath + "; " + unZipDatasetCommand
//commandUnzip := "cd " + workDir + "code;echo \"start unzip code\";unzip -q master.zip;echo \"start to unzip dataset\";cd " + workDir + "dataset;" + unZipDatasetCommand
command += commandUnzip

command = command + commandUnzip
command += "echo \"unzip finished;start to exec code;\";"
if strings.HasSuffix(bootonlyfile, ".py") {
bootonlyfile = bootonlyfile[0 : len(bootonlyfile)-3]
@@ -212,6 +213,41 @@ func getCopyCmd(datasetName, repoName, bootfilepath string) string {
return cmd
}

func convertOnlineInference2Grampus(t entity.NoteBookTask, command string) (models.GrampusInferenceTask, error) {
code := models.GrampusDataset{}
codeArray := convertContainerArray2GrampusArray(t.Code)
if codeArray != nil && len(codeArray) > 0 {
code = codeArray[0]
}
output := models.GrampusDataset{}
outputArray := convertContainerArray2GrampusArray(t.OutPut)
if outputArray != nil && len(outputArray) > 0 {
output = outputArray[0]
}
centerIds, err := getGrampusAvailableCenterIds(t.Queues, t.ImageId, *models.GetComputeSourceInstance(t.Spec.ComputeResource), models.JobTypeDebug)
log.Info("check centerIds getGrampusAvailableCenterIds ImageId=%s queues=%v centerIds=%v", t.ImageId, t.Queues, centerIds)
if err != nil {
log.Error("check centerIds getGrampusAvailableCenterIds err.%v", err)
return models.GrampusInferenceTask{}, err
}
return models.GrampusInferenceTask{
Name: t.Name,
ResourceSpecId: t.Spec.SourceSpecId,
ImageId: t.ImageId,
ImageUrl: t.ImageUrl,
Datasets: convertContainerArray2GrampusArray(t.Datasets),
PreTrainModel: convertContainerArray2GrampusArray(t.PreTrainModel),
Code: code,
OutPut: output,
EnvVariables: t.EnvVariables,
AutoStopDuration: t.AutoStopDuration,
Capacity: t.Capacity,
Command: command,
CenterID: centerIds,
BootFile: t.BootFile,
}, nil
}

func convertNoteBookTask2Grampus(t entity.NoteBookTask, command string) (models.GrampusNotebookTask, error) {
code := models.GrampusDataset{}
codeArray := convertContainerArray2GrampusArray(t.Code)
@@ -233,8 +269,10 @@ func convertNoteBookTask2Grampus(t entity.NoteBookTask, command string) (models.
ImageId: t.ImageId,
ImageUrl: t.ImageUrl,
Datasets: convertContainerArray2GrampusArray(t.Datasets),
PreTrainModel: convertContainerArray2GrampusArray(t.PreTrainModel),
Code: code,
OutPut: output,
EnvVariables: t.EnvVariables,
AutoStopDuration: t.AutoStopDuration,
Capacity: t.Capacity,
Command: command,
@@ -307,6 +345,15 @@ func convertContainerArray2Grampus(containerDatas []entity.ContainerData) models
return res
}

func convertParameters2Grampus(parameters models.Parameters) map[string]interface{} {
req := make(map[string]interface{})
for _, param := range parameters.Parameter {
req[param.Label] = param.Value
}

return req
}

func convertContainer2Grampus(d entity.ContainerData) models.GrampusDataset {
return models.GrampusDataset{
Name: d.Name,
@@ -318,6 +365,7 @@ func convertContainer2Grampus(d entity.ContainerData) models.GrampusDataset {
GetBackEndpoint: d.GetBackEndpoint,
Size: d.Size,
IsOverwrite: d.IsOverwrite,
IsNeedUnzip: d.IsNeedUnzip,
}
}

@@ -498,7 +546,7 @@ func (c C2NetClusterAdapter) CreateTrainJob(req entity.CreateTrainTaskRequest) (
}

func convertTrainReq2Grampus(req entity.CreateTrainTaskRequest) (models.CreateGrampusJobRequest, error) {
command := generateGrampusTrainCommand(req)
command := ""

tasks := make([]models.GrampusTasks, len(req.Tasks))
for i := 0; i < len(req.Tasks); i++ {
@@ -513,222 +561,6 @@ func convertTrainReq2Grampus(req entity.CreateTrainTaskRequest) (models.CreateGr
return models.CreateGrampusJobRequest{Name: req.Name, Tasks: tasks}, nil
}

func generateGrampusTrainCommand(req entity.CreateTrainTaskRequest) string {
t := req.Tasks[0]
containerConfig := req.TaskConfig
computeResource := t.Spec.ComputeResource
var codePath = containerConfig.GetContainerPath(entity.ContainerCode)
var modelPath = containerConfig.GetContainerPath(entity.ContainerPreTrainModel)
var datasetPath = containerConfig.GetContainerPath(entity.ContainerDataset)
var outputPath = containerConfig.GetContainerPath(entity.ContainerOutPutPath)

var modelFilePath = ""
if t.PreTrainModel != nil && len(t.PreTrainModel) > 0 {
modelFilePath = t.PreTrainModel[0].ContainerPath
}
builder := &entity.CommandBuilder{}
builder.
//mkdir dirs
Add(buildMkdirCommand(codePath, modelPath, datasetPath, outputPath)).
//unzip code
Add(buildUnzipCodeCommand(codePath, t.Code[0].ContainerPath, computeResource)).
//unzip dataset
Add(buildUnzipDatasetCommand(t.Datasets, datasetPath, computeResource)).
//export
Add(buildExportCommand(req.Name, computeResource)).
//exec code
Add(buildExecCodeCommand(path.Join(codePath, strings.ToLower(t.RepoName)), modelFilePath, t.BootFile, computeResource, req.Name, t.Params, t.Datasets, datasetPath))

return builder.ToString()
}

func buildMkdirCommand(dirs ...string) *entity.CommandBuilder {
builder := &entity.CommandBuilder{}
for _, dir := range dirs {
builder.Next(entity.NewCommand("mkdir", "-p", dir))
}
return builder
}

func buildUnzipCodeCommand(codeConfigPath, codeFilePath, computeSource string) *entity.CommandBuilder {
builder := &entity.CommandBuilder{}
if computeSource == models.NPU {
return builder
}
builder.
Next(entity.NewCommand("echo", "'start to unzip code'")).
Next(entity.NewCommand("cd", codeConfigPath)).
Next(entity.NewCommand("unzip", "-q", codeFilePath)).
Next(entity.NewCommand("echo", "'unzip code finished'")).
Next(entity.NewCommand("ls", "-l")).
Next(entity.NewCommand("ls", "-l", "mnist_pytorchexample_gpu"))
return builder
}
func buildUnzipDatasetCommand(datasets []entity.ContainerData, datasetPath, computeSource string) *entity.CommandBuilder {
builder := &entity.CommandBuilder{}
if computeSource == models.NPU {
return builder
}
if len(datasets) == 0 {
return nil
}
builder.Next(entity.NewCommand("cd", datasetPath)).
Next(entity.NewCommand("echo", "'start to unzip datasets'"))

fileDatasets := make([]entity.ContainerData, 0)
for _, dataset := range datasets {
if !dataset.IsDir {
fileDatasets = append(fileDatasets, dataset)
}
}
// single dataset
if len(fileDatasets) == 1 {
if strings.HasSuffix(fileDatasets[0].Name, ".tar.gz") {
builder.Next(entity.NewCommand("tar", "--strip-components=1", "-zxvf", "'"+fileDatasets[0].Name+"'"))
} else {
builder.Next(entity.NewCommand("unzip", "-q", "'"+fileDatasets[0].Name+"'"))
}
builder.Next(entity.NewCommand("ls", "-l"))
builder.Next(entity.NewCommand("echo", "'unzip datasets finished'"))
return builder
}
// multiple datasets
for i := 0; i < len(fileDatasets); i++ {
name := fileDatasets[i].Name
if strings.HasSuffix(name, ".tar.gz") {
builder.Next(entity.NewCommand("tar", "-zxvf", name))
} else {
builder.Next(entity.NewCommand("unzip", "-q", "'"+name+"'", "-d", "'./"+strings.TrimSuffix(name, ".zip")+"'"))
}
}
builder.Next(entity.NewCommand("ls", "-l"))
builder.Next(entity.NewCommand("echo", "'unzip datasets finished'"))
return builder
}

func buildDeleteUnzipDatasetCommand(builder *entity.CommandBuilder, datasets []entity.ContainerData, datasetPath, computeSource string) {
if computeSource == models.NPU {
return
}
if len(datasets) == 0 {
return
}
builder.Next(entity.NewCommand("cd", datasetPath)).
Next(entity.NewCommand("echo", "'start to delete unzip datasets'"))

fileDatasets := make([]entity.ContainerData, 0)
for _, dataset := range datasets {
if !dataset.IsDir {
fileDatasets = append(fileDatasets, dataset)
}
}
// single dataset
if len(fileDatasets) == 1 {

builder.Next(entity.NewCommand("find . ! -name", "'"+fileDatasets[0].Name+"'", "-type f -exec rm -f {} +"))
builder.Next(entity.NewCommand("find . -type d ! -name", "'"+fileDatasets[0].Name+"'", "-and ! -name . -and ! -name .. -exec rm -rf {} +"))

} else {
// multiple datasets
for i := 0; i < len(fileDatasets); i++ {

builder.Next(entity.NewCommand("rm", "-rf", "'"+getZipFileNameExcludeExt(fileDatasets[i].Name)+"'"))

}
}
builder.Next(entity.NewCommand("ls", "-l"))
builder.Next(entity.NewCommand("echo", "'delete unzip datasets finished'"))
}

func getZipFileNameExcludeExt(fileName string) string {
if strings.HasSuffix(fileName, ".tar.gz") {
return fileName[0 : len(fileName)-7]
} else if strings.HasSuffix(fileName, ".zip") {
return fileName[0 : len(fileName)-4]
}
return fileName
}

func buildExportCommand(jobName, computeResource string) *entity.CommandBuilder {
builder := &entity.CommandBuilder{}

if computeResource == models.NPU {
outputRemotePath := setting.CodePathPrefix + jobName + modelarts.OutputPath
builder.Next(entity.NewCommand("export", "bucket="+setting.Grampus.Env, "&&", "export", "remote_path="+outputRemotePath))
} else {
outputRemotePath := setting.CBCodePathPrefix + jobName + cloudbrain.ModelMountPath + "/"
builder.Next(entity.NewCommand("export", "env="+setting.Grampus.Env, "&&", "export", "remote_path="+outputRemotePath))
}
return builder
}

func buildExecCodeCommand(codeDirPath, modelFilePath, bootFile, computeResource, jobName string, params models.Parameters, datasets []entity.ContainerData, datasetPath string) *entity.CommandBuilder {
builder := &entity.CommandBuilder{}
builder.Next(entity.NewCommand("echo", "'start to exec code'"))

var paramCode string
for _, param := range params.Parameter {
paramCode += " --'" + param.Label + "'='" + param.Value + "'"
}
if computeResource == models.NPU {
builder.Next(entity.NewCommand("source", "/home/ma-user/.bashrc")).
Next(entity.NewCommand("export", "GLOG_v=3")).
Next(entity.NewCommand("export", "ASCEND_GLOBAL_LOG_LEVEL=3")).
Next(entity.NewCommand("export", "ASCEND_SLOG_PRINT_TO_STDOUT=0 ")).
Next(entity.NewCommand("export", "HCCL_CONNECT_TIMEOUT=3600")).
Next(entity.NewCommand("export", "HCCL_EXEC_TIMEOUT=1800")).
Next(entity.NewCommand("export", "PIPELINE_SLICE_SKIP_REDISTRIBUTION=1")).
Next(entity.NewCommand("export", "MS_DEV_REDUNDANCY_TASK_NUM=4")).
Next(entity.NewCommand("export", "MS_DEV_CELL_REUSE=2")).
Next(entity.NewCommand("python", "/home/ma-user/davinci/train/davincirun.py", "python", "/home/ma-user/grampus.py", paramCode))
} else if computeResource == models.GCU {
builder.Next(entity.NewCommand("cd", codeDirPath))
if modelFilePath != "" {
builder.Next(entity.NewCommand("python3", bootFile, paramCode, "--ckpt_url='"+modelFilePath+"'"))
} else {
builder.Next(entity.NewCommand("python3", bootFile, paramCode))
}
} else {
builder.Next(entity.NewCommand("cd", codeDirPath))
if modelFilePath != "" {
builder.Next(entity.NewCommand("python", bootFile, paramCode, "--ckpt_url='"+modelFilePath+"'"))
} else {
builder.Next(entity.NewCommand("python", bootFile, paramCode))
}
}

builder.Next(entity.NewCommand("result=$?"))
// delete the unzipped datasets
buildDeleteUnzipDatasetCommand(builder, datasets, datasetPath, computeResource)

builder.Next(entity.NewCommand("bash", "-c", "\"[[ $result -eq 0 ]] && exit 0 || exit -1\""))
return builder
}

func buildParamCommand(outputRemotePath, computeResource string) *entity.CommandBuilder {
builder := &entity.CommandBuilder{}
builder.Next(entity.NewCommand("echo", "'start to exec code'"))

if computeResource == models.NPU {
builder.Next(entity.NewCommand("export", "bucket="+setting.Grampus.Env, "&&", "export", "remote_path="+outputRemotePath))
} else {
builder.Next(entity.NewCommand("export", "env="+setting.Grampus.Env, "&&", "export", "remote_path="+outputRemotePath))
}

return builder
}

var BucketRemote = "grampus"
var RemoteModelPath = "/output"

func getNpuModelRemoteObsUrl(jobName string) string {
return "s3:///" + BucketRemote + "/" + getNpuModelObjectKey(jobName)
}

func getNpuModelObjectKey(jobName string) string {
return setting.CodePathPrefix + jobName + RemoteModelPath + "/" + models.ModelSuffix
}

func convertTrainTask2Grampus(t entity.TrainTask, command string) (models.GrampusTasks, error) {
centerIds, err := getGrampusAvailableCenterIds(t.Queues, t.ImageId, *models.GetComputeSourceInstance(t.Spec.ComputeResource), models.JobTypeTrain)
if err != nil {
@@ -749,6 +581,7 @@ func convertTrainTask2Grampus(t entity.TrainTask, command string) (models.Grampu
BootFile: t.BootFile,
OutPut: convertContainerArray2Grampus(t.OutPut),
WorkServerNumber: t.WorkServerNumber,
RunParams: convertParameters2Grampus(t.Params),
}, nil
}

@@ -869,7 +702,7 @@ func (c C2NetClusterAdapter) GetLog(opts entity.ClusterLogOpts) (*entity.Cluster
}

func getOnlineInferenceLog(opts entity.ClusterLogOpts) string {
helper := storage_helper.SelectUploaderFromStorageType(entity.MINIO)
helper := storage_helper.SelectStorageHelperFromStorageType(entity.MINIO)
// locate the log files
files := getLogFilesInStorage(helper, helper.GetJobDefaultObjectKeyPrefix(opts.JobName)+"/model", ".txt")
if len(files) == 0 {
@@ -924,7 +757,7 @@ func (c C2NetClusterAdapter) GetLogDownloadInfo(opts entity.ClusterLogDownloadIn
}

func (c C2NetClusterAdapter) GetSingleOutputDownloadInfo(opts entity.ClusterSingleOutputDownloadInfoOpts) (*entity.FileDownloadInfo, error) {
helper := storage_helper.SelectUploaderFromStorageType(opts.StorageType)
helper := storage_helper.SelectStorageHelperFromStorageType(opts.StorageType)
url, err := helper.GetSignedDownloadUrl(opts.Path)
if err != nil {
log.Error("GetSignedDownloadUrl err.opts=%+v,err =%v", opts, err)
@@ -1040,7 +873,7 @@ func (c C2NetClusterAdapter) GetOutput(opts entity.ClusterOutputOpts) (*entity.C
}, nil
}

helper := storage_helper.SelectUploaderFromStorageType(opts.StorageType)
helper := storage_helper.SelectStorageHelperFromStorageType(opts.StorageType)
fileList, err := helper.GetOneLevelObjectsUnderDir(path.Join(opts.ObjectKeyPrefix, opts.ParentDir))
if err != nil {
log.Error("GetOneLevelObjectsUnderDir err.objectKeyPrefix=%s,err=%v", opts.ObjectKeyPrefix, err)
@@ -1063,7 +896,7 @@ func (c C2NetClusterAdapter) GetAllOutput(opts entity.ClusterOutputOpts) (*entit
return &entity.AllAITaskOutput{FileList: []storage.FileInfo{}}, nil
}

helper := storage_helper.SelectUploaderFromStorageType(opts.StorageType)
helper := storage_helper.SelectStorageHelperFromStorageType(opts.StorageType)
fileList, err := helper.GetAllObjectsUnderDir(path.Join(opts.ObjectKeyPrefix, opts.ParentDir))
if err != nil {
log.Error("GetOneLevelObjectsUnderDir err.objectKeyPrefix=%s,err=%v", opts.ObjectKeyPrefix, err)


+ 14
- 13
services/ai_task_service/cluster/cloudbrain_one.go View File

@@ -2,18 +2,19 @@ package cluster

import "C"
import (
"code.gitea.io/gitea/entity"
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/cloudbrain"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/services/ai_task_service/storage_helper"
"encoding/json"
"errors"
"fmt"
"io/ioutil"
"path"
"strings"

"code.gitea.io/gitea/entity"
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/cloudbrain"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/services/ai_task_service/storage_helper"
)

type CloudbrainOneClusterAdapter struct {
@@ -236,7 +237,7 @@ func parseDiagnosticsToOperationProfile(appExitDiagnostics string, exitDiagnosti
func (c CloudbrainOneClusterAdapter) CreateTrainJob(req entity.CreateTrainTaskRequest) (*entity.CreateTrainTaskResponse, error) {
jobResult, err := cloudbrain.CreateJob(req.Name, convertTrainJobReq2CloudbrainOne(req))
if err != nil {
log.Error("CreateNoteBook failed: %v", err.Error())
log.Error("CreateTrainJob failed: %v", err.Error())
return nil, err
}
return convertCloudbrainOne2TrainJobRes(jobResult), nil
@@ -348,7 +349,7 @@ func (c CloudbrainOneClusterAdapter) GetLog(opts entity.ClusterLogOpts) (*entity
// get the task exit info
existStr := getCloudbrainOneExitDiagnostics(opts.JobId)

helper := storage_helper.SelectUploaderFromStorageType(opts.StorageType)
helper := storage_helper.SelectStorageHelperFromStorageType(opts.StorageType)

// locate the log files
files := getLogFilesInStorage(helper, opts.ObjectKeyPrefix, "log.txt")
@@ -389,7 +390,7 @@ func (c CloudbrainOneClusterAdapter) GetLogDownloadInfo(opts entity.ClusterLogDo
// get the task exit info
existStr := getCloudbrainOneExitDiagnostics(opts.JobId)

helper := storage_helper.SelectUploaderFromStorageType(opts.StorageType)
helper := storage_helper.SelectStorageHelperFromStorageType(opts.StorageType)

// locate the log files
files := getLogFilesInStorage(helper, opts.ObjectKeyPrefix, "log.txt")
@@ -428,7 +429,7 @@ func (c CloudbrainOneClusterAdapter) GetLogDownloadInfo(opts entity.ClusterLogDo
}

func (c CloudbrainOneClusterAdapter) GetSingleOutputDownloadInfo(opts entity.ClusterSingleOutputDownloadInfoOpts) (*entity.FileDownloadInfo, error) {
helper := storage_helper.SelectUploaderFromStorageType(opts.StorageType)
helper := storage_helper.SelectStorageHelperFromStorageType(opts.StorageType)
url, err := helper.GetSignedDownloadUrl(opts.Path)
if err != nil {
log.Error("GetSignedDownloadUrl err.opts=%+v,err =%v", opts, err)
@@ -492,7 +493,7 @@ func getCloudbrainOneExitDiagnostics(jobId string) string {
return ""
}

//findStartAndEnd computes the start and end lines from baseLine, moving up or down according to the direction
// findStartAndEnd computes the start and end lines from baseLine, moving up or down according to the direction
func findStartAndEnd(opts entity.ClusterLogOpts, filePath string, helper storage_helper.StorageHelper) (startLine int64, endLine int64) {
baseLine := opts.BaseLine
if opts.Direction == entity.UP {
@@ -517,7 +518,7 @@ func (c CloudbrainOneClusterAdapter) GetTrainJobOperationProfile(jobId string) (
}

func (c CloudbrainOneClusterAdapter) GetOutput(opts entity.ClusterOutputOpts) (*entity.ClusterAITaskOutput, error) {
helper := storage_helper.SelectUploaderFromStorageType(opts.StorageType)
helper := storage_helper.SelectStorageHelperFromStorageType(opts.StorageType)
fileList, err := helper.GetOneLevelObjectsUnderDir(path.Join(opts.ObjectKeyPrefix, opts.ParentDir))
if err != nil {
log.Error("GetOneLevelObjectsUnderDir err.objectKeyPrefix=%s,err=%v", opts.ObjectKeyPrefix, err)
@@ -531,7 +532,7 @@ func (c CloudbrainOneClusterAdapter) GetOutput(opts entity.ClusterOutputOpts) (*
}

func (c CloudbrainOneClusterAdapter) GetAllOutput(opts entity.ClusterOutputOpts) (*entity.AllAITaskOutput, error) {
helper := storage_helper.SelectUploaderFromStorageType(opts.StorageType)
helper := storage_helper.SelectStorageHelperFromStorageType(opts.StorageType)
fileList, err := helper.GetAllObjectsUnderDir(path.Join(opts.ObjectKeyPrefix, opts.ParentDir))
if err != nil {
log.Error("GetAllObjectsUnderDir err.objectKeyPrefix=%s,err=%v", opts.ObjectKeyPrefix, err)


+ 62
- 12
services/ai_task_service/cluster/cloudbrain_two.go View File

@@ -31,6 +31,17 @@ func init() {
func (c CloudbrainTwoClusterAdapter) CreateNoteBook(req entity.CreateNoteBookTaskRequest) (*entity.CreateNoteBookTaskResponse, error) {
t := req.Tasks[0]

appUrl := JointCloudbrainTwoReqUrl(t.Code)
if appUrl != "" {
appUrl = "s3:/" + appUrl
}
trainUrl := JointCloudbrainTwoReqUrl(t.OutPut)
if trainUrl != "" {
trainUrl = "s3:/" + trainUrl
}
datasetUrl := getCloudbrainTwoMultiDataUrl(t.Datasets)
multiModelUrl := getCloudbrainTwoModelUrl(t.PreTrainModel)

var jobResult *models.CreateNotebookResult
var err error
if setting.ModelartsCD.Enabled {
@@ -62,6 +73,20 @@ func (c CloudbrainTwoClusterAdapter) CreateNoteBook(req entity.CreateNoteBookTas
Category: models.EVSCategory,
Ownership: models.ManagedOwnership,
},
EnvVariables: models.CloudBrain2EnvVarReq{
CodeObsUrl: appUrl,
DatasetObsUrl: datasetUrl,
PretrainedModelObsUrl: multiModelUrl,
OutputObsUrl: trainUrl,
LocalCodePath: models.LocalCodePath,
LocalDatasetPath: models.LocalDatasetPath,
LocalPretrainModelPath: models.LocalPretrainModelPath,
LocalOutputPath: models.LocalOutputPath,
DataDownloadMethod: models.DataDownloadMethodMoxing,
CodeNeedUnzip: models.CodeNeedUnzipTrue,
DatasetNeedUnzip: models.DatasetNeedUnzipTrue,
PretrainModelNeedUnzip: models.PretrainModelNeedUnzipFalse,
},
WorkspaceID: "0",
})
}
@@ -229,13 +254,20 @@ func (c CloudbrainTwoClusterAdapter) QueryNoteBook(opts entity.JobIdAndVersionId
log.Error("GetNotebook(%s) from cloudbrain 2 failed:result is empty", task.DisplayJobName)
return nil, errors.New("result is empty")
}
return convertCloudbrainTwo2QueryRes(result), nil
return convertCloudbrainTwo2QueryRes(result, task), nil
}

func convertCloudbrainTwo2QueryRes(res *models.GetNotebook2Result) *entity.QueryTaskResponse {
func convertCloudbrainTwo2QueryRes(res *models.GetNotebook2Result, task *models.Cloudbrain) *entity.QueryTaskResponse {
startedAt := timeutil.TimeStamp(0)
if res.Lease.UpdateTime > 0 {
startedAt = timeutil.TimeStamp(res.Lease.UpdateTime / 1000)

if task.IsRestartTask() {
if res.Lease.UpdateTime > 0 {
startedAt = timeutil.TimeStamp(res.Lease.UpdateTime / 1000)
}
} else {
if res.Lease.CreateTime > 0 {
startedAt = timeutil.TimeStamp(res.Lease.CreateTime / 1000)
}
}
completedAt := timeutil.TimeStamp(0)
if models.IsCloudbrainTerminalStatus(res.Status) {
@@ -516,7 +548,25 @@ func getCloudbrainTwoUserCommand(appUrl, bootFile, dataUrl, trainUrl string, par
if len(tmpCodeObsPaths) > 0 {
lastCodeDir = tmpCodeObsPaths[len(tmpCodeObsPaths)-1]
}
userCommand = "/bin/bash /home/work/run_train.sh 's3://" + appUrl + "' '" + lastCodeDir + "/" + bootFile + "' '/tmp/log/train.log' --'data_url'='s3://" + dataUrl + "' --'train_url'='s3://" + trainUrl + "'"
var multi_data_url string
var pretrain_url string
for _, param := range params.Parameter {
if param.Label == "multi_data_url" {
multi_data_url = string(param.Value)
}
if param.Label == "pretrain_url" {
pretrain_url = string(param.Value)
}
}
// Configure environment variables so the training script can use the c2net SDK to fetch the code, datasets, models, etc.
var envCodeCommand = "export CODE_URL=" + "s3://" + appUrl + ";" + "export LOCAL_CODE_PATH=/cache/code;"
var envDataCommand = "export DATASET_URL=" + "'" + multi_data_url + "'" + ";" + "export LOCAL_DATASET_PATH=/cache/dataset;"
var envPretrainCommand = "export PRETRAIN_MODEL_URL=" + "'" + pretrain_url + "'" + ";" + "export LOCAL_PRETRAIN_MODEL_PATH=/cache/pretrainmodel;"
var envOutputCommand = "export OUTPUT_URL=" + "s3://" + trainUrl + ";" + "export LOCAL_OUTPUT_PATH=/cache/output;"
var envMoxingCommand = "export DATA_DOWNLOAD_METHOD=MOXING;"
var envNeedUnzipCommand = "export CODE_NEED_UNZIP=false;export DATASET_NEED_UNZIP=false;export PRETRAIN_MODEL_NEED_UNZIP=false;"
var envCommand = envCodeCommand + envDataCommand + envPretrainCommand + envOutputCommand + envMoxingCommand + envNeedUnzipCommand
userCommand = envCommand + "/bin/bash /home/work/run_train.sh 's3://" + appUrl + "' '" + lastCodeDir + "/" + bootFile + "' '/tmp/log/train.log' --'data_url'='s3://" + dataUrl + "' --'train_url'='s3://" + trainUrl + "'"
for _, param := range params.Parameter {
userCommand += " --'" + param.Label + "'='" + param.Value + "'"
}
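
For illustration only (made-up bucket and job names; the dataset and pretrain-model exports are omitted), a trimmed sketch of how the exported prefix is assembled before run_train.sh is invoked:

// Hypothetical sketch mirroring the concatenation above; not part of this diff.
func exampleEnvCommandPrefix() string {
	appUrl := "demo-bucket/ai-task/job123/code/"
	trainUrl := "demo-bucket/ai-task/job123/output/"
	return "export CODE_URL=s3://" + appUrl + ";export LOCAL_CODE_PATH=/cache/code;" +
		"export OUTPUT_URL=s3://" + trainUrl + ";export LOCAL_OUTPUT_PATH=/cache/output;" +
		"export DATA_DOWNLOAD_METHOD=MOXING;" +
		"export CODE_NEED_UNZIP=false;export DATASET_NEED_UNZIP=false;export PRETRAIN_MODEL_NEED_UNZIP=false;"
}
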
@@ -538,12 +588,12 @@ func getCloudbrainTwoMultiDataUrl(datasets []entity.ContainerData) string {
return string(jsondata)
}

func getCloudbrainTwoModelUrl(datasets []entity.ContainerData) string {
if len(datasets) == 0 {
func getCloudbrainTwoModelUrl(pretrainModels []entity.ContainerData) string {
if len(pretrainModels) == 0 {
return ""
}
var modelUrlList []models.ModelUrls
for _, d := range datasets {
for _, d := range pretrainModels {
modelUrlList = append(modelUrlList, models.ModelUrls{
ModelUrl: d.S3DownloadUrl,
ModelName: d.Name,
@@ -725,7 +775,7 @@ func getModelartsTrainJob(jobID string, versionID int64, baseLine string, order

func (c CloudbrainTwoClusterAdapter) GetLogDownloadInfo(opts entity.ClusterLogDownloadInfoOpts) (*entity.FileDownloadInfo, error) {
var err error
helper := storage_helper.SelectUploaderFromStorageType(opts.StorageType)
helper := storage_helper.SelectStorageHelperFromStorageType(opts.StorageType)

// locate the log files
files := getLogFilesInStorage(helper, opts.ObjectKeyPrefix, ".log")
@@ -779,7 +829,7 @@ func (c CloudbrainTwoClusterAdapter) GetLogDownloadInfo(opts entity.ClusterLogDo
}

func (c CloudbrainTwoClusterAdapter) GetSingleOutputDownloadInfo(opts entity.ClusterSingleOutputDownloadInfoOpts) (*entity.FileDownloadInfo, error) {
helper := storage_helper.SelectUploaderFromStorageType(opts.StorageType)
helper := storage_helper.SelectStorageHelperFromStorageType(opts.StorageType)
url, err := helper.GetSignedDownloadUrl(opts.Path)
if err != nil {
log.Error("GetSignedDownloadUrl err.opts=%+v,err =%v", opts, err)
@@ -849,7 +899,7 @@ func (c CloudbrainTwoClusterAdapter) GetNodeInfo(opts entity.ClusterNodeInfoOpts
}

func (c CloudbrainTwoClusterAdapter) GetOutput(opts entity.ClusterOutputOpts) (*entity.ClusterAITaskOutput, error) {
helper := storage_helper.SelectUploaderFromStorageType(opts.StorageType)
helper := storage_helper.SelectStorageHelperFromStorageType(opts.StorageType)
fileList, err := helper.GetOneLevelObjectsUnderDir(path.Join(opts.ObjectKeyPrefix, opts.ParentDir))
if err != nil {
log.Error("GetOneLevelObjectsUnderDir err.objectKeyPrefix=%s,err=%v", opts.ObjectKeyPrefix, err)
@@ -863,7 +913,7 @@ func (c CloudbrainTwoClusterAdapter) GetOutput(opts entity.ClusterOutputOpts) (*
}

func (c CloudbrainTwoClusterAdapter) GetAllOutput(opts entity.ClusterOutputOpts) (*entity.AllAITaskOutput, error) {
helper := storage_helper.SelectUploaderFromStorageType(opts.StorageType)
helper := storage_helper.SelectStorageHelperFromStorageType(opts.StorageType)
fileList, err := helper.GetAllObjectsUnderDir(path.Join(opts.ObjectKeyPrefix, opts.ParentDir))
if err != nil {
log.Error("GetAllObjectsUnderDir err.objectKeyPrefix=%s,err=%v", opts.ObjectKeyPrefix, err)


+ 1
- 1
services/ai_task_service/cluster/common.go View File

@@ -97,7 +97,7 @@ func getLogFilesInStorage(helper storage_helper.StorageHelper, objectKeyPrefix s
}

func DownloadAllOutput(opts entity.DownloadOutputOpts) error {
helper := storage_helper.SelectUploaderFromStorageType(opts.StorageType)
helper := storage_helper.SelectStorageHelperFromStorageType(opts.StorageType)
var err error
fileList, err := helper.GetAllObjectsUnderDir(opts.Path)
if err != nil {


+ 2
- 1
services/ai_task_service/container_builder/code_builder.go View File

@@ -45,7 +45,7 @@ func (b *CodeBuilder) Build(ctx *context.CreationContext) ([]entity.ContainerDat
jobName := ctx.Request.JobName
repo := ctx.Repository
codeLocalPath := setting.JobPath + jobName + cloudbrain.CodeMountPath + "/"
uploader := storage_helper.SelectUploaderFromStorageType(storageTypes[0])
uploader := storage_helper.SelectStorageHelperFromStorageType(storageTypes[0])

remoteDir := uploader.GetJobDefaultObjectKeyPrefix(jobName) + opts.GetLocalPath()
// Restarted debug tasks and online-run notebooks do not need to download or upload the code
@@ -93,6 +93,7 @@ func (b *CodeBuilder) Build(ctx *context.CreationContext) ([]entity.ContainerDat
RealPath: uploader.GetRealPath(objectKey),
IsDir: b.Opts.Uncompressed,
S3DownloadUrl: uploader.GetS3DownloadUrl(objectKey),
IsNeedUnzip: true,
StorageType: storageTypes[0],
}
return []entity.ContainerData{codeData}, nil


+ 6
- 6
services/ai_task_service/container_builder/common.go View File

@@ -2,16 +2,17 @@ package container_builder

import (
"bufio"
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/services/ai_task_service/context"
"code.gitea.io/gitea/services/ai_task_service/storage_helper"
"errors"
"io"
"io/ioutil"
"os"
"strings"

"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/services/ai_task_service/context"
"code.gitea.io/gitea/services/ai_task_service/storage_helper"
)

func DownloadCode(ctx *context.CreationContext, codeLocalPath string, uncompressed bool) error {
@@ -23,7 +24,6 @@ func DownloadCode(ctx *context.CreationContext, codeLocalPath string, uncompress
}
}
var commitId string

// The code needs to be downloaded when the directory is empty
if len(dir) == 0 {
if uncompressed {


+ 25
- 21
services/ai_task_service/container_builder/dataset_builder.go View File

@@ -26,22 +26,6 @@ func (b *DatasetBuilder) SetOpts(opts *entity.ContainerBuildOpts) {
}

func (b *DatasetBuilder) Build(ctx *context.CreationContext) ([]entity.ContainerData, *response.BizError) {
if b.Opts.Disable {
return nil, nil
}
uuid := ctx.Request.DatasetUUIDStr
if uuid == "" {
return nil, nil
}
datasetInfos, err := models.GetDatasetInfo4AITask(uuid)
if err != nil {
log.Error("GetDatasetInfo failed: %v", err)
return nil, response.DATASET_SELECT_ERROR
}
if len(datasetInfos) < len(strings.Split(uuid, ";")) {
log.Error("GetDatasetInfo count error.displayJobName=%s jobType=%s cluster=%s", ctx.Request.DisplayJobName, ctx.Request.JobType, ctx.Request.Cluster)
return nil, response.PARTIAL_DATASETS_NOT_AVAILABLE
}
var data []entity.ContainerData

// For C2Net GPU debug tasks the dataset directory must also be mounted, so that the files under dataset are not committed into the image when the image is submitted
@@ -49,7 +33,7 @@ func (b *DatasetBuilder) Build(ctx *context.CreationContext) ([]entity.Container
log.Info("mount dataset directory.")
jobName := ctx.Request.JobName
storageTypes := b.Opts.AcceptStorageType
uploader := storage_helper.SelectUploaderFromStorageType(storageTypes[0])
uploader := storage_helper.SelectStorageHelperFromStorageType(storageTypes[0])
remoteDir := path.Join(uploader.GetJobDefaultObjectKeyPrefix(jobName), b.Opts.GetLocalPath())
err := uploader.MKDIR(remoteDir)
if err != nil {
@@ -72,9 +56,27 @@ func (b *DatasetBuilder) Build(ctx *context.CreationContext) ([]entity.Container
GetBackEndpoint: uploader.GetEndpoint(),
IsDir: true,
StorageType: storageTypes[0],
IsNeedUnzip: true,
})
}

if b.Opts.Disable {
return data, nil
}
uuid := ctx.Request.DatasetUUIDStr
if uuid == "" {
return data, nil
}
datasetInfos, err := models.GetDatasetInfo4AITask(uuid)
if err != nil {
log.Error("GetDatasetInfo failed: %v", err)
return nil, response.DATASET_SELECT_ERROR
}
if len(datasetInfos) < len(strings.Split(uuid, ";")) {
log.Error("GetDatasetInfo count error.displayJobName=%s jobType=%s cluster=%s", ctx.Request.DisplayJobName, ctx.Request.JobType, ctx.Request.Cluster)
return nil, response.PARTIAL_DATASETS_NOT_AVAILABLE
}

for _, datasetInfo := range datasetInfos {
var name, objectKey, s3DownloadUrl string
// If the dataset is not an archive, the file name is the dataset name with the suffix removed
@@ -89,10 +91,10 @@ func (b *DatasetBuilder) Build(ctx *context.CreationContext) ([]entity.Container
}
//Cloudbrain One training tasks with a single dataset are a special case: the dataset is mounted without a parent folder named after it, so special handling was applied here
//todo AITask: remove this special handling
if ctx.Request.Cluster == entity.OpenICloudbrainOne &&
ctx.Request.JobType == models.JobTypeTrain && len(datasetInfos) == 1 {
name = ""
}
// if ctx.Request.Cluster == entity.OpenICloudbrainOne &&
// ctx.Request.JobType == models.JobTypeTrain && len(datasetInfos) == 1 {
// name = ""
// }
if datasetInfo.Type == models.TypeCloudBrainOne {
data = append(data, entity.ContainerData{
Name: name,
@@ -105,6 +107,7 @@ func (b *DatasetBuilder) Build(ctx *context.CreationContext) ([]entity.Container
IsDir: b.Opts.Uncompressed,
Size: datasetInfo.Size,
StorageType: entity.MINIO,
IsNeedUnzip: true,
})

} else {
@@ -119,6 +122,7 @@ func (b *DatasetBuilder) Build(ctx *context.CreationContext) ([]entity.Container
IsDir: b.Opts.Uncompressed,
Size: datasetInfo.Size,
StorageType: entity.OBS,
IsNeedUnzip: true,
})
}
}


+ 1
- 1
services/ai_task_service/container_builder/log_path_builder.go View File

@@ -33,7 +33,7 @@ func (b *LogPathBuilder) Build(ctx *context.CreationContext) ([]entity.Container

jobName := ctx.Request.JobName

uploader := storage_helper.SelectUploaderFromStorageType(storageTypes[0])
uploader := storage_helper.SelectStorageHelperFromStorageType(storageTypes[0])
remoteDir := path.Join(uploader.GetJobDefaultObjectKeyPrefix(jobName), b.Opts.GetLocalPath())
if b.Opts.MKDIR {
err := uploader.MKDIR(remoteDir)


+ 1
- 1
services/ai_task_service/container_builder/output_path_builder.go View File

@@ -35,7 +35,7 @@ func (b *OutputPathBuilder) Build(ctx *context.CreationContext) ([]entity.Contai

jobName := ctx.Request.JobName

uploader := storage_helper.SelectUploaderFromStorageType(storageTypes[0])
uploader := storage_helper.SelectStorageHelperFromStorageType(storageTypes[0])
remoteDir := path.Join(uploader.GetJobDefaultObjectKeyPrefix(jobName), b.Opts.GetLocalPath())
if b.Opts.MKDIR {
err := uploader.MKDIR(remoteDir)


+ 86
- 104
services/ai_task_service/container_builder/pre_model_builder.go View File

@@ -1,20 +1,17 @@
package container_builder

import (
"code.gitea.io/gitea/routers/response"
"fmt"
"code.gitea.io/gitea/services/ai_model"
"path"
"strings"

"code.gitea.io/gitea/routers/response"

"code.gitea.io/gitea/entity"
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/cloudbrain"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/storage"
"code.gitea.io/gitea/services/ai_task_service/context"
"code.gitea.io/gitea/services/ai_task_service/storage_helper"
"code.gitea.io/gitea/services/cloudbrain/cloudbrainTask"
)

type PretrainModelBuilder struct {
@@ -31,135 +28,120 @@ func (b *PretrainModelBuilder) SetOpts(opts *entity.ContainerBuildOpts) {
}

func (b *PretrainModelBuilder) Build(ctx *context.CreationContext) ([]entity.ContainerData, *response.BizError) {
form := ctx.Request
var preTrainModelEntity []entity.ContainerData
if ctx.Request.Cluster == entity.C2Net && (ctx.Request.JobType == models.JobTypeDebug || ctx.Request.JobType == models.JobTypeTrain) && ctx.Request.ComputeSource.Name == models.GPU {
// Mount a directory to ensure the pretrainmodel directory inside the container is not packaged when the image is committed
uploader := storage_helper.SelectStorageHelperFromStorageType(entity.OBS)
objectKey := path.Join(uploader.GetJobDefaultObjectKeyPrefix(form.JobName), "pretrain_model_mount")
uploader.MKDIR(objectKey, "pretrain model folder")
preTrainModelEntity = append(preTrainModelEntity, entity.ContainerData{
Name: "pretrain_model_mount",
Bucket: uploader.GetBucket(),
EndPoint: uploader.GetEndpoint(),
ObjectKey: objectKey + "/",
ReadOnly: false,
ContainerPath: b.Opts.ContainerPath,
RealPath: uploader.GetRealPath(objectKey),
S3DownloadUrl: uploader.GetS3DownloadUrl(objectKey),
IsDir: true,
IsOverwrite: true,
IsNeedUnzip: false,
})
}

if b.Opts.Disable {
return nil, nil
return preTrainModelEntity, nil
}
form := ctx.Request
storageTypes := b.Opts.AcceptStorageType
if storageTypes == nil || len(storageTypes) == 0 {
return nil, response.SYSTEM_ERROR
}
// No pretrained model selected, skip this step
if form.PretrainModelName == "" {
return nil, nil
}
if form.PretrainModelId == "" {
// abnormal data; in theory every record should have a modelId
return nil, response.RESULT_CLEARD
return preTrainModelEntity, nil
}
// query the model data
m, err := models.QueryModelById(form.PretrainModelId)
if err != nil {
uuids := strings.Split(form.PretrainModelId, ";")
modelInfoMaps, err := models.QueryModelMapsByIds(uuids)
if err != nil || len(modelInfoMaps) == 0 {
log.Error("Can not find model", err)
return nil, response.MODEL_NOT_EXISTS
}
preTrainModelUrl := m.Path
if err != nil {
log.Error("Can not find model", err)
return nil, response.MODEL_NOT_EXISTS

for _, m := range modelInfoMaps {
ai_model.InitModelMeta(m.ID)
data, err := b.buildModelData(m, form.JobName)
if err != nil {
return nil, response.SYSTEM_ERROR
}
preTrainModelEntity = append(preTrainModelEntity, data)
}
// model file storage type

return preTrainModelEntity, nil
}

func (b *PretrainModelBuilder) GetContainerType() entity.ContainerDataType {
return entity.ContainerPreTrainModel
}

const MODEL_MKDIR_README = "The model files have already been loaded into the container and are ready for use.\n"

func (b *PretrainModelBuilder) buildModelData(m *models.AiModelManage, jobName string) (entity.ContainerData, *response.BizError) {
oldStorageType := entity.GetStorageTypeFromCloudbrainType(m.Type)
if oldStorageType == "" {
log.Error("model storage type error.modelId=%d", m.ID)
return nil, response.SYSTEM_ERROR
return entity.ContainerData{}, response.SYSTEM_ERROR
}
oldStorageHelper := storage_helper.SelectStorageHelperFromStorageType(oldStorageType)

var preTrainModelPath string
var preTrainModelEntity []entity.ContainerData
preTrainModelPath := getPreTrainModelPath(m.Path)
storageType := oldStorageType
ckptNames := strings.Split(form.PretrainModelCkptName, ";")
for _, ckptName := range ckptNames {
isExists, size := cloudbrainTask.CheckAndGetFileSize(m, ckptName)
if !isExists {
log.Error("model file not exist.name = %s", ckptName)
return nil, response.MODEL_NOT_EXISTS
}
preTrainModelPath = getPreTrainModelPath(preTrainModelUrl, ckptName)
if !b.Opts.IsStorageTypeIn(oldStorageType) {
// The model's previous storage location does not meet the requirements, so transfer it to the designated storage
newStorageType := b.Opts.AcceptStorageType[0]
// TODO: can be optimized
if newStorageType == entity.MINIO && oldStorageType == entity.OBS {
// reuse the previous code
minioPreModelURL, err := dealModelInfo(form.PretrainModelId, form.JobName, ckptName)
if err != nil {
log.Error("Can not find model,modelId=%d err=%v", form.PretrainModelId, err)
return nil, response.MODEL_NOT_EXISTS
}
preTrainModelUrl = minioPreModelURL
preTrainModelPath = getPreTrainModelPath(minioPreModelURL, ckptName)
storageType = entity.MINIO
if !b.Opts.IsStorageTypeIn(oldStorageType) {
// The model's previous storage location does not meet the requirements, so transfer it to the designated storage
newStorageType := b.Opts.AcceptStorageType[0]
newStorageHelper := storage_helper.SelectStorageHelperFromStorageType(newStorageType)
files, err := oldStorageHelper.GetAllObjectsUnderDir(preTrainModelPath)
newObjectPrefix := path.Join(newStorageHelper.GetJobDefaultObjectKeyPrefix(jobName), b.Opts.GetLocalPath(), m.Name)
for _, file := range files {
newFilePath := path.Join(newObjectPrefix, file.FileName)
err = storage_helper.TransferFileBetweenStorage(oldStorageHelper, newStorageHelper, file.RelativePath, newFilePath)
if err != nil {
log.Error("transfer file between storage error.model=%+v file=%+v err=%v", m, file, err)
return entity.ContainerData{}, response.SYSTEM_ERROR
}
}
uploader := storage_helper.SelectUploaderFromStorageType(storageType)
modelData := entity.ContainerData{
Name: ckptName,
Bucket: uploader.GetBucket(),
EndPoint: uploader.GetEndpoint(),
ObjectKey: preTrainModelPath,
ReadOnly: b.Opts.ReadOnly,
ContainerPath: path.Join(b.Opts.ContainerPath, ckptName),
RealPath: uploader.GetRealPath(preTrainModelPath),
S3DownloadUrl: uploader.GetS3DownloadUrl(preTrainModelPath),
IsDir: false,
Size: size,
IsOverwrite: true,
}
preTrainModelEntity = append(preTrainModelEntity, modelData)
preTrainModelPath = newObjectPrefix
storageType = newStorageType
}
form.PreTrainModelUrl = preTrainModelUrl
return preTrainModelEntity, nil
}

func (b *PretrainModelBuilder) GetContainerType() entity.ContainerDataType {
return entity.ContainerPreTrainModel
uploader := storage_helper.SelectStorageHelperFromStorageType(storageType)
uploader.MKDIR(preTrainModelPath, MODEL_MKDIR_README)
modelData := entity.ContainerData{
Name: m.Name,
Bucket: uploader.GetBucket(),
EndPoint: uploader.GetEndpoint(),
ObjectKey: preTrainModelPath,
ReadOnly: false,
ContainerPath: path.Join(b.Opts.ContainerPath, m.Name),
RealPath: uploader.GetRealPath(preTrainModelPath),
S3DownloadUrl: uploader.GetS3DownloadUrl(preTrainModelPath),
IsDir: true,
Size: m.Size,
IsOverwrite: true,
IsNeedUnzip: false,
}
return modelData, nil
}

func getPreTrainModelPath(pretrainModelDir string, fileName string) string {
func getPreTrainModelPath(pretrainModelDir string) string {
index := strings.Index(pretrainModelDir, "/")
if index > 0 {
filterBucket := pretrainModelDir[index+1:]
return filterBucket + fileName
return filterBucket
} else {
return ""
}

}

func dealModelInfo(modelId string, jobName string, ckptName string) (string, error) {
preModel, err := models.QueryModelById(modelId)
if err != nil || preModel == nil || preModel.ID == "" {
log.Error("Can not find model", err)
return "", fmt.Errorf("Can not find model: %v", ckptName)
}
minioPreModelURL, err := downloadModelFromObs(preModel, jobName, cloudbrain.PretrainModelMountPath, ckptName)
if err != nil {
log.Error("Can not find model", err)

return "", err
}
return minioPreModelURL, nil
}

func downloadModelFromObs(preModel *models.AiModelManage, jobName, suffixPath string, ckptFileName string) (string, error) {
destPath := setting.CBCodePathPrefix + jobName + suffixPath + "/"
destFile := destPath + ckptFileName
returnStr := setting.Attachment.Minio.Bucket + "/" + destPath
srcUrl := preModel.Path[len(setting.Bucket)+1:] + ckptFileName
log.Info("dest model Path=" + returnStr + " src path=" + preModel.Path + ckptFileName)
body, err := storage.ObsDownloadAFile(setting.Bucket, srcUrl)
if err == nil {
defer body.Close()
_, err = storage.Attachments.UploadContent(setting.Attachment.Minio.Bucket, destFile, body)
if err != nil {
log.Error("UploadObject(%s) failed: %s", preModel.Path+ckptFileName, err.Error())
return "", err
}
} else {
log.Info("download model failed. as " + err.Error())
return "", err
}
log.Info("download model from obs succeed")
return returnStr, nil
}

+ 4
- 2
services/ai_task_service/context/context.go View File

@@ -1,10 +1,11 @@
package context

import (
"encoding/json"

"code.gitea.io/gitea/entity"
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/git"
"encoding/json"
)

type CreationContext struct {
@@ -13,13 +14,13 @@ type CreationContext struct {
GitRepo *git.Repository
Repository *models.Repository
Spec *models.Specification
Queues []models.ResourceQueue
User *models.User
CommitID string
Response *entity.CreationResponse
SourceCloudbrain *models.Cloudbrain
NewCloudbrain *models.Cloudbrain
Config *entity.AITaskBaseConfig
Queues []models.ResourceQueue
}

func (ctx *CreationContext) AddContainerData(t entity.ContainerDataType, d []entity.ContainerData) {
@@ -63,6 +64,7 @@ func (ctx *CreationContext) BuildCloudbrainConfig() *models.CloudbrainConfig {
output := ctx.GetContainerData(entity.ContainerOutPutPath)
log := ctx.GetContainerData(entity.ContainerLogPath)
c := &models.CloudbrainConfig{

ConfigurationSnapshot: aiConfigStr,
OutputBucket: output.Bucket,
OutputObjectPrefix: output.ObjectKey,


+ 12
- 2
services/ai_task_service/storage_helper/client.go View File

@@ -14,11 +14,12 @@ type UploaderConfig struct {

type StorageHelper interface {
UploadDir(codePath, jobName string) error
UploadFile(objectKey string, r io.Reader) error
GetRealPath(objectKey string) string
GetBucket() string
GetEndpoint() string
GetJobDefaultObjectKeyPrefix(jobName string) string
MKDIR(path string) error
MKDIR(path string, description ...string) error
GetOneLevelObjectsUnderDir(dirPath string, maxKeyArray ...int) ([]storage.FileInfo, error)
GetAllObjectsUnderDir(prefix string, maxKeyArray ...int) ([]storage.FileInfo, error)
OpenFile(path string) (io.ReadCloser, error)
@@ -27,7 +28,7 @@ type StorageHelper interface {
CopyByPath(sourcePath, targetPath string, filterSuffix []string) error
}

func SelectUploaderFromStorageType(storageType entity.StorageType) StorageHelper {
func SelectStorageHelperFromStorageType(storageType entity.StorageType) StorageHelper {
switch storageType {
case entity.OBS:
return &OBSHelper{}
@@ -46,3 +47,12 @@ func isMatchSuffix(fileName string, filterSuffix []string) bool {
return false

}

func TransferFileBetweenStorage(old, new StorageHelper, oldFilePath, newFilePath string) error {
body, err := old.OpenFile(oldFilePath)
if err != nil {
return err
}
defer body.Close()
return new.UploadFile(newFilePath, body)
}
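
A short sketch (hypothetical helper, not part of this diff) of how TransferFileBetweenStorage might be used to move a single object from OBS to MinIO; both helpers are obtained through the renamed SelectStorageHelperFromStorageType.

// Hypothetical usage inside the storage_helper package; the object keys are illustrative.
func copyObjectFromObsToMinio(srcKey, dstKey string) error {
	src := SelectStorageHelperFromStorageType(entity.OBS)
	dst := SelectStorageHelperFromStorageType(entity.MINIO)
	// Streams the object: OpenFile on the source, UploadFile on the target.
	return TransferFileBetweenStorage(src, dst, srcKey, dstKey)
}
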

+ 14
- 2
services/ai_task_service/storage_helper/minio.go View File

@@ -21,6 +21,15 @@ type MinioHelper struct {
func (m *MinioHelper) UploadDir(codePath, objectKeyPrefix string) error {
return UploadDirToMinio(codePath, objectKeyPrefix, "")
}

func (m *MinioHelper) UploadFile(objectKey string, r io.Reader) error {
_, err := storage.Attachments.UploadContent(m.GetBucket(), objectKey, r)
if err != nil {
return err
}
return nil
}

func (m *MinioHelper) GetJobDefaultObjectKeyPrefix(jobName string) string {
return path.Join(setting.CBCodePathPrefix, jobName)
}
@@ -38,10 +47,13 @@ func (m *MinioHelper) GetEndpoint() string {

const README = "README"

func (m *MinioHelper) MKDIR(path string) error {
func (m *MinioHelper) MKDIR(path string, description ...string) error {
// An empty directory cannot be created directly, so upload a readme file to simulate one
path = strings.TrimSuffix(path, "/") + "/" + README
val := "You can put the files into this directory and download the files by the web page."
val := "read me."
if description != nil && len(description) > 0 {
val = description[0]
}
_, err := storage.Attachments.UploadContent(m.GetBucket(), path, bytes.NewReader([]byte(val)))
return err
}
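
For illustration (hypothetical paths, not part of this diff), the new optional description argument controls the README content written in place of the empty directory:

// Sketch only; the object keys are made up.
func mkdirExamples(m *MinioHelper) {
	_ = m.MKDIR("job/demo/pretrain_model_mount", "pretrain model folder") // custom README text
	_ = m.MKDIR("job/demo/output")                                        // falls back to "read me."
}
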


+ 13
- 1
services/ai_task_service/storage_helper/obs.go View File

@@ -19,6 +19,18 @@ func (m *OBSHelper) UploadDir(codePath, objectKeyPrefix string) error {
return UploadDirToObs(codePath, objectKeyPrefix, "")
}

func (m *OBSHelper) UploadFile(objectKey string, r io.Reader) error {
input := &obs.PutObjectInput{}
input.Bucket = m.GetBucket()
input.Key = objectKey
input.Body = r
_, err := storage.ObsCli.PutObject(input)
if err != nil {
return err
}
return nil
}

func (m *OBSHelper) GetJobDefaultObjectKeyPrefix(jobName string) string {
return path.Join(setting.CodePathPrefix, jobName)
}
@@ -30,7 +42,7 @@ func (m *OBSHelper) GetRealPath(objectKey string) string {
func (m *OBSHelper) GetBucket() string {
return setting.Bucket
}
func (m *OBSHelper) MKDIR(path string) error {
func (m *OBSHelper) MKDIR(path string, description ...string) error {
path = strings.TrimSuffix(path, "/") + "/"
input := &obs.PutObjectInput{}
input.Bucket = setting.Bucket


+ 67
- 5
services/ai_task_service/storage_helper/repo.go View File

@@ -1,7 +1,15 @@
package storage_helper

import (
"archive/zip"
"bufio"
"fmt"
"io"
"os"
"path"
"path/filepath"
"strings"

"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/grampus"
@@ -9,12 +17,7 @@ import (
"code.gitea.io/gitea/modules/obs"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/storage"
"fmt"
"github.com/unknwon/com"
"io"
"os"
"path"
"strings"
)

func DownloadZipCode(gitRepo *git.Repository, codePath, branchName string) (commitId string, err error) {
@@ -199,5 +202,64 @@ func DownloadCode(gitRepo *git.Repository, repo *models.Repository, codePath, br
pos += int64(len(line))
}
commitID, _ := gitRepo.GetBranchCommitID(branchName)

return commitID, nil
}

func createZipArchive(sourceDir, archivePath string) error {
archiveFile, err := os.Create(archivePath)
if err != nil {
return err
}
defer archiveFile.Close()

zipWriter := zip.NewWriter(archiveFile)
defer zipWriter.Close()

err = filepath.Walk(sourceDir, func(filePath string, info os.FileInfo, err error) error {
if err != nil {
return err
}

relPath, err := filepath.Rel(sourceDir, filePath)
if err != nil {
return err
}

// exclude the archive file itself
if strings.TrimSuffix(relPath, "/") == filepath.Base(archivePath) {
return nil
}

if info.IsDir() {
// create a directory entry
zipEntry, err := zipWriter.Create(filepath.ToSlash(relPath) + "/") // directory entries end with a slash
if err != nil {
return err
}
_ = zipEntry // the directory entry itself is not used

return nil
}

zipEntry, err := zipWriter.Create(filepath.ToSlash(relPath)) // use forward slashes as path separators
if err != nil {
return err
}

file, err := os.Open(filePath)
if err != nil {
return err
}
defer file.Close()

_, err = io.Copy(zipEntry, file)
if err != nil {
return err
}

return nil
})

return err
}
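
A hedged sketch (hypothetical wrapper, not part of this diff) of how createZipArchive could be called to package a downloaded code directory before upload; the archive is written inside the source directory and is skipped by the walk thanks to the self-exclusion check above.

// Illustrative only; the archive name mirrors the master.zip convention used elsewhere in this file.
func zipCodeDir(codeLocalPath string) (string, error) {
	archivePath := path.Join(codeLocalPath, "master.zip")
	if err := createZipArchive(codeLocalPath, archivePath); err != nil {
		return "", err
	}
	return archivePath, nil
}
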

+ 5
- 1
services/ai_task_service/task/cloudbrain_one_notebook_task.go View File

@@ -34,6 +34,8 @@ func GetCloudbrainOneNotebookConfig(opts entity.AITaskConfigKey) *entity.AITaskB
IsActionUseJobId: false,
DatasetsLimitSizeGB: setting.DebugAttachSize,
DatasetsMaxNum: setting.MaxDatasetNum,
ModelLimitSizeGB: setting.DEBUG_MODEL_SIZE_LIMIT_GB,
ModelMaxNum: setting.DEBUG_MODEL_NUM_LIMIT,
ContainerSteps: map[entity.ContainerDataType]*entity.ContainerBuildOpts{
entity.ContainerCode: {
ContainerPath: "/code",
@@ -88,6 +90,7 @@ func (t CloudbrainOneNotebookTaskTemplate) Create(ctx *context.CreationContext)
Next(t.LoadSpec).
Next(t.CheckPointBalance).
Next(t.CheckDatasets).
Next(t.CheckModels).
Next(t.CheckBranchExists).
Next(t.InsertCloudbrainRecord4Async).
AsyncNextWithErrFun(t.BuildContainerData, t.GetAvailableQueues, t.CallCreationAPI, t.AfterCallCreationAPI4Async, t.NotifyCreation, t.HandleErr4Async).
@@ -104,8 +107,9 @@ func (t CloudbrainOneNotebookTaskTemplate) Restart(ctx *context.CreationContext)
c := &CreateOperator{}
err := c.Next(t.BuildRequest4Restart).
Next(t.CheckSourceTaskIsCleared).
Next(t.CheckModel).
Next(t.CheckModels).
Next(t.CheckDatasets).
Next(t.CheckParamFormat).
Next(t.CheckMultiRequest).
Next(t.LoadSpec).


+ 1
- 1
services/ai_task_service/task/cloudbrain_one_train_task.go View File

@@ -80,7 +80,7 @@ func (t CloudbrainOneTrainTaskTemplate) Create(ctx *context.CreationContext) (*e
Next(t.LoadSpec).
Next(t.CheckPointBalance).
Next(t.CheckDatasets).
Next(t.CheckModel).
Next(t.CheckModels).
Next(t.InsertCloudbrainRecord4Async).
AsyncNextWithErrFun(t.BuildContainerData, t.GetAvailableQueues, t.CallCreationAPI, t.AfterCallCreationAPI4Async, t.NotifyCreation, t.HandleErr4Async).
Operate(ctx)


+ 1
- 1
services/ai_task_service/task/cloudbrain_two_inference_task.go View File

@@ -81,7 +81,7 @@ func (t CloudbrainTwoInferenceTaskTemplate) Create(ctx *context.CreationContext)
Next(t.LoadSpec).
Next(t.CheckPointBalance).
Next(t.CheckDatasets).
Next(t.CheckModel).
Next(t.CheckModels).
Next(t.InsertCloudbrainRecord4Async).
AsyncNextWithErrFun(t.BuildContainerData, t.GetAvailableQueues, t.CallCreationAPI, t.AfterCallCreationAPI4Async, t.NotifyCreation, t.HandleErr4Async).
Operate(ctx)


+ 14
- 5
services/ai_task_service/task/cloudbrain_two_notebook_task.go View File

@@ -1,6 +1,8 @@
package task

import (
"strings"

"code.gitea.io/gitea/entity"
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/convert"
@@ -11,7 +13,6 @@ import (
"code.gitea.io/gitea/routers/response"
"code.gitea.io/gitea/services/ai_task_service/context"
"code.gitea.io/gitea/services/cloudbrain/resource"
"strings"
)

type CloudbrainTwoNotebookTaskTemplate struct {
@@ -36,17 +37,19 @@ func GetCloudbrainTwoNotebookConfig(opts entity.AITaskConfigKey) *entity.AITaskB
IsActionUseJobId: false,
DatasetsLimitSizeGB: setting.DebugAttachSize,
DatasetsMaxNum: setting.MaxDatasetNum,
ModelLimitSizeGB: setting.DEBUG_MODEL_SIZE_LIMIT_GB,
ModelMaxNum: setting.DEBUG_MODEL_NUM_LIMIT,
ContainerSteps: map[entity.ContainerDataType]*entity.ContainerBuildOpts{
entity.ContainerCode: {
Disable: true,
Disable: false,
AcceptStorageType: []entity.StorageType{entity.OBS},
},
entity.ContainerDataset: {
Disable: true,
Disable: false,
AcceptStorageType: []entity.StorageType{entity.OBS},
},
entity.ContainerPreTrainModel: {
Disable: true,
Disable: false,
AcceptStorageType: []entity.StorageType{entity.OBS},
},
},
@@ -69,6 +72,7 @@ func (t CloudbrainTwoNotebookTaskTemplate) Create(ctx *context.CreationContext)
Next(t.CheckMultiRequest).
Next(t.CheckDisplayJobName).
Next(t.CheckNotebookCount).
Next(t.CheckModels).
Next(t.LoadSpec).
Next(t.CheckPointBalance).
Next(t.CheckDatasets).
@@ -88,7 +92,7 @@ func (t CloudbrainTwoNotebookTaskTemplate) Restart(ctx *context.CreationContext)
c := &CreateOperator{}
err := c.Next(t.BuildRequest4Restart).
Next(t.CheckSourceTaskIsCleared).
Next(t.CheckModel).
Next(t.CheckModels).
Next(t.CheckDatasets).
Next(t.CheckParamFormat).
Next(t.CheckMultiRequest).
@@ -111,6 +115,7 @@ func (t CloudbrainTwoNotebookTaskTemplate) Restart(ctx *context.CreationContext)
}

func (g CloudbrainTwoNotebookTaskTemplate) CallCreationAPI(ctx *context.CreationContext) *response.BizError {
log.Info("ctx is here:", ctx)
c := g.GetMyCluster()
if c == nil {
return response.SYSTEM_ERROR
@@ -127,6 +132,10 @@ func (g CloudbrainTwoNotebookTaskTemplate) CallCreationAPI(ctx *context.Creation
ImageUrl: strings.TrimSpace(form.ImageUrl),
AutoStopDuration: autoStopDurationMs,
Spec: ctx.Spec,
Datasets: ctx.GetContainerDataArray(entity.ContainerDataset),
Code: ctx.GetContainerDataArray(entity.ContainerCode),
PreTrainModel: ctx.GetContainerDataArray(entity.ContainerPreTrainModel),
OutPut: ctx.GetContainerDataArray(entity.ContainerOutPutPath),
},
},
}


+ 1
- 1
services/ai_task_service/task/cloudbrain_two_train_task.go View File

@@ -81,7 +81,7 @@ func (t CloudbrainTwoTrainTaskTemplate) Create(ctx *context.CreationContext) (*e
Next(t.LoadSpec).
Next(t.CheckPointBalance).
Next(t.CheckDatasets).
Next(t.CheckModel).
Next(t.CheckModels).
Next(t.InsertCloudbrainRecord4Async).
AsyncNextWithErrFun(t.BuildContainerData, t.GetAvailableQueues, t.CallCreationAPI, t.AfterCallCreationAPI4Async, t.NotifyCreation, t.HandleErr4Async).
Operate(ctx)


+ 1
- 1
services/ai_task_service/task/grampus_inference_task.go View File

@@ -85,7 +85,7 @@ func (t GrampusInferenceTaskTemplate) Create(ctx *context.CreationContext) (*ent
Next(t.LoadSpec).
Next(t.CheckPointBalance).
Next(t.CheckDatasets).
Next(t.CheckModel).
Next(t.CheckModels).
Next(t.InsertCloudbrainRecord4Async).
AsyncNextWithErrFun(t.BuildContainerData, t.GetAvailableQueues, t.CallCreationAPI, t.AfterCallCreationAPI4Async, t.NotifyCreation, t.HandleErr4Async).
Operate(ctx)


+ 13
- 9
services/ai_task_service/task/grampus_notebook_task.go View File

@@ -29,9 +29,9 @@ func init() {
}

func GetGrampusNoteBookConfig(opts entity.AITaskConfigKey) *entity.AITaskBaseConfig {
codePath := "/code"
datasetPath := "/dataset"
pretrainModelPath := "/pretrainmodel"
codePath := "/tmp/code"
datasetPath := "/tmp/dataset"
pretrainModelPath := "/tmp/pretrainmodel"

config := &entity.AITaskBaseConfig{
ContainerSteps: map[entity.ContainerDataType]*entity.ContainerBuildOpts{
@@ -101,17 +101,17 @@ func GetGrampusNoteBookConfig(opts entity.AITaskConfigKey) *entity.AITaskBaseCon
config = &entity.AITaskBaseConfig{
ContainerSteps: map[entity.ContainerDataType]*entity.ContainerBuildOpts{
entity.ContainerCode: {
ContainerPath: "/tmp" + codePath,
ContainerPath: codePath,
ReadOnly: false,
AcceptStorageType: []entity.StorageType{entity.MINIO, entity.OBS},
},
entity.ContainerDataset: {
ContainerPath: "/tmp" + datasetPath,
ContainerPath: datasetPath,
ReadOnly: true,
AcceptStorageType: []entity.StorageType{entity.MINIO, entity.OBS},
},
entity.ContainerPreTrainModel: {
ContainerPath: "/tmp" + pretrainModelPath,
ContainerPath: pretrainModelPath,
ReadOnly: true,
AcceptStorageType: []entity.StorageType{entity.MINIO, entity.OBS},
},
@@ -161,6 +161,8 @@ func GetGrampusNoteBookConfig(opts entity.AITaskConfigKey) *entity.AITaskBaseCon
config.IsActionUseJobId = false
config.DatasetsLimitSizeGB = setting.DebugAttachSize
config.DatasetsMaxNum = setting.MaxDatasetNum
config.ModelLimitSizeGB = setting.DEBUG_MODEL_SIZE_LIMIT_GB
config.ModelMaxNum = setting.DEBUG_MODEL_NUM_LIMIT
return config
}

@@ -174,7 +176,7 @@ func (t GrampusNoteBookTaskTemplate) Create(ctx *context.CreationContext) (*enti
Next(t.CheckPointBalance).
Next(t.CheckDatasets).
Next(t.CheckBranchExists).
Next(t.CheckModel).
Next(t.CheckModels).
Next(t.InsertCloudbrainRecord4Async).
AsyncNextWithErrFun(t.BuildContainerData, t.GetAvailableQueues, t.CallCreationAPI, t.AfterCallCreationAPI4Async, t.NotifyCreation, t.HandleErr4Async).
Operate(ctx)
@@ -189,7 +191,7 @@ func (t GrampusNoteBookTaskTemplate) Restart(ctx *context.CreationContext) (*ent
c := &CreateOperator{}
err := c.Next(t.BuildRequest4Restart).
Next(t.CheckSourceTaskIsCleared).
Next(t.CheckModel).
Next(t.CheckModels).
Next(t.CheckDatasets).
Next(t.CheckParamFormat).
Next(t.CheckMultiRequest).
@@ -231,8 +233,10 @@ func (g GrampusNoteBookTaskTemplate) CallCreationAPI(ctx *context.CreationContex
ResourceSpecId: ctx.Spec.SourceSpecId,
ImageId: form.ImageID,
ImageUrl: imageUrl,
Datasets: append(ctx.GetContainerDataArray(entity.ContainerDataset), ctx.GetContainerDataArray(entity.ContainerPreTrainModel)...),
Datasets: ctx.GetContainerDataArray(entity.ContainerDataset),
PreTrainModel: ctx.GetContainerDataArray(entity.ContainerPreTrainModel),
Code: ctx.GetContainerDataArray(entity.ContainerCode),
EnvVariables: models.GrampusEnvVarReq{},
AutoStopDuration: autoStopDurationMs,
Capacity: setting.Capacity,
Queues: ctx.Queues,


+ 18
- 6
services/ai_task_service/task/grampus_online_infer_task.go View File

@@ -30,16 +30,18 @@ func init() {
}

func GetGrampusOnlineInferConfig(opts entity.AITaskConfigKey) *entity.AITaskBaseConfig {
codePath := "/code"
datasetPath := "/dataset"
pretrainModelPath := "/pretrainmodel"
outputPath := "/output"
codePath := "/tmp/code"
datasetPath := "/tmp/dataset"
pretrainModelPath := "/tmp/pretrainmodel"
outputPath := "/tmp/output"

config := &entity.AITaskBaseConfig{
ActionType: models.ActionCreateGrampusGPUOnlineInferTask,
IsActionUseJobId: false,
DatasetsLimitSizeGB: setting.DebugAttachSize,
DatasetsMaxNum: setting.MaxDatasetNum,
ModelLimitSizeGB: setting.DEBUG_MODEL_SIZE_LIMIT_GB,
ModelMaxNum: setting.DEBUG_MODEL_NUM_LIMIT,
ContainerSteps: map[entity.ContainerDataType]*entity.ContainerBuildOpts{
entity.ContainerCode: {
ContainerPath: codePath,
@@ -78,7 +80,7 @@ func (t GrampusOnlineInferTaskTemplate) Create(ctx *context.CreationContext) (*e
Next(t.CheckPointBalance).
Next(t.CheckDatasets).
Next(t.CheckBranchExists).
Next(t.CheckModel).
Next(t.CheckModels).
Next(t.InsertCloudbrainRecord4Async).
AsyncNextWithErrFun(t.BuildContainerData, t.GetAvailableQueues, t.CallCreationAPI, t.AfterCallCreationAPI4Async, t.NotifyCreation, t.HandleErr4Async).
Operate(ctx)
@@ -116,7 +118,8 @@ func (g GrampusOnlineInferTaskTemplate) CallCreationAPI(ctx *context.CreationCon
ResourceSpecId: ctx.Spec.SourceSpecId,
ImageId: form.ImageID,
ImageUrl: imageUrl,
Datasets: append(ctx.GetContainerDataArray(entity.ContainerDataset), ctx.GetContainerDataArray(entity.ContainerPreTrainModel)...),
Datasets: ctx.GetContainerDataArray(entity.ContainerDataset),
PreTrainModel: ctx.GetContainerDataArray(entity.ContainerPreTrainModel),
Code: ctx.GetContainerDataArray(entity.ContainerCode),
OutPut: ctx.GetContainerDataArray(entity.ContainerOutPutPath),
AutoStopDuration: -1,
@@ -160,3 +163,12 @@ func (g GrampusOnlineInferTaskTemplate) LoadSpec(ctx *context.CreationContext) *
ctx.Spec = spec
return nil
}

func (GrampusOnlineInferTaskTemplate) GetAvailableQueues(ctx *context.CreationContext) *response.BizError {
ctx.Queues = ctx.Spec.GetAvailableQueues(models.GetAvailableCenterIdOpts{
UserId: ctx.User.ID,
JobType: models.JobTypeDebug,
HasInternet: ctx.Request.HasInternet,
})
return nil
}

+ 3
- 2
services/ai_task_service/task/grampus_train_task.go View File

@@ -1,6 +1,8 @@
package task

import (
"strings"

"code.gitea.io/gitea/entity"
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/cloudbrain"
@@ -9,7 +11,6 @@ import (
"code.gitea.io/gitea/modules/timeutil"
"code.gitea.io/gitea/routers/response"
"code.gitea.io/gitea/services/ai_task_service/context"
"strings"
)

type GrampusTrainTaskTemplate struct {
@@ -114,7 +115,7 @@ func (t GrampusTrainTaskTemplate) Create(ctx *context.CreationContext) (*entity.
Next(t.LoadSpec).
Next(t.CheckPointBalance).
Next(t.CheckDatasets).
Next(t.CheckModel).
Next(t.CheckModels).
Next(t.InsertCloudbrainRecord4Async).
AsyncNextWithErrFun(t.BuildContainerData, t.GetAvailableQueues, t.CallCreationAPI, t.AfterCallCreationAPI4Async, t.NotifyCreation, t.HandleErr4Async).
Operate(ctx)


+ 49
- 31
services/ai_task_service/task/opt_handler.go View File

@@ -30,7 +30,6 @@ type CreationHandler interface {
CheckPointBalance(ctx *context.CreationContext) *response.BizError
CheckDatasets(ctx *context.CreationContext) *response.BizError
CheckBranchExists(ctx *context.CreationContext) *response.BizError
CheckModel(ctx *context.CreationContext) *response.BizError
CheckBootFile(ctx *context.CreationContext) *response.BizError
CheckSourceTaskIsCleared(ctx *context.CreationContext) *response.BizError
BuildContainerData(ctx *context.CreationContext) *response.BizError
@@ -46,7 +45,7 @@ type CreationHandler interface {
GetAvailableQueues(ctx *context.CreationContext) *response.BizError
}

//DefaultCreationHandler is the default implementation of CreationHandler; common logic can live in this struct
// DefaultCreationHandler is the default implementation of CreationHandler; common logic can live in this struct
type DefaultCreationHandler struct {
}

@@ -91,13 +90,10 @@ func (DefaultCreationHandler) BuildRequest4Restart(ctx *context.CreationContext)
Cluster: entity.GetClusterTypeFromCloudbrainType(task.Type),
WorkServerNumber: task.WorkServerNumber,
BranchName: task.BranchName,
PreTrainModelUrl: task.PreTrainModelUrl,
PretrainModelCkptName: task.CkptName,
ImageUrl: imageUrl,
ImageID: imageId,
ImageName: imageName,
PretrainModelName: task.ModelName,
PretrainModelVersion: task.ModelVersion,
Description: task.Description,
LabelName: task.LabelName,
DatasetUUIDStr: task.Uuid,
@@ -159,6 +155,51 @@ func (g DefaultCreationHandler) CheckDatasets(ctx *context.CreationContext) *res
return nil
}

func (g DefaultCreationHandler) CheckModels(ctx *context.CreationContext) *response.BizError {
log.Info("Start to CheckModels.displayJobName=%s jobType=%s cluster=%s", ctx.Request.DisplayJobName, ctx.Request.JobType, ctx.Request.Cluster)
modelIdStr := ctx.Request.PretrainModelId
if modelIdStr == "" {
return nil
}
//check model count
uuids := strings.Split(modelIdStr, ";")
if ctx.Config.ModelMaxNum > 0 && len(uuids) > ctx.Config.ModelMaxNum {
log.Error("the model count(%d) exceeds the limit", len(uuids))
return response.MODEL_NUMBER_OVER_LIMIT
}

modelInfoMaps, err := models.QueryModelMapsByIds(uuids)
if err != nil {
log.Error("QueryModelsByIds failed: %v", err)
return response.SYSTEM_ERROR
}

if len(modelInfoMaps) < len(uuids) {
log.Info("CheckModels has model deleted.displayJobName=%s jobType=%s cluster=%s", ctx.Request.DisplayJobName, ctx.Request.JobType, ctx.Request.Cluster)
return response.MODEL_NOT_EXISTS
}

//check total model size
var attachSize int64
for _, infos := range modelInfoMaps {
attachSize += infos.Size
}
limitSizeGB := ctx.Config.ModelLimitSizeGB
if limitSizeGB > 0 && attachSize > int64(limitSizeGB*1000*1000*1000) {
log.Error("The model size exceeds the limit (%dGB)", limitSizeGB) // GB
return response.MODEL_SIZE_OVER_LIMIT.WithParams(limitSizeGB)
}

var modelNames string
for i := 0; i < len(uuids); i++ {
m := modelInfoMaps[uuids[i]]
modelNames += m.Name + ";"
}
ctx.Request.ModelNames = strings.TrimSuffix(modelNames, ";")
log.Info("CheckModels success.displayJobName=%s jobType=%s cluster=%s", ctx.Request.DisplayJobName, ctx.Request.JobType, ctx.Request.Cluster)
return nil
}
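
The size check above treats ModelLimitSizeGB as decimal gigabytes (10^9 bytes); a small worked sketch with hypothetical numbers:

// sketchModelSizeLimit is a hypothetical illustration of the conversion used
// in CheckModels above.
func sketchModelSizeLimit() bool {
    limitSizeGB := 20                                     // ctx.Config.ModelLimitSizeGB (hypothetical)
    limitBytes := int64(limitSizeGB * 1000 * 1000 * 1000) // 20,000,000,000 bytes
    attachSize := int64(12_000_000_000 + 9_000_000_000)   // two models: 12 GB + 9 GB (decimal)
    return attachSize > limitBytes                        // true -> MODEL_SIZE_OVER_LIMIT
}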

func (DefaultCreationHandler) CheckBranchExists(ctx *context.CreationContext) *response.BizError {
log.Info("Start to CheckBranchExists.displayJobName=%s jobType=%s cluster=%s", ctx.Request.DisplayJobName, ctx.Request.JobType, ctx.Request.Cluster)
if ctx.GitRepo == nil || ctx.Request.BranchName == "" {
@@ -171,20 +212,6 @@ func (DefaultCreationHandler) CheckBranchExists(ctx *context.CreationContext) *r
return nil
}

func (DefaultCreationHandler) CheckModel(ctx *context.CreationContext) *response.BizError {
log.Info("Start to CheckModel.displayJobName=%s jobType=%s cluster=%s", ctx.Request.DisplayJobName, ctx.Request.JobType, ctx.Request.Cluster)
if hasModelNumOverLimit(ctx.Request.PretrainModelCkptName) { //check whether the number of models exceeds the limit
log.Info("CheckModel hasModelNumOverLimit displayJobName=%s jobType=%s cluster=%s", ctx.Request.DisplayJobName, ctx.Request.JobType, ctx.Request.Cluster)
return response.MODEL_NUM_OVER_LIMIT
}
if hasModelFileDeleted(ctx.Request.PretrainModelId, ctx.Request.PretrainModelCkptName) { //check whether the model files still exist
log.Info("CheckModel hasModelFileDeleted.displayJobName=%s jobType=%s cluster=%s", ctx.Request.DisplayJobName, ctx.Request.JobType, ctx.Request.Cluster)
return response.MODEL_NOT_EXISTS
}
log.Info("CheckModel success.displayJobName=%s jobType=%s cluster=%s", ctx.Request.DisplayJobName, ctx.Request.JobType, ctx.Request.Cluster)
return nil
}

func (DefaultCreationHandler) CheckSourceTaskIsCleared(ctx *context.CreationContext) *response.BizError {
log.Info("Start to CheckSourceTaskIsCleared.displayJobName=%s jobType=%s cluster=%s", ctx.Request.DisplayJobName, ctx.Request.JobType, ctx.Request.Cluster)
task := ctx.SourceCloudbrain
@@ -413,12 +440,9 @@ func (DefaultCreationHandler) InsertCloudbrainRecord4Async(ctx *context.Creation
WorkServerNumber: req.WorkServerNumber,
EngineName: imageUrl,
Spec: ctx.Spec,
ModelName: req.PretrainModelName,
ModelVersion: req.PretrainModelVersion,
ModelName: req.ModelNames,
LabelName: req.LabelName,
PreTrainModelUrl: req.PreTrainModelUrl,
ModelId: req.PretrainModelId,
CkptName: req.PretrainModelCkptName,
SubTaskName: models.SubTaskName,
CreatedUnix: timeutil.TimeStampNow(),
UpdatedUnix: timeutil.TimeStampNow(),
@@ -478,12 +502,9 @@ func (DefaultCreationHandler) AfterCallCreationAPI4Sync(ctx *context.CreationCon
WorkServerNumber: req.WorkServerNumber,
EngineName: imageUrl,
Spec: ctx.Spec,
ModelName: req.PretrainModelName,
ModelVersion: req.PretrainModelVersion,
ModelName: req.ModelNames,
LabelName: req.LabelName,
PreTrainModelUrl: req.PreTrainModelUrl,
ModelId: req.PretrainModelId,
CkptName: req.PretrainModelCkptName,
SubTaskName: models.SubTaskName,
JobID: res.JobID,
Status: TransAITaskStatus(res.Status),
@@ -590,11 +611,8 @@ func (DefaultCreationHandler) CreateCloudbrainRecord4Restart(ctx *context.Creati
CreatedUnix: res.CreateTime,
UpdatedUnix: res.CreateTime,
Spec: ctx.Spec,
ModelName: req.PretrainModelName,
ModelVersion: req.PretrainModelVersion,
ModelName: req.ModelNames,
LabelName: req.LabelName,
PreTrainModelUrl: req.PreTrainModelUrl,
CkptName: req.PretrainModelCkptName,
SubTaskName: models.SubTaskName,
ModelId: req.PretrainModelId,
GpuQueue: ctx.Spec.QueueCode,


+ 138
- 0
services/ai_task_service/task/sdk_util.go View File

@@ -0,0 +1,138 @@
package task

import (
"fmt"
"math/rand"
"regexp"
"strings"
"unicode"

"code.gitea.io/gitea/models"
)

func GenerateSDKCode(datasetNames, pretrainModelNames, parameterKeys []string, jobType models.JobType) string {
return generateAITaskSDKCode(datasetNames, pretrainModelNames, parameterKeys, jobType)
}

func generateAITaskSDKCode(datasetNames, pretrainModelNames, parameterKeys []string, jobType models.JobType) string {
var code string
if len(parameterKeys) > 0 {
//add argument parsing code
code = code + "import argparse\n\nparser = argparse.ArgumentParser(description='忽略超参数不存在的报错问题')\n#添加自定义参数\n"
for i := 0; i < len(parameterKeys); i++ {
code = code + fmt.Sprintf("parser.add_argument(\"--%s\")\n", parameterKeys[i])
}
code = code + "args, unknown = parser.parse_known_args()\n\n"
}

//decide whether the prepare() call is needed
shouldPrepareCtx := true
if len(datasetNames) == 0 && len(pretrainModelNames) == 0 {
shouldPrepareCtx = false
}
//decide whether the upload-back call is needed
shouldAddUploadCode := shouldAddUpload(jobType)
if !shouldPrepareCtx && !shouldAddUploadCode {
return code
}
// code = code + "import os\nos.system(\"pip install openi\")\n"

//add the import statements
if !shouldAddUploadCode {
code = code + "from c2net.context import prepare\n\n"
} else if !shouldPrepareCtx {
code = code + "from c2net.context import upload_output\n\n"
} else {
code = code + "from c2net.context import prepare,upload_output\n\n"
}

//add the prepare() call
if shouldPrepareCtx {
code = code + "#初始化导入数据集和预训练模型到容器内\nc2net_context = prepare()\n"
}

//add dataset path code
var datasetNameMap = make(map[string]string, 0)
for i := 0; i < len(datasetNames); i++ {
if i == 0 {
code = code + "\n#获取数据集路径\n"
}
datasetName := strings.TrimSuffix(datasetNames[i], ".zip")
datasetName = strings.TrimSuffix(datasetName, ".tar.gz")
validName := makeValidPythonVariableName(datasetName)
for {
if _, exists := datasetNameMap[datasetName]; exists {
datasetName = datasetName + "_" + generateRandomString(3)
} else {
break
}
}
datasetNameMap[datasetName] = ""
pathCode := fmt.Sprintf("%s_path = c2net_context.dataset_path+\"/\"+\"%s\"\n", validName, datasetName)
code = code + pathCode
}

//add pretrained model path code
var pretrainModelNameMap = make(map[string]string, 0)
for i := 0; i < len(pretrainModelNames); i++ {
if i == 0 {
code = code + "\n#获取预训练模型路径\n"
}
modelName := makeValidPythonVariableName(pretrainModelNames[i])
for {
if _, exists := pretrainModelNameMap[modelName]; exists {
modelName = modelName + "_" + generateRandomString(3)
} else {
break
}
}
pretrainModelNameMap[modelName] = ""
pathCode := fmt.Sprintf("%s_path = c2net_context.pretrain_model_path+\"/\"+\"%s\"\n", modelName, pretrainModelNames[i])
code = code + pathCode
}

//add the output-path hint
code = code + "\n#输出结果必须保存在该目录\nyou_should_save_here = c2net_context.output_path\n\n"
//add the upload-back call
if shouldAddUploadCode {
code = code + "\n#回传结果到openi,只有训练任务才能回传\nupload_output()\n"
}
return code
}

// shouldAddUpload reports whether the generated code should upload results back (only training jobs do)
func shouldAddUpload(jobType models.JobType) bool {
return jobType == models.JobTypeTrain
}

func makeValidPythonVariableName(input string) string {
validName := strings.ToLower(input)
// replace any character that is not a letter, digit or underscore with an underscore
re := regexp.MustCompile(`[^a-zA-Z0-9_]`)
validName = re.ReplaceAllString(validName, "_")
re = regexp.MustCompile(`_+`)
validName = re.ReplaceAllString(validName, "_")
// prefix with an underscore if the name starts with a digit
if len(validName) > 0 && unicode.IsDigit(rune(validName[0])) {
validName = "_" + validName
}
if validName == "" || validName == "_" {
validName = "pretrain_model_" + generateRandomString(3)
}
return validName
}

// generateRandomString returns a random lowercase string of the given length
func generateRandomString(length int) string {
const letters = "abcdefghijklmnopqrstuvwxyz"

result := make([]byte, length)
for i := range result {
result[i] = letters[rand.Intn(len(letters))]
}

return string(result)
}
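
A sketch of how the generator might be invoked (same package; the dataset, model and parameter names are hypothetical). For a training job the generated Python snippet also appends the upload_output() call.

// exampleGenerateSDKCode shows a hypothetical invocation; "fmt" and "models"
// are already imported by this file.
func exampleGenerateSDKCode() {
    code := GenerateSDKCode(
        []string{"cifar10.zip"},   // dataset names
        []string{"resnet50"},      // pretrained model names
        []string{"learning_rate"}, // hyperparameter keys
        models.JobTypeTrain,
    )
    fmt.Println(code) // prints the generated c2net prepare/upload snippet
}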

+ 1
- 1
services/ai_task_service/task/super_compute_task.go View File

@@ -55,7 +55,7 @@ func (t SuperComputeTaskTemplate) Create(ctx *context.CreationContext) (*entity.
Next(t.CheckPointBalance).
Next(t.CheckDatasets).
Next(t.CheckBranchExists).
Next(t.CheckModel).
Next(t.CheckModels).
Next(t.InsertCloudbrainRecord4Async).
AsyncNextWithErrFun(t.BuildContainerData, t.CallCreationAPI, t.AfterCallCreationAPI4Async, t.NotifyCreation, t.HandleErr4Async).
Operate(ctx)


+ 1
- 1
services/ai_task_service/task/task_config.go View File

@@ -41,7 +41,7 @@ func GetContainerStorageObjectPrefix(c *entity.AITaskBaseConfig, jobName string,
if len(st) == 0 {
return ""
}
uploader := storage_helper.SelectUploaderFromStorageType(st[0])
uploader := storage_helper.SelectStorageHelperFromStorageType(st[0])
//versionName is kept for compatibility with historical tasks; Cloudbrain II training tasks also append a default version to fit the ModelArts API, which must be stripped here
localPath := config.GetLocalPath()
localPath = strings.TrimSuffix(localPath, models.CloudbrainTwoDefaultVersion)


+ 6
- 1
services/ai_task_service/task/task_creation_info.go View File

@@ -86,8 +86,13 @@ func GetAITaskCreationInfo(req entity.GetAITaskCreationInfoReq) (*entity.Creatio
result.CanUseAllImages = canUseAll
}

c := t.GetConfig(entity.AITaskConfigKey{ComputeSource: req.ComputeSource.GetCloudbrainFormat()})
result.Config = entity.AITaskCreationConfig{
DatasetMaxSize: setting.DebugAttachSize * 1024 * 1024 * 1024,
//DatasetMaxSize: setting.DebugAttachSize * 1000 * 1000 * 1000,
DatasetMaxSize: c.DatasetsLimitSizeGB * 1024 * 1024 * 1024,
DatasetsMaxNum: c.DatasetsMaxNum,
ModelMaxSize: c.ModelLimitSizeGB * 1024 * 1024 * 1024,
ModelMaxNum: c.ModelMaxNum,
}
//query the number of available worker nodes
if workerNums, err := t.GetAllowedWorkerNum(req.User.ID, req.ComputeSource); err == nil {


+ 33
- 47
services/ai_task_service/task/task_extend.go View File

@@ -11,50 +11,47 @@ import (
"strings"
)

func GetModelDownload(task *models.Cloudbrain) []*models.ModelDownload {
var repositoryLink string
pretrainModelList := []*models.ModelDownload{}
ckptNames := strings.Split(task.CkptName, ";")
func GetModelDownload(task *models.Cloudbrain) []*models.Model4Show {
var pretrainModelList []*models.Model4Show
var model *models.AiModelManage
var err error
if task.ModelId == "" {
model, err = models.QueryModelByPath(task.PreTrainModelUrl)
} else {
model, err = models.QueryModelById(task.ModelId)
}
if err != nil || model == nil {
return pretrainModelList
}

if r, err := models.QueryModelRepoByModelID(model.ID); err == nil {
repositoryLink = r.Link()
}
for _, ckptName := range ckptNames {
var url string
if task.Type == models.TypeC2Net {
url = getModelContainerLink(task.DataUrl, ckptName)
} else {
url = getModelLocalLink(model, ckptName)
}
modelDownload := models.ModelDownload{
Name: ckptName,
DownloadLink: url,
IsDelete: false,
ModelName: model.Name,
return []*models.Model4Show{}
}
modelIdArray := task.GetModelIdArray()
modelNameArray := task.GetModelNameArray()
for i := 0; i < len(modelIdArray); i++ {
modelId := modelIdArray[i]
model, err = models.QueryModelById(modelId)
if err != nil || model == nil {
oldModelName := ""
if len(modelNameArray) > i {
oldModelName = modelNameArray[i]
}
pretrainModelList = append(pretrainModelList, &models.Model4Show{
IsDelete: true,
Name: oldModelName,
})
continue
}
if hasModelFileDeleted(task.ModelId, ckptName) {
log.Warn("Can not get model by path:" + url)
modelDownload.IsDelete = true
var repositoryLink string
if r, err := models.QueryModelRepoByModelID(modelId); err == nil {
repositoryLink = r.Link()
}
modelDownload.RepositoryLink = repositoryLink
pretrainModelList = append(pretrainModelList, &modelDownload)
pretrainModelList = append(pretrainModelList, &models.Model4Show{
ID: modelId,
IsDelete: false,
Name: model.Name,
RepositoryLink: repositoryLink,
Size: model.Size,
})
}
return pretrainModelList
}

func getModelLocalLink(model *models.AiModelManage, ckptName string) string {
func getModelLocalLink(model *models.AiModelManage) string {
index := strings.Index(model.Path, "/")
key := model.Path[index+1:] + ckptName
key := model.Path[index+1:]
url, _ := storage.GetObsCreateSignedUrlByBucketAndKey(setting.Bucket, key)
return url
}
@@ -70,17 +67,6 @@ func GetCloudBrainDataSetInfo(task *models.Cloudbrain) []*models.DatasetDownload
datasetObsUrlList := make([]entity.NotebookDataset, 0)
_ = json.Unmarshal([]byte(task.DataUrl), &datasetObsUrlList)

for _, datasetInfo := range datasetDownload {
datasetInfo.DatasetDownloadLink = ""
for _, datasetObs := range datasetObsUrlList {
log.Info("datasetObsUrl:" + datasetObs.DatasetUrl + "datasetName:" + datasetInfo.DatasetName)
if strings.Contains(datasetObs.DatasetUrl, datasetInfo.DatasetName) {
datasetInfo.DatasetDownloadLink = datasetObs.DatasetUrl
break
}
}

}
return datasetDownload
}

@@ -198,14 +184,14 @@ func correctAITaskSpec(task *models.Cloudbrain) {
}
}

func getModelContainerLink(dataUrl string, ckptName string) string {
func getModelContainerLink(dataUrl string, modelName string) string {
if dataUrl == "" {
return ""
}
datasetObsUrlList := make([]entity.NotebookDataset, 0)
_ = json.Unmarshal([]byte(dataUrl), &datasetObsUrlList)
for _, datasetObs := range datasetObsUrlList {
if strings.Contains(datasetObs.DatasetUrl, ckptName) {
if datasetObs.DatasetName == modelName {
return datasetObs.DatasetUrl
}
}


+ 80
- 55
services/ai_task_service/task/task_service.go View File

@@ -1,15 +1,6 @@
package task

import (
"encoding/json"
"errors"
"fmt"
"net/http"
"net/url"
"path"
"strconv"
"strings"

"code.gitea.io/gitea/entity"
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/convert"
@@ -25,6 +16,14 @@ import (
"code.gitea.io/gitea/services/cloudbrain/cloudbrainTask"
"code.gitea.io/gitea/services/cloudbrain/resource"
"code.gitea.io/gitea/services/lock"
"encoding/json"
"errors"
"fmt"
"net/http"
"net/url"
"path"
"strconv"
"strings"
)

type QueryFunc func(opts entity.JobIdAndVersionId) (*entity.QueryTaskResponse, error)
@@ -68,13 +67,40 @@ func buildAITaskInfo(task *models.Cloudbrain, creator *models.User, config *enti
return nil, err
}
datasets := []*models.DatasetDownload{}
pretrainModelList := []*models.ModelDownload{}
pretrainModelList := []*models.Model4Show{}
var datasetNames []string
if task.Uuid != "" {
datasets = GetCloudBrainDataSetInfo(task)
for i := 0; i < len(datasets); i++ {
if datasets[i].DatasetName == "" {
continue
}
datasetNames = append(datasetNames, datasets[i].DatasetName)
}
}
if task.ModelName != "" {
var pretrainModelNames []string
if task.ModelId != "" {
pretrainModelList = GetModelDownload(task)
for i := 0; i < len(pretrainModelList); i++ {
if pretrainModelList[i].Name == "" {
continue
}
pretrainModelNames = append(pretrainModelNames, pretrainModelList[i].Name)
}
}

paramKeys := make([]string, 0)
if task.Parameters != "" {
params := parseAITaskParameters(task.Parameters)
if params != nil {
for i := 0; i < len(params.Parameter); i++ {
paramKeys = append(paramKeys, params.Parameter[i].Label)
}
}

}
code := GenerateSDKCode(datasetNames, pretrainModelNames, paramKeys, models.JobType(task.JobType))

n := 1
if task.WorkServerNumber > 1 {
n = task.WorkServerNumber
@@ -97,49 +123,48 @@ func buildAITaskInfo(task *models.Cloudbrain, creator *models.User, config *enti
baseConfig = config.BaseConfig
}
return &entity.AITaskDetailInfo{
ID: task.ID,
JobID: task.JobID,
Status: task.Status,
DetailedStatus: task.DetailedStatus,
JobType: task.JobType,
DisplayJobName: task.DisplayJobName,
FormattedDuration: task.TrainJobDuration,
ComputeSource: task.GetStandardComputeSource(),
PreVersionName: task.PreVersionName,
CurrentVersionName: task.VersionName,
WorkServerNumber: n,
Spec: convert.ToSpecification(spec),
DatasetList: datasets,
PretrainModelList: pretrainModelList,
AICenter: task.AiCenter,
BootFile: task.BootFile,
Cluster: string(entity.GetClusterTypeFromCloudbrainType(task.Type)),
Parameters: parseAITaskParameters(task.Parameters),
CreatedUnix: task.CreatedUnix,
CodePath: baseConfig.GetContainerPath(entity.ContainerCode),
DatasetPath: baseConfig.GetContainerPath(entity.ContainerDataset),
PretrainModelPath: baseConfig.GetContainerPath(entity.ContainerPreTrainModel),
OutputPath: baseConfig.GetContainerPath(entity.ContainerOutPutPath),
CodeUrl: task.RemoteCodeUrl,
PretrainModelName: task.ModelName,
PretrainModelVersion: task.ModelVersion,
PretrainCkptName: task.CkptName,
PretrainModelUrl: task.PreTrainModelUrl,
PretrainModelId: task.ModelId,
StartTime: task.StartTime,
EndTime: task.EndTime,
Description: task.Description,
FailedReason: task.FailedReason,
CommitID: task.CommitID,
BranchName: task.BranchName,
ImageName: imageName,
ImageID: imageId,
ImageUrl: imageUrl,
CreatorName: creator.GetDisplayName(),
EngineName: task.EngineName,
UserId: task.UserID,
AppName: task.AppName,
HasInternet: task.HasInternet,
ID: task.ID,
JobID: task.JobID,
Status: task.Status,
DetailedStatus: task.DetailedStatus,
JobType: task.JobType,
DisplayJobName: task.DisplayJobName,
FormattedDuration: task.TrainJobDuration,
ComputeSource: task.GetStandardComputeSource(),
PreVersionName: task.PreVersionName,
CurrentVersionName: task.VersionName,
WorkServerNumber: n,
Spec: convert.ToSpecification(spec),
DatasetList: datasets,
PretrainModelList: pretrainModelList,
SDKCode: code,
AICenter: task.AiCenter,
BootFile: task.BootFile,
Cluster: string(entity.GetClusterTypeFromCloudbrainType(task.Type)),
Parameters: parseAITaskParameters(task.Parameters),
CreatedUnix: task.CreatedUnix,
CodePath: baseConfig.GetContainerPath(entity.ContainerCode),
DatasetPath: baseConfig.GetContainerPath(entity.ContainerDataset),
PretrainModelPath: baseConfig.GetContainerPath(entity.ContainerPreTrainModel),
OutputPath: baseConfig.GetContainerPath(entity.ContainerOutPutPath),
CodeUrl: task.RemoteCodeUrl,
PretrainModelName: task.ModelName,
PretrainModelUrl: task.PreTrainModelUrl,
PretrainModelId: task.ModelId,
StartTime: task.StartTime,
EndTime: task.EndTime,
Description: task.Description,
FailedReason: task.FailedReason,
CommitID: task.CommitID,
BranchName: task.BranchName,
ImageName: imageName,
ImageID: imageId,
ImageUrl: imageUrl,
CreatorName: creator.GetDisplayName(),
EngineName: task.EngineName,
UserId: task.UserID,
AppName: task.AppName,
HasInternet: task.HasInternet,
}, nil
}

@@ -268,7 +293,7 @@ func UpdateByQueryResponse(res *entity.QueryTaskResponse, task *models.Cloudbrai
if res.DetailedStatus == "" || res.DetailedStatus == res.Status {
task.DetailedStatus = DEFAULT_DETAILED_STATUS
}
if res.StartedAt > 0 {
if res.StartedAt > 0 && task.StartTime == 0 {
task.StartTime = res.StartedAt
}
if res.StartedAt > 0 && res.CompletedAt > 0 {


+ 1
- 1
templates/admin/cloudbrain/imagecommit.tmpl View File

@@ -67,7 +67,7 @@
</div>
<div class="inline required field">
<label class="label_color" for="">{{$.i18n.Tr "repo.images"}}</label>
<input type="text" name="place" required placeholder="{{$.i18n.Tr "cloudbrain.input_mirror"}}" style="width: 80%;" maxlength="100">
<input type="text" name="place" required placeholder="{{$.i18n.Tr "cloudbrain.input_mirror"}}" style="width: 80%;" maxlength="300">
</div>
<div class="inline required field">


+ 12
- 7
templates/repo/datasets/index.tmpl View File

@@ -145,6 +145,11 @@
</style>
<div class="repository">
{{template "repo/header" .}}
<div class="ui container" style="position:relative;">
<div style="display:inline-block;position:absolute;right:{{if.dataset}}0{{else}}120px{{end}};top:10px;z-index:2">
<div id="__dataset-code-dialog"></div>
</div>
</div>
{{if .dataset}}
<div id="dataset-range-value" data-num-stars="{{.dataset.NumStars}}" data-star-active="{{$.IsStaringDataset}}"
style="display: none;">
@@ -283,11 +288,9 @@
<div class="ui grid stackable item" id="{{.UUID}}">
<div class="row">
<!-- 数据集名称 -->

<div class="four wide column" style="width: 24% !important;display: flex;align-items: center;">
<div class="four wide column" style="width: 24% !important;display: flex;align-items: center;visibility:hidden">
<el-tooltip class="item" effect="dark" placement="top" popper-class="diy-popper">
<div slot="content"><span class="wrap">

{{if ne .DecompressState -1}}{{$.i18n.Tr "dataset.unzip_status"}}:{{if eq .DecompressState 1}}{{$.i18n.Tr "dataset.unzip_successed"}}{{else if eq .DecompressState 0 2}}{{$.i18n.Tr "dataset.unzip_stared"}}{{else}}{{$.i18n.Tr "dataset.unzip_failed"}}{{end}}
&nbsp;&nbsp;{{end}}<i
class="ri-download-line"></i>{{$.i18n.Tr "dataset.download"}}:{{.DownloadCount}}
@@ -338,14 +341,14 @@
{{.CreatedUnix | TimeSinceUnix1}}
</div>
<div class="four wide column text right">
<div class="ui compact buttons">

<div class="ui compact buttons" style="display:none">
<a class="ui basic blue button" href="{{.DownloadURL}}">{{$.i18n.Tr "dataset.download"}}</a>

{{if eq .DecompressState 1}}
<a class="ui basic blue button" href="datasets/dirs/{{.UUID}}?type={{$.Type}}"
data-tooltip='{{$.i18n.Tr "dataset.directory"}}'>{{$.i18n.Tr "preview"}}</a>
{{end}}
<span class="ui basic blue button" @click="copyUseCode('{{.Name}}')">
{{$.i18n.Tr "dataset.copy_code"}}</span>
{{if and (.CanDel) (not $.Repository.IsPrivate)}}
<span class="ui basic blue button" style="color: #13c28d !important;"
@click="setPrivate('{{.UUID}}',false,{{$k}})"
@@ -436,4 +439,6 @@
</div>
{{template "base/delete_modal_actions" .}}
</div>
{{template "base/footer" .}}
{{template "base/footer" .}}
<link rel="stylesheet" href="{{StaticUrlPrefix}}/css/vp-dataset-code-dialog.css?v={{MD5 AppVer}}" />
<script src="{{StaticUrlPrefix}}/js/vp-dataset-code-dialog.js?v={{MD5 AppVer}}"></script>

+ 0
- 1
templates/repo/debugjob/index.tmpl View File

@@ -1,6 +1,5 @@
{{template "base/head" .}}
<link rel="stylesheet" href="{{StaticUrlPrefix}}/css/vp-cloudbrain-list.css?v={{MD5 AppVer}}" />
<script>const Tasks = {{.Tasks}}; </script>
<div class="repository release dataset-list view">
{{template "repo/header" .}}
<div id="__vue-root"></div>


+ 5
- 15
templates/repo/home.tmpl View File

@@ -174,26 +174,12 @@
class="ui compact basic button">{{if .PullRequestCtx.Allowed}}{{.i18n.Tr "repo.pulls.compare_changes"}}{{else}}{{.i18n.Tr "action.compare_branch"}}{{end}}</button>
</a>
{{if and .Repository.IsFork .PullRequestCtx.Allowed}}
{{if gt .FetchUpstreamCnt 0 }}
<a
href="{{.Repository.Link}}/compare/{{.BranchName | EscapePound}}...{{.BaseRepo.Owner.Name}}:{{if .UpstreamSameBranchName}}{{.BranchName | EscapePound}}{{else}}{{.BaseRepo.DefaultBranch | EscapePound}}{{end}}">
<button id="new-pull-request" class="ui compact basic button"
title="{{$.i18n.Tr (TrN $.i18n.Lang .FetchUpstreamCnt "repo.pulls.commits_count_1" "repo.pulls.commits_count_n") .FetchUpstreamCnt}}">{{.i18n.Tr "repo.pulls.fetch_upstream"}}</button>
</a>
{{else if lt .FetchUpstreamCnt 0}}
<a
href="{{.Repository.Link}}/compare/{{.BranchName | EscapePound}}...{{.BaseRepo.Owner.Name}}:{{.BaseRepo.DefaultBranch | EscapePound}}">
<button id="new-pull-request" class="ui compact basic button"
title="{{.i18n.Tr "repo.pulls.upstream_error"}}">{{.i18n.Tr "repo.pulls.fetch_upstream"}}</button>
</a>
{{else}}
<a
href="{{.Repository.Link}}/compare/{{.BranchName | EscapePound}}...{{.BaseRepo.Owner.Name}}:{{if .UpstreamSameBranchName}}{{.BranchName | EscapePound}}{{else}}{{.BaseRepo.DefaultBranch | EscapePound}}{{end}}">
<button id="new-pull-request" class="ui compact basic button"
title="{{.i18n.Tr "repo.pulls.upstream_up_to_date"}}">{{.i18n.Tr "repo.pulls.fetch_upstream"}}</button>
title="{{.i18n.Tr "repo.pulls.fetch_upstream"}}">{{.i18n.Tr "repo.pulls.fetch_upstream"}}</button>
</a>
{{end}}
{{end}}
</div>
{{end}}
{{else}}
@@ -390,6 +376,10 @@
{{end}}
</div>
</div>
<div style="padding-top:16px;clear:both;">
<a class="use-dlg-btn" style="text-decoration:underline;"
href="https://openi.pcl.ac.cn/docs/index.html#/cloudbrain/codepath">{{.i18n.Tr "repo.code_use_resource"}}</a>
</div>
</div>
{{end}}
</div>


+ 13
- 1
templates/reward/point/rule.tmpl View File

@@ -79,7 +79,7 @@
<td class="t-center">导入新模型</td>
<td class="t-center point">-</td>
<td class="t-center"><span class="typ">每日</span>积分获取上限<span class="limit"> - </span></td>
<td>请注意模型质量,请勿重复导入相同模型,任何非常规的以导入新模型去获取 积分的行为将被认定为积分舞弊,将扣除所有积分。</td>
<td>请注意模型质量,请勿重复导入相同模型,任何非常规的以导入新模型去获取积分的行为将被认定为积分舞弊,将扣除所有积分。</td>
</tr>
<tr key="CreateCloudbrainTask">
<td class="t-center">每日运行云脑任务</td>
@@ -104,6 +104,18 @@
<td class="t-center point">-</td>
<td class="t-center"><span class="typ">累计</span>积分获取上限<span class="limit"> - </span></td>
<td>首次更换头像,获得积分。</td>
</tr>
<tr key="TaskInviteFriendRegister">
<td class="t-center">邀请好友</td>
<td class="t-center point">-</td>
<td class="t-center"><span class="typ">累计</span>积分获取上限<span class="limit"> - </span></td>
<td>邀请好友获得积分。</td>
</tr>
<tr key="TaskInviteFriendRegister">
<td class="t-center">邀请好友</td>
<td class="t-center point">-</td>
<td class="t-center"><span class="typ">累计</span>积分获取上限<span class="limit"> - </span></td>
<td>邀请好友获得积分。</td>
</tr>
</table>
</div>


+ 1
- 0
templates/user/auth/activate.tmpl View File

@@ -1,4 +1,5 @@
{{template "base/head" .}}
<script>window.IsActivatePage = true;</script>
<div class="user activate">
<div class="ui middle very relaxed page grid">
<div class="column">


+ 1
- 0
templates/user/auth/bind_phone.tmpl View File

@@ -1,4 +1,5 @@
{{template "base/head" .}}
<script>window.IsBindPhonePage = true;</script>
<div class="user bindphone forgot password" style="margin-top: 20px;">
<div class="ui middle very relaxed page grid">
<div class="column">


+ 1
- 1
templates/user/dashboard/dashboard.tmpl View File

@@ -101,7 +101,7 @@
.ui.placeholder.segment {
min-height: 15rem !important;
}
.line{
.bgtask-none.line{
border-top: 1px solid rgba(187, 187, 187, 0.5) !important;
margin-top: 20px !important;
}

+ 3
- 3
web_src/js/features/clipboard.js View File

@@ -1,9 +1,9 @@
export default async function initClipboard() {
const els = document.querySelectorAll(".clipboard");
export default async function initClipboard(elements) {
const els = elements || document.querySelectorAll(".clipboard");
if (!els || !els.length) return;

const { default: ClipboardJS } = await import(
/* webpackChunkName: "clipboard" */ "clipboard"
/* webpackChunkName: "clipboardjs" */ "clipboard"
);

const clipboard = new ClipboardJS(els);


+ 198
- 0
web_src/js/features/globalModalDlg.js View File

@@ -0,0 +1,198 @@
import SparkMD5 from "spark-md5";
import initClipboard from "./clipboard.js";
import highlight from "./highlight.js";

; (function () {
const WAIT_COUNT = 10;
const exceptPages = ['/home/term'];
const csrf = window.config.csrf;
const mdConfigReg = /^\---\n((.|\s)*?)\n---\n/;
let userID = '';
let curStorageKey = '';
const lang = document.querySelector('html').getAttribute('lang');

function init() {
userID = $('meta[name="_uid"]').attr('content');
if (!userID) { // not signed in, do not show
return;
}
curStorageKey = `g-models-${userID}`;
var pathName = window.location.pathname;
if (exceptPages.indexOf(pathName) > -1 || window.IsActivatePage || window.IsBindPhonePage) return; // excluded pages, do not show
$.ajax({
type: "GET",
url: "/dashboard/invitation",
dataType: "json",
data: { filename: `tips/global/change${lang == 'zh-CN' ? '' : '_en'}.md` },
success: function (res) {
try {
res && renderMdStr(res);
} catch (err) {
console.log(err);
}
},
error: function (err) {
console.log(err);
}
});
delete window.globalModalInit;
}

function renderMdStr(str) {
if (!str) return;
const hash = SparkMD5.hash(str);
const configs = parseMdConfigs(str);
str = str.replace(mdConfigReg, '');
const dataInfoStr = window.localStorage.getItem(curStorageKey) || '{}';
let dataInfoObj = JSON.parse(dataInfoStr);
var showOr = dataInfoObj[hash] === false ? false : true;
dataInfoObj[hash] = showOr;
configs.hash = hash;
if (!showOr) return;
$.ajax({
type: "POST",
url: `/api/v1/markdown?_csrf=${csrf}`,
data: {
mode: 'gfm',
text: str,
},
success: function (res) {
try {
if (res) {
createDialog(res, configs);
window.config.HighlightJS = true;
$('.__g-modal pre code').each((function () {
highlight(this);
}));
setTimeout(() => {
initClipboard('.__g-modal .clipboard');
$('.__g-modal .clipboard').popup('hide');
}, 300);
}
} catch (err) {
console.log(err);
}
},
error: function (err) {
console.log(err);
}
});
}

function sparkMD5Hash(str = '') {
return SparkMD5.hash(str) + Math.random().toString().replace('0.', '');
}

function parseMdConfigs(mdStr) {
const obj = {};
const regex = mdConfigReg;
const res = mdStr.match(regex);
if (res && res[1]) {
const str = res[1];
const rows = str.split('\n');
for (let i = 0, iLen = rows.length; i < iLen; i++) {
const row = rows[i];
const col = row.split(':');
if (col.length > 1) {
obj[col[0].trim()] = col[1].trim();
}
}
}
return obj
}

function insertCodeCopyBtn(htmlStr) {
const html = $(htmlStr);
const codeBlocks = html.find('.code-block');
for (let i = 0, iLen = codeBlocks.length; i < iLen; i++) {
const codeBlockI = codeBlocks.eq(i);
const txt = codeBlockI.text();
const copyBtn = $(`<div class="copy-btn"><a
href="javascript:;" class="ui poping up clipboard" id="clipboard-${sparkMD5Hash(txt)}"
data-position="top center" data-variation="inverted tiny" data-success="${window.i18n.cloudeBrainMirror.copy_succeeded}"
data-content="${window.i18n.cloudeBrainMirror.copy}"
data-original="${window.i18n.cloudeBrainMirror.copy}"
data-clipboard-text=""><i style="font-size:14px;" class="copy outline icon"></i></a></div>`);
copyBtn.find('a').attr('data-clipboard-text', txt);
codeBlockI[0].outerHTML = `<div class="code-content">${codeBlockI[0].outerHTML}${copyBtn[0].outerHTML}</div>`;
}
return html.html();
}

function createDialog(html, configs) {
const dataInfoStr = window.localStorage.getItem(curStorageKey) || '{}';
let dataInfoObj = JSON.parse(dataInfoStr);
const hash = configs.hash;
function startCount(modelEl, count) {
var timer = setInterval(function () {
count--;
modelEl.data('count', count);
if (count <= 0) {
modelEl.find('.button.positive').removeClass('disabled');
modelEl.find('.count-down-c').hide();
clearInterval(timer);
} else {
modelEl.find('.count-down-c .count-down').text(count);
}
}, 1000);
modelEl.data('timer', timer);
}
const renderHtml = insertCodeCopyBtn(html);
var showOr = dataInfoObj[hash] === false ? false : true;
dataInfoObj[hash] = showOr;
if (!showOr || !renderHtml) return;
var el = $(`
<div class="ui longer large modal __g-modal" _id="g-modal-${hash}">
<div class="header" style="line-height:0.7em;font-size:1.2em;">${configs.title || window.i18n.warmPrompt}</div>
<style>
.__g-modal .code-content {position:relative;}
.__g-modal .code-block {position:relative;}
.__g-modal .copy-btn {position:absolute;right:4px;top:4px;}
</style>
<div class="content scrolling markdown">
${renderHtml}
</div>
<div class="actions" style="text-align:center;">
<div style="padding:0.78571429em 0.78571429em 0.78571429em;height:36px;display: inline-block;">
<div class="ui checkbox" >
<input type="checkbox" name="notRemindAgain">
<label>${window.i18n.notRemind}</label>
</div>
</div>
<div class="ui positive button ${WAIT_COUNT ? 'disabled' : ''}">
${window.i18n.close}${WAIT_COUNT ? `<span class="count-down-c"> (<span class="count-down">${WAIT_COUNT}</span>S)</span>` : ''}
</div>
</div>
</div>`);
$('body').append(el);
var modelEl = $(`.ui.modal[_id="g-modal-${hash}"]`);
modelEl.modal({
closable: false,
onDeny: function () { return false; },
onApprove: function (trigger) {
var modelEl = $(trigger).closest('.__g-modal');
var count = modelEl.data('count');
var indexNum = modelEl.data('index-num');
var notRemindAgain = modelEl.find('input[name="notRemindAgain"]').prop('checked');
if (Number(count) <= 0) {
if (notRemindAgain) {
const dataInfoStr = window.localStorage.getItem(curStorageKey) || '{}';
let dataInfoObj = JSON.parse(dataInfoStr);
dataInfoObj[indexNum] = false;
window.localStorage.setItem(curStorageKey, JSON.stringify(dataInfoObj));
}
}
}
}).modal('show');
modelEl.data('index-num', hash);
startCount(modelEl, WAIT_COUNT);
window.localStorage.setItem(curStorageKey, JSON.stringify(dataInfoObj));
}

window.globalModalInit = init;
setTimeout(function () {
if ($('.modal.network-security').hasClass('scale') || $('.modal.network-security').hasClass('active'))
return;
init();
}, 0);
})();

+ 1
- 1
web_src/js/features/highlight.js View File

@@ -3,7 +3,7 @@ export default async function highlight(elementOrNodeList) {
const nodes = 'length' in elementOrNodeList ? elementOrNodeList : [elementOrNodeList];
if (!nodes.length) return;

const {default: Worker} = await import(/* webpackChunkName: "highlight" */'./highlight.worker.js');
const {default: Worker} = await import(/* webpackChunkName: "highlight.worker" */'./highlight.worker.js');
const worker = new Worker();

worker.addEventListener('message', ({data}) => {


+ 10
- 0
web_src/js/features/i18nVue.js View File

@@ -144,6 +144,9 @@ export const i18nVue = {
scrolled_logs_bottom: '您已翻阅至日志底部',
scrolled_logs_bottom_pls_retry: '您已翻阅至日志底部,请稍后再试!',
computeNode: '计算节点',
notRemind: '不再提醒',
close: '关闭',
warmPrompt: '温馨提示',

cloudeBrainMirror: {
cloud_brain_mirror: '云脑镜像',
@@ -170,7 +173,9 @@ export const i18nVue = {
delete: '删除',
my_favorite_mirror: '我收藏的镜像',
more:'更多',
copy: '复制',
copy_succeeded: '复制成功!',
copy_failed: '复制失败!',
cancel_recommendation: '取消推荐',
set_as_recommended: '设为推荐',
create_cloud_brain_mirror: '创建云脑镜像',
@@ -369,6 +374,9 @@ export const i18nVue = {
scrolled_logs_bottom: 'You have scrolled to the bottom of the log',
scrolled_logs_bottom_pls_retry: 'You have scrolled to the bottom of the log, please try again later!',
computeNode: 'Compute Node',
notRemind: 'Don\'t remind again',
close: 'Close',
warmPrompt: 'Tips',

cloudeBrainMirror: {
cloud_brain_mirror: 'Cloud Brain Mirror',
@@ -395,7 +403,9 @@ export const i18nVue = {
delete: 'Delete',
my_favorite_mirror: 'My Favorite Mirror',
more:'More',
copy: 'Copy',
copy_succeeded: 'Copy succeeded!',
copy_failed: 'Copy failed',
cancel_recommendation: 'Cancel recommendation',
set_as_recommended: 'Set as recommended',
create_cloud_brain_mirror: 'Create cloud brain mirror',


+ 47
- 13
web_src/js/index.js View File

@@ -58,6 +58,7 @@ import { Message } from "element-ui";

import { i18nVue } from "./features/i18nVue.js";
import './features/ad.js';
import './features/globalModalDlg.js';
import { Fancybox } from "./vendor/fancybox.esm.js";
import "../../public/iconfonts1/iconfont";

@@ -95,7 +96,7 @@ $.fn.tab.settings.silent = true;
const viewHeight = window.innerHeight || document.documentElement.clientHeight || ''
const {top,right,bottom,left} = element.getBoundingClientRect()
return (top>=0 && left>=0 && right<viewWidth && bottom<=viewHeight)
}
}
function clickHandler() {
if(!this.parentElement.classList.contains('hidden')) {
this.parentElement.classList.add('hidden')
@@ -125,7 +126,7 @@ $.fn.tab.settings.silent = true;
}, 50)
}
window.initMarkdownCatalog = function() {
if ($('.file-view.markdown').length && $('.file-view.markdown').length > 0) {
if ($('.file-view.markdown').length && $('.file-view.markdown').length > 0) {
const navList = document.querySelectorAll('.markdown_toc .catalog-li')
const toggleIcon = document.getElementsByClassName('toggle-container')
toggleIcon[0].removeEventListener('click', clickHandler)
@@ -4030,23 +4031,55 @@ function initVueDataset() {
this.descfile = dataset_file_desc;
this.repolink = repolink;
this.datasetType = datasetType;
$('.dataset .four.wide.column').css('visibility', 'visible');
$('.dataset .ui.compact.buttons').show();
},
methods: {
copyUrl(url) {
const cInput = document.createElement("input");
const cInput = document.createElement("textarea");
cInput.value = url;
document.body.appendChild(cInput);
cInput.select();
document.execCommand("Copy");
cInput.remove();
$("body").toast({
message: "复制成功!",
message: i18n['cloudeBrainMirror']['copy_succeeded'],
showProgress: "bottom",
showIcon: "check circle",
class: "info",
position: "top right",
});
},
copyUseCode(name) {
const self = this;
$.ajax({
url: `/api/v1/ai_task/generate_sdk_code?dataset_name=${name}&_csrf=${csrf}`,
type: 'get',
data: { },
success: function(res) {
if (res.code == 0 && res.data && res.data.code) {
self.copyUrl(res.data.code);
} else {
$("body").toast({
message: i18n['cloudeBrainMirror']['copy_failed'],
showProgress: "bottom",
showIcon: "times circle circle",
class: "error",
position: "top right",
});
}
},
error: function(err) {
$("body").toast({
message: i18n['cloudeBrainMirror']['copy_failed'],
showProgress: "bottom",
showIcon: "times circle circle",
class: "error",
position: "top right",
});
}
})
},
handleCurrentChange(val) {
this.page = val;
switch (this.activeName) {
@@ -5252,7 +5285,7 @@ function initChartsNpu() {
url,
(res) => {
loadingEl && loadingEl.hide();
if (res.MetricsInfo) {
if (res.MetricsInfo) {
let filterDta = res.MetricsInfo.filter((item) => {
return ![
"recvBytesRate",
@@ -5312,11 +5345,11 @@ function initChartsNpu() {
}
const repoPath = $(this).data('path')
const versionName = $(this).data("version");
const chartEl = document.getElementById(`metric-${versionName}`);
const chartEl = document.getElementById(`metric-${versionName}`);
const multipleNode = $(chartEl).data('multiplenode');
const loadingEl = $(chartEl).siblings('.ui.dimmer');
const workServerNumber = $(chartEl).data('workservernumber');
const url = `${window.config.AppSubUrl}/api/v1/repos/${repoPath}`;
const workServerNumber = $(chartEl).data('workservernumber');
const url = `${window.config.AppSubUrl}/api/v1/repos/${repoPath}`;
const myCharts = echarts.init(chartEl);
if (multipleNode) {
const multipleNodeEl = $(chartEl).siblings('.node-select').show();
@@ -5406,10 +5439,10 @@ function initAddUsageAgreement() {
centered: false,
onShow: function () {
$('.ui.dimmer').css({ "background-color": "rgb(136, 136, 136,0.7)" })
$('.network-security .approve').addClass('disabled').text(`${updateTips} (${cutDown}s)`)
$('.network-security .approve').addClass('disabled').text(`${confirmTips} (${cutDown}s)`)
var timer = setInterval(function () {
cutDown--
$('.network-security .approve').addClass('disabled').text(`${updateTips} (${cutDown}s)`)
$('.network-security .approve').addClass('disabled').text(`${confirmTips} (${cutDown}s)`)
if (cutDown <= 0) {
$('.network-security .approve').removeClass('disabled').text(confirmTips)
clearInterval(timer);
@@ -5419,9 +5452,10 @@ function initAddUsageAgreement() {
onHide:function(){
},
onApprove: async function ($element) {
try {
try {
const res = await postSaveProtocolInfo(userName)
if (res?.data?.result_code === "0") {
window.globalModalInit && window.globalModalInit()
return true
} else {
return false
@@ -5441,8 +5475,8 @@ function initAddUsageAgreement() {
let params = {userName:userName}
return axios.post(`${AppSubUrl}/user/saveOtherInfo`,qs.stringify(params))
}
if ($('meta[name="_uid"]').length) {
if ($('meta[name="_uid"]').length && ['/home/term'].indexOf(window.location.pathname) < 0 && !window.IsActivatePage && !window.IsBindPhonePage) {
showLoginProtocolDialog($('meta[name="_uid"]').attr('content-ext'))
}
}
initAddUsageAgreement()
initAddUsageAgreement()

+ 1
- 1
web_src/less/_home.less View File

@@ -1715,7 +1715,7 @@ footer {
height: 130px;
width: 130px;
border: 1px solid rgba(157, 197, 226, 0.4);
box-shadow: rgb(157 197 226 / 20%) 0px 5px 10px 0px;
box-shadow: rgba(157, 197, 226, 20%) 0px 5px 10px 0px;
color: rgb(16, 16, 16);
border-radius: 6px;
display:flex;


+ 44
- 0
web_src/vuepages/apis/modules/common.js View File

@@ -11,6 +11,19 @@ export const getPromoteData = (filePathName) => {
});
}

// get the rendered markdown HTML
export const getMarkdownHtml = (str, mode) => {
return service({
url: '/api/v1/markdown',
method: 'post',
params: {},
data: {
mode: mode || 'gfm',
text: str,
},
});
}

// get the user's points account info
// return {pointAccount:{id,account_code,balance,total_earned,total_consumed,status,version,created_unix,updated_unix },cloudBrainPaySwitch}
export const getPointAccountInfo = () => {
@@ -21,3 +34,34 @@ export const getPointAccountInfo = () => {
});
}

// Get the SDK usage code from lists of dataset names, model names or run parameter keys
// dataset_name - string[], dataset name list
// pretrain_model_name - string[], model name list
// param_key - string[], parameter key list
// job_type, compute_source, cluster_type
export const getSDKCode = (params) => {
const datasetNames = params.dataset_name || [];
const modelNames = params.pretrain_model_name || [];
const parameterKeys = params.param_key || [];
const searchParams = new URLSearchParams();
datasetNames.forEach(name => {
searchParams.append('dataset_name', name);
});
modelNames.forEach(name => {
searchParams.append('pretrain_model_name', name);
});
parameterKeys.forEach(name => {
searchParams.append('param_key', name);
});
return service({
url: `/api/v1/ai_task/generate_sdk_code?${searchParams.toString()}`,
method: 'get',
params: {
job_type: params.job_type,
compute_source: params.compute_source,
cluster_type: params.cluster_type
},
data: {},
});
}


+ 161
- 0
web_src/vuepages/components/CommonTipsDialog.vue View File

@@ -0,0 +1,161 @@

<template>
<div class="common-tips-dlg">
<div class="trigger-container" @click="handlerOpen">
<slot></slot>
</div>
<BaseDialog :visible="visible" :title="title" :show-close="showClose" @closed="closeDialog"
:appendToBody="appendToBody">
<div class="markdown" v-html="content"></div>
<div slot="footer" class="dialog-footer">
<el-button class="close-btn" size="default" type="success" @click="closeDialog">{{
$t('cloudbrainObj.dialogTips.tips8') }}</el-button>
</div>
</BaseDialog>
</div>
</template>

<script>
import BaseDialog from '~/components/BaseDialog.vue';
import { getPromoteData, getMarkdownHtml, getSDKCode } from '~/apis/modules/common';
import SparkMD5 from "spark-md5";
import hljs from 'highlight.js';
import { initClipboard } from '~/utils';
export default {
name: "CommonTipsDialog",
components: { BaseDialog },
props: {
title: { type: String, default: 'Title' },
promotePath: { type: String, default: '' },
appendToBody: { type: Boolean, default: false },
type: { type: String, default: '' },
data: { type: Object, default: () => ({}) }
},
data() {
return {
showClose: true,
visible: false,
loading: false,
content: '',
}
},
methods: {
sparkMD5Hash(str = '') {
return SparkMD5.hash(str) + Math.random().toString().replace('0.', '');
},
hljsAndInsertCopyButton(htmlStr) {
const html = document.createElement('div');
html.innerHTML = htmlStr;
const codeBlocks = html.querySelectorAll('.code-block')
for (let i = 0, iLen = codeBlocks.length; i < iLen; i++) {
const codeBlockI = codeBlocks[i];
const txt = codeBlockI.textContent;
const codeEl = codeBlockI.querySelector('code');
const code = codeEl.textContent;
codeEl.innerHTML = hljs.highlight('python', code).value;
const copyBtn = document.createElement('div');
copyBtn.className = 'copy-btn';
copyBtn.innerHTML = `<a href="javascript:;" class="ui poping inline up clipboard" id="clipboard-${this.sparkMD5Hash(txt)}"
data-position="top center" data-variation="inverted tiny" data-success="${this.$t('copySuccess')}"
data-content="${this.$t('copy')}" data-original="${this.$t('copy')}"
data-clipboard-text=""><i style="font-size:14px;" class="copy outline icon"></i></a>`;
copyBtn.querySelector('a').setAttribute('data-clipboard-text', txt);
codeBlockI.outerHTML = `<div class="code-content">${codeBlockI.outerHTML}${copyBtn.outerHTML}</div>`;
}
return html.innerHTML;
},
getMarkdown(str) {
getMarkdownHtml(str).then(res => {
const html = res.data;
this.content = this.hljsAndInsertCopyButton(html);
this.$nextTick(() => {
initClipboard('.base-dlg .clipboard');
});
}).catch(err => {
console.log(err);
});
},
getContent() {
if (!this.promotePath) return;
this.loading = true;
getPromoteData(this.promotePath).then(res => {
this.loading = false;
let contentStr = res.data;
if (contentStr) {
if (this.type == 'model' && this.data.name) {
getSDKCode({
pretrain_model_name: [this.data.name],
}).then(res => {
res = res.data;
if (res.code == 0 && res.data && res.data.code) {
contentStr += `\n\`\`\`python\n${res.data.code}\`\`\`\n`;
}
this.getMarkdown(contentStr);
}).catch(err => {
console.log(err);
})
} else {
this.getMarkdown(contentStr);
}
}
}).catch(err => {
this.loading = false;
console.log(err);
});
},
handlerOpen() {
this.getContent();
this.visible = true;
},
closeDialog() {
this.visible = false;
setTimeout(() => {
this.content = '';
}, 300);
}
},
mounted() {
},
}
</script>

<style scoped lang="less">
.trigger-container {
display: inline-block;
}

.markdown {
font-size: 14px;
max-height: 65vh;
overflow: auto;
padding-left: 20px;
padding-right: 20px;

/deep/ .code-content {
position: relative;

.copy-btn {
position: absolute;
right: 4px;
top: 4px;
}
}
}

.dialog-footer {
text-align: center;

.close-btn {
color: #fff;
background-color: #21ba45;
border-color: #21ba45;
font-size: 1rem;
font-weight: 700;
}
}
</style>
<style>
.ui.popup {
z-index: 2003;
}
</style>
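
A possible parent usage of this component. Only the prop names come from the component above; promote-path and the model name are illustrative assumptions.

<!-- Hypothetical usage: the default slot is the click trigger. With type="model" and a
     model name in :data, the SDK code returned by getSDKCode is appended to the
     promoted markdown before rendering. -->
<CommonTipsDialog
  title="Model usage tips"
  type="model"
  promote-path="model/how_to_use"
  :data="{ name: currentModelName }"
  :append-to-body="true">
  <a href="javascript:;">How to use</a>
</CommonTipsDialog>

On open, the component fetches the markdown via getPromoteData(promotePath), highlights each .code-block with highlight.js, and wires the injected copy buttons through initClipboard.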

+ 11
- 8
web_src/vuepages/components/cloudbrain/AIEngineSelect.vue View File

@@ -1,14 +1,17 @@
<template>
<div class="form-row">
<div class="title align-items-center" v-if="showTitle"><span class="required">{{ 'AI引擎' }}</span></div>
<div class="content">
<el-select class="engine-type-sel field-input" v-model="engineType" @change="handleEngineTypeChange">
<el-option v-for="item in engineTypeList" :key="item.id" :value="item.id" :label="item.name"></el-option>
</el-select>
<el-select class="engine-sel field-input" v-model="engine" @change="handleEngineChange">
<el-option v-for="item in engineList" :key="item.id" :value="item.id" :label="item.name"></el-option>
</el-select>
<div class="left-area">
<div class="title align-items-center" v-if="showTitle"><span class="required">{{ 'AI引擎' }}</span></div>
<div class="content">
<el-select class="engine-type-sel field-input" v-model="engineType" @change="handleEngineTypeChange">
<el-option v-for="item in engineTypeList" :key="item.id" :value="item.id" :label="item.name"></el-option>
</el-select>
<el-select class="engine-sel field-input" v-model="engine" @change="handleEngineChange">
<el-option v-for="item in engineList" :key="item.id" :value="item.id" :label="item.name"></el-option>
</el-select>
</div>
</div>
<div class="right-area"></div>
</div>
</template>



+ 16
- 13
web_src/vuepages/components/cloudbrain/AlgBechmarkType.vue View File

@@ -1,21 +1,24 @@
<template>
<div class="form-row">
<div class="title align-items-center" v-if="showTitle">
<span :class="required ? 'required' : ''">{{ '评测类型' }}</span>
</div>
<div class="content" :class="errStatus ? 'error' : ''">
<div class="content-l">
<el-select class="field-input" v-model="mainType" @change="handleInputChange">
<el-option v-for="item in mainTypeList" :key="item.key" :value="item.key" :label="item.value"></el-option>
</el-select>
<div class="left-area">
<div class="title align-items-center" v-if="showTitle">
<span :class="required ? 'required' : ''">{{ '评测类型' }}</span>
</div>
<div class="content-r">
<div class="title align-items-center"><span class="required">{{ '子类型' }}</span></div>
<el-select class="field-input" v-model="childType" @change="handleInputChange">
<el-option v-for="item in childTypeList" :key="item.key" :value="item.key" :label="item.value"></el-option>
</el-select>
<div class="content" :class="errStatus ? 'error' : ''">
<div class="content-l">
<el-select class="field-input" v-model="mainType" @change="handleInputChange">
<el-option v-for="item in mainTypeList" :key="item.key" :value="item.key" :label="item.value"></el-option>
</el-select>
</div>
<div class="content-r">
<div class="title align-items-center"><span class="required">{{ '子类型' }}</span></div>
<el-select class="field-input" v-model="childType" @change="handleInputChange">
<el-option v-for="item in childTypeList" :key="item.key" :value="item.key" :label="item.value"></el-option>
</el-select>
</div>
</div>
</div>
<div class="right-area"></div>
</div>
</template>



+ 13
- 40
web_src/vuepages/components/cloudbrain/BootFile.vue View File

@@ -1,11 +1,14 @@
<template>
<div class="form-row">
<div class="title align-items-center" v-if="showTitle">
<span :class="required ? 'required' : ''">{{ $t('modelManage.bootFile') }}</span>
</div>
<div class="content" :class="errStatus ? 'error' : ''">
<el-input class="field-input" v-model="currentValue" :placeholder="$t('bootPlaceholder')" @input="handleInput"
@change="handleInputChange" @keyup.native="$event.target.value=$event.target.value.replace(/^\s+|\s+$/gm,'')"></el-input>
<div class="left-area">
<div class="title align-items-center" v-if="showTitle">
<span :class="required ? 'required' : ''">{{ $t('modelManage.bootFile') }}</span>
</div>
<div class="content" :class="errStatus ? 'error' : ''">
<el-input class="field-input" v-model="currentValue" :placeholder="$t('bootPlaceholder')" @input="handleInput"
@change="handleInputChange"
@keyup.native="$event.target.value = $event.target.value.replace(/^\s+|\s+$/gm, '')"></el-input>
</div>
</div>
<div class="right-area">
<div class="btn-select">
@@ -52,16 +55,16 @@ export default {
this.errStatus = false;
} else {
const reg = /.+\.py$/;
this.errStatus = !reg.test(this.currentValue.replace(/^\s+|\s+$/gm,''));
this.errStatus = !reg.test(this.currentValue.replace(/^\s+|\s+$/gm, ''));
}
return !this.errStatus;
},
handleInput(value) {
this.currentValue = value.replace(/^\s+|\s+$/gm,'');
this.$emit('input', value.replace(/^\s+|\s+$/gm,''));
this.currentValue = value.replace(/^\s+|\s+$/gm, '');
this.$emit('input', value.replace(/^\s+|\s+$/gm, ''));
},
handleInputChange(value) {
this.$emit('change', value.replace(/^\s+|\s+$/gm,''));
this.$emit('change', value.replace(/^\s+|\s+$/gm, ''));
this.check();
},
},
@@ -72,34 +75,4 @@ export default {

<style scoped lang="less">
@import 'cloudbrain.less';

.form-row {
.content {
flex: inherit;
width: 50%;
margin-right: 5px;
}

.right-area {
display: flex;
align-items: center;

.btn-select {
display: flex;
justify-content: center;
align-items: center;
cursor: pointer;
color: rgb(3, 102, 214);

i {
margin-right: 4px;
color: rgba(0, 0, 0, .87);
}
}

a {
color: #0366d6;
}
}
}
</style>
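
For reference, the sanitising and validation used above behave as follows (a small sketch, not part of the diff):

// The g+m regex trims leading/trailing whitespace on every line of the input.
' train.py '.replace(/^\s+|\s+$/gm, '');   // -> 'train.py'

// The boot file must be a non-empty path ending in .py.
const reg = /.+\.py$/;
reg.test('train.py');      // -> true
reg.test('scripts/run');   // -> false, errStatus is set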

+ 10
- 7
web_src/vuepages/components/cloudbrain/BranchName.vue View File

@@ -1,13 +1,16 @@
<template>
<div class="form-row">
<div class="title align-items-center">
<span class="required">{{ $t('cloudbrainObj.codeBranch') }}</span>
</div>
<div class="content" :class="errStatus ? 'error' : ''">
<el-select class="field-input" v-model="currentValue" @change="handleChange">
<el-option v-for="item in branches" :key="item" :value="item" :label="item"></el-option>
</el-select>
<div class="left-area">
<div class="title align-items-center">
<span class="required">{{ $t('cloudbrainObj.codeBranch') }}</span>
</div>
<div class="content" :class="errStatus ? 'error' : ''">
<el-select class="field-input" v-model="currentValue" @change="handleChange">
<el-option v-for="item in branches" :key="item" :value="item" :label="item"></el-option>
</el-select>
</div>
</div>
<div class="right-area"></div>
</div>
</template>



+ 11
- 14
web_src/vuepages/components/cloudbrain/DatasetSelect.vue View File

@@ -1,14 +1,16 @@
<template>
<div class="form-row">
<div class="title align-items-center" v-if="showTitle">
<span :class="required ? 'required' : ''">{{ $t('cloudbrainObj.dataset') }}</span>
</div>
<div class="content" :class="errStatus ? 'error' : ''">
<div class="dataset-list-c">
<div class="dataset-item" v-for="(item) in selectList" :key="item.id" :title="item.name"> {{ item.name }};
</div>
<div v-if="selectList.length == 0" class="dataset-item-placeholder">
{{ $t('datasetObj.dataset_select_placeholder') }}
<div class="left-area">
<div class="title align-items-center" v-if="showTitle">
<span :class="required ? 'required' : ''">{{ $t('cloudbrainObj.dataset') }}</span>
</div>
<div class="content" :class="errStatus ? 'error' : ''">
<div class="dataset-list-c">
<div class="dataset-item" v-for="(item) in selectList" :key="item.id" :title="item.name"> {{ item.name }};
</div>
<div v-if="selectList.length == 0" class="dataset-item-placeholder">
{{ $t('datasetObj.dataset_select_placeholder') }}
</div>
</div>
</div>
</div>
@@ -333,10 +335,6 @@ export default {

.form-row {
.content {
flex: inherit;
width: 50%;
margin-right: 5px;

.dataset-list-c {
flex: 1;
min-height: 37.6px;
@@ -373,7 +371,6 @@ export default {
}

.right-area {
// width: 250px;
display: flex;
align-items: center;



+ 44
- 34
web_src/vuepages/components/cloudbrain/FormTop.vue View File

@@ -1,52 +1,61 @@
<template>
<div>
<div class="form-row" v-if="configs.showBenchmarkMode === true">
<div class="title align-items-center"><span class="required">评测场景</span></div>
<div class="content">
<div class="list">
<a class="item" :href="`/${repoOwnerName}/${repoName}/${item.url}`"
:class="item.key == configs.bechmarkMode ? 'focus' : ''" v-for="item in benchmarkModeList" :key="item.key">
<span>{{ item.label }}</span>
</a>
<div class="left-area">
<div class="title align-items-center"><span class="required">评测场景</span></div>
<div class="content">
<div class="list">
<a class="item" :href="`/${repoOwnerName}/${repoName}/${item.url}`"
:class="item.key == configs.bechmarkMode ? 'focus' : ''" v-for="item in benchmarkModeList" :key="item.key">
<span>{{ item.label }}</span>
</a>
</div>
</div>
</div>
</div>
<div class="form-row form-row-cluster" v-if="configs.hideCluster !== true">
<div class="title align-items-center"><span class="required">{{ $t('cloudbrainObj.cluster') }}</span></div>
<div class="content">
<div class="list">
<a class="item" :href="`/${repoOwnerName}/${repoName}/${item.url}`"
:class="item.key == configs.cluster ? 'focus' : ''" v-for="item in configs.clusters" :key="item.key">
<i class="icon ri-global-line"></i>
<span>{{ item.label }}</span>
</a>
<div class="left-area">
<div class="title align-items-center"><span class="required">{{ $t('cloudbrainObj.cluster') }}</span></div>
<div class="content">
<div class="list">
<a class="item" :href="`/${repoOwnerName}/${repoName}/${item.url}`"
:class="item.key == configs.cluster ? 'focus' : ''" v-for="item in configs.clusters" :key="item.key">
<i class="icon ri-global-line"></i>
<span>{{ item.label }}</span>
</a>
</div>
</div>
</div>
</div>
<div class="form-row form-row-computer-resource" v-if="configs.hideComputerResource !== true">
<div class="title"><span class="required">{{ $t('cloudbrainObj.computeResource') }}</span></div>
<div class="content">
<div class="list">
<a class="item" :href="`/${repoOwnerName}/${repoName}/${item.url}`"
:class="item.key == configs.computerResouce ? 'focus' : ''" v-for="item in configs.computerResouces" :key="item.key">
<i class="icon ri-archive-drawer-line"></i>
<span>{{ item.label }}</span>
</a>
<div class="left-area">
<div class="title"><span class="required">{{ $t('cloudbrainObj.computeResource') }}</span></div>
<div class="content">
<div class="list">
<a class="item" :href="`/${repoOwnerName}/${repoName}/${item.url}`"
:class="item.key == configs.computerResouce ? 'focus' : ''" v-for="item in configs.computerResouces"
:key="item.key">
<i class="icon ri-archive-drawer-line"></i>
<span>{{ item.label }}</span>
</a>
</div>
</div>
</div>
</div>
<div class="form-row tips-c">
<div class="title"></div>
<div class="content">
<div class="tips tips-1">
<i class="ri-error-warning-line"></i>
<span>
{{ $t('cloudbrainObj.waitCountStart') }} <span>{{ queueNum }}</span> {{ $t('cloudbrainObj.waitCountEnd') }}
</span>
</div>
<div class="tips tips-2" v-if="configs.hideTips2 !== true">
<i class="ri-error-warning-line"></i>
<span v-html="configs.tips2"></span>
<div class="left-area">
<div class="title"></div>
<div class="content">
<div class="tips tips-1">
<i class="ri-error-warning-line"></i>
<span>
{{ $t('cloudbrainObj.waitCountStart') }} <span>{{ queueNum }}</span> {{ $t('cloudbrainObj.waitCountEnd') }}
</span>
</div>
<div class="tips tips-2" v-if="configs.hideTips2 !== true">
<i class="ri-error-warning-line"></i>
<span v-html="configs.tips2"></span>
</div>
</div>
</div>
</div>
@@ -95,6 +104,7 @@ export default {
display: flex;
align-items: center;
flex-wrap: wrap;

.item {
display: flex;
align-items: center;


+ 8
- 12
web_src/vuepages/components/cloudbrain/ImageSelectV1.vue View File

@@ -1,11 +1,13 @@
<template>
<div class="form-row">
<div class="title" v-if="showTitle">
<span :class="required ? 'required' : ''">{{ $t('cloudbrainObj.image') }}</span>
</div>
<div class="content" :class="errStatus ? 'error' : ''">
<el-input class="field-input" v-model="imageUrl" @input="imageChange"
:placeholder="$t('cloudbrainObj.selectImagePlaceholder')"></el-input>
<div class="left-area">
<div class="title" v-if="showTitle">
<span :class="required ? 'required' : ''">{{ $t('cloudbrainObj.image') }}</span>
</div>
<div class="content" :class="errStatus ? 'error' : ''">
<el-input class="field-input" v-model="imageUrl" @input="imageChange"
:placeholder="$t('cloudbrainObj.selectImagePlaceholder')"></el-input>
</div>
</div>
<div class="right-area">
<div class="btn-select" @click="dlgShow = true">
@@ -199,12 +201,6 @@ export default {
@import 'cloudbrain.less';

.form-row {
.content {
flex: inherit;
width: 50%;
margin-right: 5px;
}

.right-area {
display: flex;
align-items: center;


+ 11
- 16
web_src/vuepages/components/cloudbrain/ImageSelectV2.vue View File

@@ -1,14 +1,17 @@
<template>
<div class="form-row">
<div class="title align-items-center">
<span class="required">{{ $t('cloudbrainObj.image') }}</span>
</div>
<div class="content" :class="errStatus ? 'error' : ''">
<el-select class="field-input" v-model="currentValue" @change="handleChange">
<el-option v-for="item in images" :key="item.image_id" :value="item.image_id"
:label="item.image_name"></el-option>
</el-select>
<div class="left-area">
<div class="title align-items-center">
<span class="required">{{ $t('cloudbrainObj.image') }}</span>
</div>
<div class="content" :class="errStatus ? 'error' : ''">
<el-select class="field-input" v-model="currentValue" @change="handleChange">
<el-option v-for="item in images" :key="item.image_id" :value="item.image_id"
:label="item.image_name"></el-option>
</el-select>
</div>
</div>
<div class="right-area"></div>
</div>
</template>

@@ -105,12 +108,4 @@ export default {

<style scoped lang="less">
@import 'cloudbrain.less';

.form-row {
.content {
flex: inherit;
width: 50%;
margin-right: 5px;
}
}
</style>

+ 12
- 14
web_src/vuepages/components/cloudbrain/ModelSelect.vue View File

@@ -1,14 +1,17 @@
<template>
<div class="form-row">
<div class="title" v-if="showTitle"><span :class="required ? 'required' : ''">{{ $t('modelObj.model_label') }}</span>
</div>
<div class="content">
<div class="model-list-c" :class="errStatus ? 'error' : ''">
<div class="model-item" v-for="(item) in selectList" :key="item.id" :title="item._modelName + '/' + item.name">
{{ item._modelName + '/' + item.name }}
</div>
<div v-if="selectList.length == 0" class="model-item-placeholder">
{{ $t('modelObj.model_select_placeholder') }}
<div class="left-area">
<div class="title" v-if="showTitle">
<span :class="required ? 'required' : ''">{{ $t('modelObj.model_label') }}</span>
</div>
<div class="content">
<div class="model-list-c" :class="errStatus ? 'error' : ''">
<div class="model-item" v-for="(item) in selectList" :key="item.id" :title="item._modelName + '/' + item.name">
{{ item._modelName + '/' + item.name }}
</div>
<div v-if="selectList.length == 0" class="model-item-placeholder">
{{ $t('modelObj.model_select_placeholder') }}
</div>
</div>
</div>
</div>
@@ -387,10 +390,6 @@ export default {

.form-row {
.content {
flex: inherit;
width: 50%;
margin-right: 5px;

.model-list-c {
min-height: 37.6px;
border-radius: 4px;
@@ -441,7 +440,6 @@ export default {
}

.right-area {
// width: 250px;
display: flex;
align-items: center;



+ 69
- 83
web_src/vuepages/components/cloudbrain/ModelSelectV2.vue View File

@@ -1,14 +1,17 @@
<template>
<div class="form-row">
<div class="title" v-if="showTitle"><span :class="required ? 'required' : ''">{{ $t('modelObj.model_label') }}</span>
</div>
<div class="content">
<div class="model-list-c" :class="errStatus ? 'error' : ''">
<div class="model-item" v-for="(item) in selectList" :key="item.id" :title="item.name">
{{ item.name }}
</div>
<div v-if="selectList.length == 0" class="model-item-placeholder">
{{ $t('modelObj.model_label') }}
<div class="left-area">
<div class="title" v-if="showTitle">
<span :class="required ? 'required' : ''"> {{ $t('modelObj.model_label') }}</span>
</div>
<div class="content">
<div class="model-list-c" :class="errStatus ? 'error' : ''">
<div class="model-item" v-for="(item) in selectList" :key="item.id" :title="item.name">
{{ item.name }}{{ multiple ? ';' : '' }}
</div>
<div v-if="selectList.length == 0" class="model-item-placeholder">
{{ $t('modelObj.model_label') }}
</div>
</div>
</div>
</div>
@@ -38,8 +41,7 @@
</el-input>
</div>
<el-tree :data="dlgModelTreeData" ref="dlgTreeRef" highlight-current show-checkbox node-key="id"
:default-expanded-keys="dlgInitTreeNode" :props="dlgTreeProps" :index="10" accordion :check-strictly="true"
@check="onTreeCheckChange">
:props="dlgTreeProps" :index="10" accordion :check-strictly="true" @check="onTreeCheckChange">
<span slot-scope="{ node, data }" class="slot-wrap">
<span v-if="data.parent" class="custom-tree-node">
<el-tooltip v-if="data.description" placement="top-start">
@@ -51,6 +53,9 @@
{{ data.version }} </span>
</span>
<img v-if="data.recommend == 1" style="margin-left: 0.4rem" src="/img/jian.svg" />
<span class="exceed-size-tips" v-if="useExceedSize && exceedSize && data.size > exceedSize">
{{ $t('modelObj.model_exceeds_failed') }}{{ exceedSize / (1024 * 1024 * 1024) }}G
</span>
</div>
</span>
</el-tooltip>
@@ -61,6 +66,9 @@
{{ data.version }} </span>
</span>
<img v-if="data.recommend == 1" style="margin-left: 0.4rem" src="/img/jian.svg" />
<span class="exceed-size-tips" v-if="useExceedSize && exceedSize && data.size > exceedSize">
{{ $t('modelObj.model_exceeds_failed') }}{{ exceedSize / (1024 * 1024 * 1024) }}G
</span>
</div>
</span>
<span class="model-repolink model-nowrap" @click.stop="return false;">
@@ -117,6 +125,8 @@ export default {
multiple: { type: Boolean, default: false },
title: { type: String, default: '' },
showTitle: { type: Boolean, default: true },
useExceedSize: { type: Boolean, default: false, },
exceedSize: { type: Number, default: 0 },
},
data() {
return {
@@ -200,6 +210,9 @@ export default {
dataI._modelName = dataI.name;
dataI._modelVersion = dataI.version;
dataI._preTrainModelUrl = dataI.path;
if (this.useExceedSize && dataI.size > this.exceedSize && this.exceedSize) {
dataI.disabled = true;
}
_children.forEach(item => {
item.disabled = true;
item.ModTimeNum = new Date(item.ModTime).getTime();
@@ -277,7 +290,12 @@ export default {
});
this.dlgSelectedModelList.splice(index, 1);
this.dlgSelectedModel = this.dlgSelectedModelList.map(item => item.id);
this.$refs.dlgTreeRef.setCheckedKeys([]);
const children = [];
this.dlgSelectedModelList.map(_data => {
const _children = (this.dlgModelTreeData.filter(itm => itm.id == _data.id)[0] || {}).children || [];
children.push(..._children.map(item => item.id));
})
this.$refs.dlgTreeRef.setCheckedKeys(this.dlgSelectedModel.concat(children));
},
dlgPageChange(page) {
this.dlgPage = page;
@@ -286,78 +304,45 @@ export default {
onTreeCheckChange(data) {
const children = [];
if (data.parent) {
if (this.dlgSelectedModel.indexOf(data.id) >= 0) {
this.dlgSelectedModelList = [];
this.dlgSelectedModel = [];
} else {
this.dlgSelectedModelList = [data];
this.dlgSelectedModel = this.dlgSelectedModelList.map(item => item.id);
children.push(...data.children.map(item => item.id));
}
this.$refs.dlgTreeRef.setCheckedKeys(this.dlgSelectedModel.concat(children));
}
},
onTreeCheckChange1(data, nodeStatus) {
if (this.multiple) {
this.maxCount = 30;
}
let checkedNodes = (nodeStatus.checkedNodes || []).filter(item => !item.parent);
if (this.maxCount == 1 && checkedNodes.length) {
this.dlgSelectedModelList = data.parent ? checkedNodes.slice(0, 1) : [data];
} else {
let diffModel = false;
checkedNodes.map((item1) => {
this.dlgSelectedModelList.map(item2 => {
if (item1._modelID != item2._modelID) {
diffModel = true;
}
})
})
if (diffModel) {
this.$message.warning(this.$t('modelObj.model_should_same_model'));
checkedNodes.map(item => {
this.$refs.dlgTreeRef.setChecked(item.id, false, false);
});
this.dlgSelectedModelList.map(item => {
this.$refs.dlgTreeRef.setChecked(item.id, true, true);
});
return;
}
if (checkedNodes.length > this.maxCount) {
this.$message.warning(this.$t('modelObj.model_most', { msg: this.maxCount }));
const newCheckedNodes = checkedNodes.slice(0, this.maxCount);
const keys = this.dlgSelectedModelList.map(item => item.id);
for (let i = 0, iLen = newCheckedNodes.length; i < iLen; i++) {
if (keys.indexOf(newCheckedNodes[i].id) < 0 && this.dlgSelectedModelList.length < this.maxCount) {
this.dlgSelectedModelList.push(newCheckedNodes[i]);
}
}
} else {
const list = [];
const key = [];
for (let i = 0, iLen = this.dlgSelectedModelList.length; i < iLen; i++) {
const node1 = this.dlgSelectedModelList[i];
for (let j = 0, jLen = checkedNodes.length; j < jLen; j++) {
const node2 = checkedNodes[j];
if (node1.id === node2.id) {
list.push(node2);
key.push(node2.id);
if (this.multiple) {
this.maxCount = 5;
if (this.dlgSelectedModel.indexOf(data.id) >= 0) {
const index = this.dlgSelectedModelList.findIndex(item => item.id == data.id);
this.dlgSelectedModelList.splice(index, 1);
} else {
if (this.dlgSelectedModel.length >= 5) {
this.$message.warning(this.$t('modelObj.model_most', { msg: this.maxCount }));
} else if (this.dlgSelectedModelList.find(item => item.name === data.name)) {
this.$message.warning(this.$t('modelObj.model_not_equal_file'));
} else {
const curSize = data.size;
const selectedSize = this.dlgSelectedModelList.reduce((pre, _data) => {
return pre + _data.size;
}, 0);
if (this.useExceedSize && this.exceedSize && (curSize + selectedSize) > this.exceedSize) {
this.$message.warning(this.$t('modelObj.model_exceeds_failed') + `${(this.exceedSize) / (1024 * 1024 * 1024)}G`);
} else {
this.dlgSelectedModelList.push(data);
}
}
}
for (let j = 0, jLen = checkedNodes.length; j < jLen; j++) {
const node2 = checkedNodes[j];
if (key.indexOf(node2.id) < 0) {
list.push(node2);
}
this.dlgSelectedModel = this.dlgSelectedModelList.map(item => item.id);
this.dlgSelectedModelList.map(_data => {
const _children = (this.dlgModelTreeData.filter(itm => itm.id == _data.id)[0] || {}).children || [];
children.push(..._children.map(item => item.id));
})
} else {
if (this.dlgSelectedModel.indexOf(data.id) >= 0) {
this.dlgSelectedModelList = [];
this.dlgSelectedModel = [];
} else {
this.dlgSelectedModelList = [data];
this.dlgSelectedModel = this.dlgSelectedModelList.map(item => item.id);
children.push(...data.children.map(item => item.id));
}
this.dlgSelectedModelList = list;
}
this.$refs.dlgTreeRef.setCheckedKeys(this.dlgSelectedModel.concat(children));
}
this.$refs.dlgTreeRef.setCheckedKeys(this.dlgSelectedModelList.map(item => item.id), true);
this.dlgSelectedModel = this.dlgSelectedModelList.map((item) => {
return item.id;
});
},
clearSelect() {
this.dlgSelectedModelList = [];
@@ -407,10 +392,6 @@ export default {

.form-row {
.content {
flex: inherit;
width: 50%;
margin-right: 5px;

.model-list-c {
min-height: 37.6px;
border-radius: 4px;
@@ -461,7 +442,6 @@ export default {
}

.right-area {
// width: 250px;
display: flex;
align-items: center;

@@ -689,4 +669,10 @@ export default {
background: rgba(220, 220, 220, 0.8);
padding: 1px 3px;
}

.exceed-size-tips {
margin-left: 1rem;
font-size: 12px;
color: red;
}
</style>
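
A sketch of the size cap the new multi-select branch enforces, using the names from the component above. The 20 GiB figure is only an example of what a parent might pass via the exceedSize prop.

// Hypothetical cap supplied by the parent form through :exceed-size
const exceedSize = 20 * 1024 * 1024 * 1024;
const curSize = data.size;                                            // newly checked node
const selectedSize = dlgSelectedModelList.reduce((pre, d) => pre + d.size, 0);
if (useExceedSize && exceedSize && (curSize + selectedSize) > exceedSize) {
  // warn with modelObj.model_exceeds_failed + '20G' and refuse the node
} else {
  dlgSelectedModelList.push(data);
}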

+ 18
- 15
web_src/vuepages/components/cloudbrain/NetworkType.vue View File

@@ -1,21 +1,24 @@
<template>
<div class="form-row">
<div class="title">
<span :class="required ? 'required' : ''">{{ $t('cloudbrainObj.networkType') }}</span>
</div>
<div class="content" :class="errStatus ? 'error' : ''">
<el-radio-group v-model="currentValue" @input="handleInput">
<!-- <el-radio label="all">{{ $t('cloudbrainObj.allNetworkType') }}</el-radio> -->
<el-radio label="no_internet">{{ $t('cloudbrainObj.noInternet') }}</el-radio>
<el-radio label="has_internet">{{ $t('cloudbrainObj.hasInternet') }}</el-radio>
</el-radio-group>
<el-tooltip class="tooltip" placement="top" effect="light">
<i class="question circle icon link" style="margin-top:-7px"></i>
<div slot="content">
<div style="width:200px;text-align:center;" v-html="$t('cloudbrainObj.networkTypeDesc')"></div>
</div>
</el-tooltip>
<div class="left-area">
<div class="title">
<span :class="required ? 'required' : ''">{{ $t('cloudbrainObj.networkType') }}</span>
</div>
<div class="content" :class="errStatus ? 'error' : ''">
<el-radio-group v-model="currentValue" @input="handleInput">
<!-- <el-radio label="all">{{ $t('cloudbrainObj.allNetworkType') }}</el-radio> -->
<el-radio label="no_internet">{{ $t('cloudbrainObj.noInternet') }}</el-radio>
<el-radio label="has_internet">{{ $t('cloudbrainObj.hasInternet') }}</el-radio>
</el-radio-group>
<el-tooltip class="tooltip" placement="top" effect="light">
<i class="question circle icon link" style="margin-top:-7px"></i>
<div slot="content">
<div style="width:200px;text-align:center;" v-html="$t('cloudbrainObj.networkTypeDesc')"></div>
</div>
</el-tooltip>
</div>
</div>
<div class="right-area"></div>
</div>
</template>



+ 28
- 22
web_src/vuepages/components/cloudbrain/RunParameters.vue View File

@@ -1,29 +1,34 @@
<template>
<div class="form-row">
<div class="title">
<span :class="required ? 'required' : ''">{{ $t('cloudbrainObj.runParameter') }}</span>
</div>
<div class="content">
<div class="param-list field-input">
<div class="param-item" v-for="(item) in list" :key="item.id">
<div class="param-k-v">
<el-input class="param-k" :class="item.labelError ? 'error' : ''" :placeholder="$t('cloudbrainObj.parameterName')" @input="handleInput(item)"
v-model="item.label"></el-input>
<el-input class="param-v" :class="item.valueError ? 'error' : ''" :placeholder="$t('cloudbrainObj.parameterValue')" @input="handleInput(item)"
v-model="item.value"></el-input>
</div>
<div class="param-del-btn">
<i class="trash icon" @click="removeParameter(item)"></i>
<div class="left-area">
<div class="title">
<span :class="required ? 'required' : ''">{{ $t('cloudbrainObj.runParameter') }}</span>
</div>
<div class="content">
<div class="param-list field-input">
<div class="param-item" v-for="(item) in list" :key="item.id">
<div class="param-k-v">
<el-input class="param-k" :class="item.labelError ? 'error' : ''"
:placeholder="$t('cloudbrainObj.parameterName')" @input="handleInput(item)"
v-model="item.label"></el-input>
<el-input class="param-v" :class="item.valueError ? 'error' : ''"
:placeholder="$t('cloudbrainObj.parameterValue')" @input="handleInput(item)"
v-model="item.value"></el-input>
</div>
<div class="param-del-btn">
<i class="trash icon" @click="removeParameter(item)"></i>
</div>
</div>
</div>
</div>
<div class="add-param-btn">
<a href="javascript:;" @click="addParameter">
<i class="plus square outline icon"></i>
<span>{{$t('cloudbrainObj.addRunParameter')}}</span>
</a>
<div class="add-param-btn">
<a href="javascript:;" @click="addParameter">
<i class="plus square outline icon"></i>
<span>{{ $t('cloudbrainObj.addRunParameter') }}</span>
</a>
</div>
</div>
</div>
<div class="right-area"></div>
</div>
</template>

@@ -136,6 +141,7 @@ export default {
display: flex;
align-items: center;
margin-bottom: 12px;
position: relative;

.param-k-v {
flex: 1;
@@ -167,8 +173,6 @@ export default {
}

.param-v {
margin-right: 10px;

&.error {
/deep/.el-input__inner {
color: #9f3a38;
@@ -193,6 +197,8 @@ export default {
}

.param-del-btn {
position: absolute;
right: -28px;
width: 20px;

i {


+ 129
- 0
web_src/vuepages/components/cloudbrain/SDKCode.vue View File

@@ -0,0 +1,129 @@
<template>
<div>
<div class="title">
<p><span v-html="$t('cloudbrainObj.sdkCodeTip1')"></span></p>
<p v-show="codeContent"><span>{{ $t('cloudbrainObj.sdkCodeTip2') }}</span></p>
</div>
<div class="content" v-show="codeContent">
<div class="code-c">
<div class="code-content markdown">
<pre><code class="python hljs" v-html="codeHtml"></code></pre>
</div>
<div class="copy-btn sdk-code-copy">
<a href="javascript:;" class="ui poping up clipboard" :id="`clipboard-${clipboardIDStr}`"
data-position="top center" data-variation="inverted tiny" :data-success="$t('copySuccess')"
:data-content="$t('copy')" :data-original="$t('copy')" :data-clipboard-text="codeContent">
<i style="font-size:14px;" class="copy outline icon"></i>
</a>
</div>
</div>
</div>
</div>
</template>

<script>
import hljs from 'highlight.js';
import { getSDKCode } from '~/apis/modules/common';
import { initClipboard } from '~/utils';

export default {
name: 'SDKCode',
props: {
pageConfigs: { type: Object, required: true, },
formConfigs: { type: Object, required: true, },
data: { type: Object, required: true },
},
data() {
return {
repoOwnerName: location.pathname.split('/')[1],
repoName: location.pathname.split('/')[2],
codeContent: '',
clipboardIDStr: Math.random().toString().replace('0.', ''),
delayTimer: null,
};
},
watch: {
data: {
immediate: true,
deep: true,
handler(newVal) {
this.delayTimer && clearTimeout(this.delayTimer);
this.delayTimer = setTimeout(() => {
this.getCode();
}, 1500);
}
},
},
computed: {
codeHtml() {
return hljs.highlight('python', this.codeContent).value;
},
},
methods: {
getCode() {
const job_type = this.pageConfigs.taskType;
const compute_source = this.pageConfigs.computerResouce;
const cluster_type = this.pageConfigs.clusterType;
const datasetList = this.data.dataset;
const modelList = this.data.model;
const runParameterList = this.data.runParameters;
const pretrain_model_name = modelList.map(itm => itm.name);
const dataset_name = datasetList.map(itm => itm.name);
const param_key = runParameterList.filter(itm => itm.label.trim()).map(itm => itm.label.trim());
// if (pretrain_model_name.length == 0 && dataset_name.length == 0 && param_key.length == 0) {
// this.codeContent = '';
// return;
// };
const params = {
pretrain_model_name,
dataset_name,
param_key,
job_type,
compute_source,
cluster_type,
};
const curParamsStr = JSON.stringify(params);
if (this.paramsStr == curParamsStr) return;
this.paramsStr = curParamsStr;
getSDKCode(params).then(res => {
res = res.data;
if (res.code == 0 && res.data) {
this.codeContent = res.data.code || '';
this.$nextTick(() => {
initClipboard('.sdk-code-copy.copy-btn .clipboard');
});
}
}).catch(err => {
console.log(err);
})
},
check() {
return true;
},
},
mounted() { },
};
</script>

<style scoped lang="less">
.title {
margin-bottom: 10px;
}

.code-c {
position: relative;
font-size: 14px;

.code-content {
position: relative;
overflow: auto;
font-size: 14px;
}

.copy-btn {
position: absolute;
top: 0.5em;
right: 0.8em;
}
}
</style>
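
A hypothetical way a task-creation form could mount this component. The prop values are illustrative; only the prop names and shapes come from the component above.

<!-- data is watched deeply and getCode() is debounced by 1.5 s, so the generated
     snippet refreshes as the user edits datasets, models or run parameters. -->
<SDKCode
  :page-configs="{ taskType: 'DEBUG', computerResouce: 'GPU', clusterType: 'C2Net' }"
  :form-configs="formConfigs"
  :data="{ dataset: selectedDatasets, model: selectedModels, runParameters: runParams }" />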

+ 33
- 34
web_src/vuepages/components/cloudbrain/SpecSelect.vue View File

@@ -1,40 +1,42 @@
<template>
<div class="form-row">
<div class="title"><span :class="required ? 'required' : ''">{{ $t('cloudbrainObj.resourceSpec') }}</span></div>
<div class="content" :class="errStatus ? 'error' : ''">
<div class="spec-list-c" v-if="!list.length">
<div class="spec-item-placeholder" style="color: red;">
{{ $t('specObj.no_use_resource') }}
</div>
</div>
<div v-else class="spec-info">
<el-select class="spec-sel field-input" :class="configs.showPoint ? 'spec-show-point' : ''" v-model="spec"
:placeholder="$t('cloudbrainObj.specPlaceholder')" @change="changeSpec">
<div slot="prefix" class="spec-sel-icon spec-op-icon">
<div :class="selIconType + '_icon _icon'">{{ selIconType[0] }}</div>
<div class="left-area">
<div class="title"><span :class="required ? 'required' : ''">{{ $t('cloudbrainObj.resourceSpec') }}</span></div>
<div class="content" :class="errStatus ? 'error' : ''">
<div class="spec-list-c" v-if="!list.length">
<div class="spec-item-placeholder" style="color: red;">
{{ $t('specObj.no_use_resource') }}
</div>
<el-option v-for="(item) in list" :key="item.id" :label="item.specStr" :value="item.id">
<span style="float: left" class="spec-op-icon">
<div :class="item.type + '_icon _icon'">{{ item.type[0] }}</div>
</div>
<div v-else class="spec-info">
<el-select class="spec-sel field-input" :class="configs.showPoint ? 'spec-show-point' : ''" v-model="spec"
:placeholder="$t('cloudbrainObj.specPlaceholder')" @change="changeSpec">
<div slot="prefix" class="spec-sel-icon spec-op-icon">
<div :class="selIconType + '_icon _icon'">{{ selIconType[0] }}</div>
</div>
<el-option v-for="(item) in list" :key="item.id" :label="item.specStr" :value="item.id">
<span style="float: left" class="spec-op-icon">
<div :class="item.type + '_icon _icon'">{{ item.type[0] }}</div>
</span>
<span class="spec-op-spec" style="float: left">{{ item.specStr }}</span>
<span class="spec-op-point" style="float: right;" v-if="configs.showPoint">{{ item.pointStr }}</span>
</el-option>
<div slot="prefix" v-if="configs.showPoint" class="spec-sel-point"> {{ selPointStr }} </div>
</el-select>
<div class="self-point-info" v-if="configs.showPoint">
<span>{{ $t('cloudbrainObj.balanceOfPoints') }}:<span style="color:red"> {{ configs.blance }}</span>
{{ $t('cloudbrainObj.points') }}<span v-if="showUseTime">{{ $t('cloudbrainObj.canUseTime') }} <span
style="color:red">{{ canUseTime
}}</span> {{ $t('cloudbrainObj.hours') }}</span></span>
<span>
<i class="el-icon-question"></i>
<a target="_blank" href="/reward/point/rule">{{ $t('cloudbrainObj.PointGainDescr') }}</a>
</span>
<span class="spec-op-spec" style="float: left">{{ item.specStr }}</span>
<span class="spec-op-point" style="float: right;" v-if="configs.showPoint">{{ item.pointStr }}</span>
</el-option>
<div slot="prefix" v-if="configs.showPoint" class="spec-sel-point"> {{ selPointStr }} </div>
</el-select>
<div class="self-point-info" v-if="configs.showPoint">
<span>{{ $t('cloudbrainObj.balanceOfPoints') }}:<span style="color:red"> {{ configs.blance }}</span>
{{ $t('cloudbrainObj.points') }}<span v-if="showUseTime">{{ $t('cloudbrainObj.canUseTime') }} <span
style="color:red">{{ canUseTime
}}</span> {{ $t('cloudbrainObj.hours') }}</span></span>
<span>
<i class="el-icon-question"></i>
<a target="_blank" href="/reward/point/rule">{{ $t('cloudbrainObj.PointGainDescr') }}</a>
</span>
</div>
</div>
</div>
</div>
<div class="resource-descr-c">
<div class="right-area resource-descr-c">
<div class="resource-descr">
<el-tooltip placement="top" effect="light">
<i class="question circle icon link" style="margin-top:-7px"></i>
@@ -167,10 +169,6 @@ export default {

.form-row {
.content {
flex: inherit;
width: 50%;
margin-right: 5px;

.spec-list-c {
min-height: 37.6px;
border-radius: 4px;
@@ -241,6 +239,7 @@ export default {
}

.resource-descr-c {
align-items: flex-start;
margin-top: 8px;

.resource-descr {


+ 11
- 8
web_src/vuepages/components/cloudbrain/TaskDescr.vue View File

@@ -1,13 +1,16 @@
<template>
<div class="form-row">
<div class="title">
<span :class="required ? 'required' : ''">{{ $t('cloudbrainObj.taskDescr') }}</span>
</div>
<div class="content" :class="errStatus ? 'error' : ''">
<el-input class="field-input" type="textarea" v-model="currentValue" :rows="3"
:placeholder="$t('cloudbrainObj.taskDescrPlaceholder')" :maxlength="255" @blur="handleBlur" @focus="handleFocus"
@input="handleInput" @change="handleInputChange"></el-input>
<div class="left-area">
<div class="title">
<span :class="required ? 'required' : ''">{{ $t('cloudbrainObj.taskDescr') }}</span>
</div>
<div class="content" :class="errStatus ? 'error' : ''">
<el-input class="field-input" type="textarea" v-model="currentValue" :rows="3"
:placeholder="$t('cloudbrainObj.taskDescrPlaceholder')" :maxlength="255" @blur="handleBlur" @focus="handleFocus"
@input="handleInput" @change="handleInputChange"></el-input>
</div>
</div>
<div class="right-area"></div>
</div>
</template>

@@ -68,7 +71,7 @@ export default {
<style scoped lang="less">
@import 'cloudbrain.less';

.form-row .content .field-input /deep/ .el-textarea__inner {
.form-row .left-area .content .field-input /deep/ .el-textarea__inner {
line-height: 1.2857;
padding: 0.78571429em 1em;
}


+ 12
- 8
web_src/vuepages/components/cloudbrain/TaskName.vue View File

@@ -1,14 +1,18 @@
<template>
<div class="form-row">
<div class="title">
<span :class="required ? 'required' : ''">{{ $t('cloudbrainObj.taskName') }}</span>
</div>
<div class="content" :class="errStatus ? 'error' : ''">
<el-input class="field-input" v-model="currentValue" @input="handleInput" @change="handleInputChange" :maxlength="type == 'supercompute' ? 26 : 36"
:placeholder="$t('cloudbrainObj.taskName')" :autofocus="autofocus"></el-input>
<div v-if="type === 'supercompute'" class="tips">{{ $t('cloudbrainObj.taskNameTips1') }}</div>
<div v-else class="tips">{{ $t('cloudbrainObj.taskNameTips') }}</div>
<div class="left-area">
<div class="title">
<span :class="required ? 'required' : ''">{{ $t('cloudbrainObj.taskName') }}</span>
</div>
<div class="content" :class="errStatus ? 'error' : ''">
<el-input class="field-input" v-model="currentValue" @input="handleInput" @change="handleInputChange"
:maxlength="type == 'supercompute' ? 26 : 36" :placeholder="$t('cloudbrainObj.taskName')"
:autofocus="autofocus"></el-input>
<div v-if="type === 'supercompute'" class="tips">{{ $t('cloudbrainObj.taskNameTips1') }}</div>
<div v-else class="tips">{{ $t('cloudbrainObj.taskNameTips') }}</div>
</div>
</div>
<div class="right-area"></div>
</div>
</template>



Some files were not shown because too many files changed in this diff
