#5083 合并启智迁移到智算相关功能。

Merged
ychao_1983 merged 69 commits from zouap_dev into V20240109 4 months ago
  1. +14
    -0
      models/ai_model_manage.go
  2. +4
    -4
      models/cloudbrain.go
  3. +1
    -1
      models/user_business_analysis.go
  4. +0
    -1
      modules/context/context.go
  5. +24
    -14
      modules/git/repo_stats_custom.go
  6. +10
    -4
      modules/setting/setting.go
  7. +1
    -1
      modules/templates/helper.go
  8. +1
    -0
      options/locale/locale_en-US.ini
  9. +1
    -0
      options/locale/locale_zh-CN.ini
  10. +2
    -0
      public/home/search.js
  11. +1
    -1
      routers/api/v1/api.go
  12. +0
    -214
      routers/api/v1/repo/cloudbrain.go
  13. +264
    -0
      routers/api/v1/repo/modelmanage.go
  14. +348
    -78
      routers/repo/ai_model_convert.go
  15. +6
    -5
      routers/repo/cloudbrain.go
  16. +2
    -2
      routers/repo/repo_statistic.go
  17. +97
    -1
      routers/repo/util.go
  18. +2
    -5
      routers/user/auth.go
  19. +11
    -11
      services/cloudbrain/clear.go
  20. +2
    -2
      templates/repo/cloudbrain/benchmark/index.tmpl
  21. +2
    -10
      templates/repo/cloudbrain/benchmark/new.tmpl
  22. +10
    -25
      templates/repo/modelmanage/convertshowinfo.tmpl
  23. +2
    -27
      templates/repo/modelsafety/new.tmpl
  24. +2
    -0
      web_src/js/features/i18nVue.js
  25. +1
    -0
      web_src/vuepages/langs/config/en-US.js
  26. +1
    -0
      web_src/vuepages/langs/config/zh-CN.js
  27. +1
    -0
      web_src/vuepages/pages/dataset/square/constant.js

+ 14
- 0
models/ai_model_manage.go View File

@@ -227,6 +227,20 @@ func UpdateModelConvertCBTI(id string, CloudBrainTaskId string) error {
return nil
}

func UpdateResultMigrateFlag(id string, resultMigrateFlag string) error {
var sess *xorm.Session
sess = x.ID(id)
defer sess.Close()
re, err := sess.Cols("status_result").Update(&AiModelConvert{
StatusResult: resultMigrateFlag,
})
if err != nil {
return err
}
log.Info("success to update resultMigrateFlag from db.re=" + fmt.Sprint((re)))
return nil
}

func UpdateModelConvert(job *AiModelConvert) error {
return updateModelConvert(x, job)
}


+ 4
- 4
models/cloudbrain.go View File

@@ -2750,7 +2750,7 @@ func GetGPUStoppedNotDebugJobDaysAgo(days int, limit int) ([]*Cloudbrain, error)
JobStopped, JobSucceeded, JobFailed, ModelArtsCreateFailed, ModelArtsStartFailed, ModelArtsUnavailable, ModelArtsResizFailed, ModelArtsDeleted,
ModelArtsStopped, ModelArtsTrainJobCanceled, ModelArtsTrainJobCheckFailed, ModelArtsTrainJobCompleted, ModelArtsTrainJobDeleteFailed, ModelArtsTrainJobDeployServiceFailed,
ModelArtsTrainJobFailed, ModelArtsTrainJobImageFailed, ModelArtsTrainJobKilled, ModelArtsTrainJobLost, ModelArtsTrainJobSubmitFailed, ModelArtsTrainJobSubmitModelFailed).
Where("(((end_time is null or end_time=0) and updated_unix<? and updated_unix != 0 ) or (end_time<? and end_time != 0)) and cleared=false and job_type != 'DEBUG' and (type=0 or (type =2 and compute_resource='CPU/GPU'))", missEndTimeBefore, endTimeBefore).
Where("(((end_time is null or end_time=0) and updated_unix<? and updated_unix != 0 ) or (end_time<? and end_time != 0)) and cleared=false and job_type != 'DEBUG' and (type=0 or type=2)", missEndTimeBefore, endTimeBefore).
Limit(limit).
Find(&cloudbrains)
}
@@ -2764,7 +2764,7 @@ func GetNPUStoppedNotDebugJobDaysAgo(days int, limit int) ([]*Cloudbrain, error)
JobStopped, JobSucceeded, JobFailed, ModelArtsCreateFailed, ModelArtsStartFailed, ModelArtsUnavailable, ModelArtsResizFailed, ModelArtsDeleted,
ModelArtsStopped, ModelArtsTrainJobCanceled, ModelArtsTrainJobCheckFailed, ModelArtsTrainJobCompleted, ModelArtsTrainJobDeleteFailed, ModelArtsTrainJobDeployServiceFailed,
ModelArtsTrainJobFailed, ModelArtsTrainJobImageFailed, ModelArtsTrainJobKilled, ModelArtsTrainJobLost, ModelArtsTrainJobSubmitFailed, ModelArtsTrainJobSubmitModelFailed).
Where("updated_unix<? and updated_unix != 0 and cleared=false and job_type != 'DEBUG' and (type=1 or (type =2 and compute_resource='NPU'))", endTimeBefore).
Where("updated_unix<? and updated_unix != 0 and cleared=false and job_type != 'DEBUG' and type=1", endTimeBefore).
Limit(limit).
Find(&cloudbrains)
}
@@ -2783,7 +2783,7 @@ func GetGPUStoppedDebugJobDaysAgo(days int, limit int) ([]*Cloudbrain, error) {
FROM cloudbrain
where job_type='DEBUG'
ORDER BY job_name, updated_unix DESC) a
where status in ('STOPPED','SUCCEEDED','FAILED') and (((end_time is null or end_time=0) and updated_unix<? and updated_unix != 0 ) or (end_time<? and end_time != 0)) and (type=0 or (type =2 and compute_resource='CPU/GPU')) and cleared=false`
where status in ('STOPPED','SUCCEEDED','FAILED') and (((end_time is null or end_time=0) and updated_unix<? and updated_unix != 0 ) or (end_time<? and end_time != 0)) and (type=0 or type =2 ) and cleared=false`
//
return cloudbrains, x.Unscoped().SQL(sql, missEndTimeBefore, endTimeBefore).Limit(limit).Find(&cloudbrains)

@@ -2797,7 +2797,7 @@ func GetNPUStoppedDebugJobDaysAgo(days int, limit int) ([]*Cloudbrain, error) {
FROM cloudbrain
where job_type='DEBUG'
ORDER BY job_name, updated_unix DESC) a
where status in ('STOPPED','SUCCEEDED','FAILED') and updated_unix<? and updated_unix != 0 and (type=1 or (type =2 and compute_resource='NPU')) and cleared=false`
where status in ('STOPPED','SUCCEEDED','FAILED') and updated_unix<? and updated_unix != 0 and type=1 and cleared=false`
//(type=0 or (type =2 and compute_resource='CPU/GPU')) and
return cloudbrains, x.Unscoped().SQL(sql, endTimeBefore).Limit(limit).Find(&cloudbrains)



+ 1
- 1
models/user_business_analysis.go View File

@@ -579,7 +579,7 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
var CommitCodeSizeMap map[string]*git.UserKPIStats
var err error
var existCommitCodeSize map[int64]int
if tableName == "user_business_analysis_all" {
if tableName == "user_business_analysis_all" || tableName == "user_business_analysis_current_year" {
oneDayStartTime := pageEndTime.AddDate(0, 0, -1)
oneDayStartTime = time.Date(oneDayStartTime.Year(), oneDayStartTime.Month(), oneDayStartTime.Day(), 0, 0, 0, 1, oneDayStartTime.Location())
if oneDayStartTime.Format("2006-01-02") == pageStartTime.Format("2006-01-02") {


+ 0
- 1
modules/context/context.go View File

@@ -302,7 +302,6 @@ func Contexter() macaron.Handler {
})))
return
}

// Get user from session if logged in.
ctx.User, ctx.IsBasicAuth = auth.SignedInUser(ctx.Context, ctx.Session)



+ 24
- 14
modules/git/repo_stats_custom.go View File

@@ -100,21 +100,25 @@ func GetUserKPIStats(repoPath string, startTime time.Time, endTime time.Time) (m
CommitLines: 0,
}
}

usersKPIStatses[email].Commits++
default: // Changed file
//Log.Info("code commit file=" + l)
if parts := strings.Fields(l); len(parts) >= 3 {
if parts[0] != "-" {
if c, err := strconv.ParseInt(strings.TrimSpace(parts[0]), 10, 64); err == nil {
usersKPIStatses[email].CommitLines += c
if isCodeFile(parts[2]) {
if parts[0] != "-" {
if c, err := strconv.ParseInt(strings.TrimSpace(parts[0]), 10, 64); err == nil {
usersKPIStatses[email].CommitLines += c
}
}
}
if parts[1] != "-" {
if c, err := strconv.ParseInt(strings.TrimSpace(parts[1]), 10, 64); err == nil {
usersKPIStatses[email].CommitLines += c
if parts[1] != "-" {
if c, err := strconv.ParseInt(strings.TrimSpace(parts[1]), 10, 64); err == nil {
usersKPIStatses[email].CommitLines += c
}
}
if usersKPIStatses[email].CommitLines > 100000 {
Log.Info("count code more than 100000, email=" + email + " lines=" + fmt.Sprint(usersKPIStatses[email].CommitLines) + " repo=" + repoPath)
}
}

}
}
}
@@ -123,12 +127,18 @@ func GetUserKPIStats(repoPath string, startTime time.Time, endTime time.Time) (m

}

//获取一天内的用户贡献指标
func getUserKPIStats(repoPath string) (map[string]*UserKPIStats, error) {
timeUntil := time.Now()
oneDayAgo := timeUntil.AddDate(0, 0, -1)
var (
codePostfix []string = []string{".py", ".go", ".js", ".html", ".css", ".c", ".cpp", ".h", ".hpp", ".java", ".sql", ".php", ".rb", ".vue", ".yml", ".swift", ".kt", ".sh", ".pl", ".rs", ".lua", ".ts", ".m"}
)

return GetUserKPIStats(repoPath, oneDayAgo, oneDayAgo)
// isCodeFile reports whether name ends with one of the recognized
// source-code extensions in codePostfix (comparison is case-insensitive).
func isCodeFile(name string) bool {
	lowered := strings.ToLower(name)
	matched := false
	for _, ext := range codePostfix {
		if strings.HasSuffix(lowered, ext) {
			matched = true
			break
		}
	}
	return matched
}

func SetRepoKPIStats(repoPath string, fromTime time.Time, stats *RepoKPIStats, newContributers map[string]struct{}) error {


+ 10
- 4
modules/setting/setting.go View File

@@ -838,7 +838,10 @@ var (
TensorFlowNpuBootFile string
TensorFlowGpuBootFile string
ConvertRepoPath string
GPU_Resource_Specs_ID int
GPU_Resource_Specs_ID string
GPU_AiCenter_Code string
GPU_Spec_ID int64
GPU_Spec_AccCardType string
NPU_FlavorCode string
NPU_PoolID string
NPU_MINDSPORE_IMAGE_ID int
@@ -1842,7 +1845,7 @@ func getModelSafetyConfig() {

func getModelConvertConfig() {
sec := Cfg.Section("model_convert")
ModelConvert.GPU_PYTORCH_IMAGE = sec.Key("GPU_PYTORCH_IMAGE").MustString("dockerhub.pcl.ac.cn:5000/user-images/openi:tensorRT_7_zouap")
ModelConvert.GPU_PYTORCH_IMAGE = sec.Key("GPU_PYTORCH_IMAGE").MustString("192.168.204.28:5000/default-workspace/99280a9940ae44ca8f5892134386fddb/image:tensorrt_8.0_for_c2net_1")
ModelConvert.GpuQueue = sec.Key("GpuQueue").MustString("openidgx")
ModelConvert.GPU_TENSORFLOW_IMAGE = sec.Key("GPU_TENSORFLOW_IMAGE").MustString("dockerhub.pcl.ac.cn:5000/user-images/openi:tf2onnx")
ModelConvert.NPU_MINDSPORE_16_IMAGE = sec.Key("NPU_MINDSPORE_16_IMAGE").MustString("swr.cn-south-222.ai.pcl.cn/openi/mindspore1.8.1_train_openi_new:v1")
@@ -1852,13 +1855,16 @@ func getModelConvertConfig() {
ModelConvert.TensorFlowNpuBootFile = sec.Key("TensorFlowNpuBootFile").MustString("convert_tensorflow.py")
ModelConvert.TensorFlowGpuBootFile = sec.Key("TensorFlowGpuBootFile").MustString("convert_tensorflow_gpu.py")
ModelConvert.ConvertRepoPath = sec.Key("ConvertRepoPath").MustString("https://openi.pcl.ac.cn/zouap/npu_test")
ModelConvert.GPU_Resource_Specs_ID = sec.Key("GPU_Resource_Specs_ID").MustInt(1)
ModelConvert.GPU_Resource_Specs_ID = sec.Key("GPU_Resource_Specs_ID").MustString("e677036134cd11ed9c2a06e51cc0c06b")
ModelConvert.GPU_AiCenter_Code = sec.Key("GPU_AiCenter_Code").MustString("cloudbrain1")
ModelConvert.GPU_Spec_ID = sec.Key("GPU_Spec_ID").MustInt64(183)
ModelConvert.GPU_Spec_AccCardType = sec.Key("GPU_Spec_AccCardType").MustString("A100")
ModelConvert.NPU_FlavorCode = sec.Key("NPU_FlavorCode").MustString("modelarts.bm.910.arm.public.1")
ModelConvert.NPU_PoolID = sec.Key("NPU_PoolID").MustString("pool7908321a")
ModelConvert.NPU_MINDSPORE_IMAGE_ID = sec.Key("NPU_MINDSPORE_IMAGE_ID").MustInt(37)
ModelConvert.NPU_TENSORFLOW_IMAGE_ID = sec.Key("NPU_TENSORFLOW_IMAGE_ID").MustInt(38)
ModelConvert.GPU_PADDLE_IMAGE = sec.Key("GPU_PADDLE_IMAGE").MustString("dockerhub.pcl.ac.cn:5000/user-images/openi:paddle2.3.0_gpu_cuda11.2_cudnn8")
ModelConvert.GPU_MXNET_IMAGE = sec.Key("GPU_MXNET_IMAGE").MustString("dockerhub.pcl.ac.cn:5000/user-images/openi:mxnet191cu_cuda102_py37")
ModelConvert.GPU_MXNET_IMAGE = sec.Key("GPU_MXNET_IMAGE").MustString("192.168.204.22:5000/default-workspace/99280a9940ae44ca8f5892134386fddb/image:mxnet191cu_cuda102_py37_c2net")
ModelConvert.PaddleOnnxBootFile = sec.Key("PaddleOnnxBootFile").MustString("convert_paddle.py")
ModelConvert.MXnetOnnxBootFile = sec.Key("MXnetOnnxBootFile").MustString("convert_mxnet.py")
}


+ 1
- 1
modules/templates/helper.go View File

@@ -796,7 +796,7 @@ func buildSubjectBodyTemplate(stpl *texttmpl.Template, btpl *template.Template,

// Dataset categories
func categories() []string {
return []string{"computer_vision", "natural_language_processing", "speech_processing", "computer_vision_natural_language_processing"}
return []string{"computer_vision", "natural_language_processing", "speech_processing", "computer_vision_natural_language_processing", "medical_imaging"}
}

func licenses() []string {


+ 1
- 0
options/locale/locale_en-US.ini View File

@@ -955,6 +955,7 @@ category.computer_vision= computer vision
category.natural_language_processing= natural language processing
category.speech_processing= speech processing
category.computer_vision_natural_language_processing= computer vision and natural language processing
category.medical_imaging = medical imaging
attachment.delete= Delete this version of dataset
attachment.delete_desc= Are you sure you will delete this version of dataset, once deleted can not be recovery
public= public


+ 1
- 0
options/locale/locale_zh-CN.ini View File

@@ -959,6 +959,7 @@ category.computer_vision=计算机视觉
category.natural_language_processing=自然语言处理
category.speech_processing=语音处理
category.computer_vision_natural_language_processing=计算机视觉、自然语言处理
category.medical_imaging = 医学影像
attachment.delete= 删除该版本的数据集
attachment.delete_desc= 你确定要删除该版本的数据集么?一旦删除不能恢复。
public=公有


+ 2
- 0
public/home/search.js View File

@@ -346,6 +346,7 @@ var categoryDesc = {
natural_language_processing: "自然语言处理",
speech_processing: "语音处理",
computer_vision_natural_language_processing: "计算机视觉、自然语言处理",
medical_imaging: "医学影像",
};

var categoryENDesc = {
@@ -354,6 +355,7 @@ var categoryENDesc = {
speech_processing: "speech processing",
computer_vision_natural_language_processing:
"computer vision and natural language processing",
medical_imaging: "medical imaging",
};

var taskDesc = {


+ 1
- 1
routers/api/v1/api.go View File

@@ -1321,7 +1321,7 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Get("/download_model_convert_resultfile", repo.DownloadModeConvertResultFile)

m.Get("/:id", repo.GetCloudbrainModelConvertTask)
m.Get("/:id/log", repo.CloudbrainForModelConvertGetLog)
m.Get("/:id/log", repo.GrampusForModelConvertGetLog)
m.Get("/:id/modelartlog", repo.TrainJobForModelConvertGetLog)
m.Get("/:id/model_list", repo.CloudBrainModelConvertList)
}, reqRepoReader(models.UnitTypeModelManage))


+ 0
- 214
routers/api/v1/repo/cloudbrain.go View File

@@ -651,145 +651,6 @@ func InferencJobResultList(ctx *context.APIContext) {

}

// GetCloudbrainModelConvertTask returns the live status of a model convert
// task. GPU tasks are refreshed from the cloudbrain backend, NPU tasks from
// ModelArts; in both cases the stored record is updated before responding.
func GetCloudbrainModelConvertTask(ctx *context.APIContext) {
var (
err error
)
ID := ctx.Params(":id")
job, err := models.QueryModelConvertById(ID)
if err != nil {
ctx.NotFound(err)
log.Error("GetCloudbrainByID failed:", err)
return
}
if job.IsGpuTrainTask() {
jobResult, err := cloudbrain.GetJob(job.CloudBrainTaskId)
if err != nil {
ctx.NotFound(err)
log.Error("GetJob failed:", err)
return
}
result, _ := models.ConvertToJobResultPayload(jobResult.Payload)
// NOTE(review): the ConvertToJobResultPayload error is discarded with `_`,
// so this check re-tests the earlier err (nil here) and the branch is
// effectively dead — confirm whether the conversion error should be checked.
if err != nil {
ctx.NotFound(err)
log.Error("ConvertToJobResultPayload failed:", err)
return
}

job.Status = result.JobStatus.State
taskRoles := result.TaskRoles
taskRes, _ := models.ConvertToTaskPod(taskRoles[cloudbrain.SubTaskName].(map[string]interface{}))
// Once the task has actually started, prefer the per-container status.
if result.JobStatus.State != string(models.JobWaiting) && result.JobStatus.State != string(models.JobFailed) {
job.ContainerIp = taskRes.TaskStatuses[0].ContainerIP
job.ContainerID = taskRes.TaskStatuses[0].ContainerID
job.Status = taskRes.TaskStatuses[0].State
}

// Persist duration/status for any non-waiting task; failures are only logged.
if result.JobStatus.State != string(models.JobWaiting) {
models.ModelComputeAndSetDuration(job, result)
err = models.UpdateModelConvert(job)
if err != nil {
log.Error("UpdateJob failed:", err)
}
}
ctx.JSON(http.StatusOK, map[string]interface{}{
"ID": ID,
"JobName": result.Config.JobName,
"JobStatus": result.JobStatus.State,
"SubState": result.JobStatus.SubState,
"CreatedTime": time.Unix(result.JobStatus.CreatedTime/1000, 0).Format("2006-01-02 15:04:05"),
"CompletedTime": time.Unix(result.JobStatus.CompletedTime/1000, 0).Format("2006-01-02 15:04:05"),
})
} else {

// NPU path: refresh status and duration from ModelArts.
result, err := modelarts.GetTrainJob(job.CloudBrainTaskId, job.ModelArtsVersionId)
if err != nil {
log.Error("get modelart job failed:", err)
ctx.NotFound(err)
return
}

job.Status = modelarts.TransTrainJobStatus(result.IntStatus)
job.RunTime = result.Duration / 1000
job.TrainJobDuration = models.ConvertDurationToStr(job.RunTime)
err = models.UpdateModelConvert(job)
if err != nil {
log.Error("UpdateJob failed:", err)
}

ctx.JSON(http.StatusOK, map[string]interface{}{
"ID": ID,
"JobStatus": job.Status,
})

}

}

// CloudbrainGetLogByJobId fetches the full log of a cloudbrain job, paging
// through the backend's scroll API when the first page is full, and returns
// {"JobName": ..., "Content": ...}. Returns nil if the first fetch fails.
func CloudbrainGetLogByJobId(jobId string, jobName string) map[string]interface{} {
var hits []models.Hits
result, err := cloudbrain.GetJobLog(jobId)
if err != nil {
log.Error("GetJobLog failed: %v", err)
return nil
}
hits = result.Hits.Hits

//if the size equal page_size, then take the scroll_id to get all log and delete the scroll_id(the num of scroll_id is limited)
if len(result.Hits.Hits) >= cloudbrain.LogPageSize {
for {
resultNext, err := cloudbrain.GetJobAllLog(result.ScrollID)
if err != nil {
log.Error("GetJobAllLog failed: %v", err)
} else {
for _, hit := range resultNext.Hits.Hits {
hits = append(hits, hit)
}
}

// A short (or empty, after an error) page means the scroll is exhausted.
if len(resultNext.Hits.Hits) < cloudbrain.LogPageSize {
log.Info("get all log already")
break
}
}
}

cloudbrain.DeleteJobLogToken(result.ScrollID)

// Restore chronological order across pages.
sort.Slice(hits, func(i, j int) bool {
return hits[i].Sort[0] < hits[j].Sort[0]
})

// NOTE(review): the loop variable `log` shadows the log package inside
// this loop body — works, but worth renaming.
var content string
for _, log := range hits {
content += log.Source.Message + "\n"
}

return map[string]interface{}{
"JobName": jobName,
"Content": content,
}

}

// CloudbrainForModelConvertGetLog serves the cloudbrain log of the model
// convert task identified by the :id route parameter as JSON.
func CloudbrainForModelConvertGetLog(ctx *context.Context) {
ID := ctx.Params(":id")
job, err := models.QueryModelConvertById(ID)
if err != nil {
log.Error("GetCloudbrainByJobName failed: %v", err, ctx.Data["MsgID"])
ctx.ServerError(err.Error(), err)
return
}

result := CloudbrainGetLogByJobId(job.CloudBrainTaskId, job.Name)
// NOTE(review): in this branch err is nil (the lookup above succeeded),
// so err.Error() would panic — the error context here needs its own message.
if result == nil {
log.Error("GetJobLog failed: %v", err, ctx.Data["MsgID"])
ctx.ServerError(err.Error(), err)
return
}
ctx.JSON(http.StatusOK, result)
}

func ModelSafetyGetLog(ctx *context.APIContext) {
ID := ctx.Params(":id")
job, err := models.GetCloudbrainByID(ID)
@@ -1240,81 +1101,6 @@ func getLogFromModelDir(jobName string, startLine int, endLine int, resultPath s
}
}

// CloudBrainModelConvertList lists the result files of a model convert task
// (route :id), wrapping QueryModelConvertResultFileList output for the UI.
func CloudBrainModelConvertList(ctx *context.APIContext) {
ID := ctx.Params(":id")
parentDir := ctx.Query("parentDir")
dirArray := strings.Split(parentDir, "/")
// Convert tasks expose a single fixed version.
var versionName = "V0001"

job, err := models.QueryModelConvertById(ID)
if err != nil {
// NOTE(review): job is presumably nil when the lookup fails, so
// job.Name here would panic — log the ID instead; verify against the
// QueryModelConvertById implementation.
log.Error("GetCloudbrainByJobID(%s) failed:%v", job.Name, err.Error())
ctx.ServerError("GetModelDirs failed:", err)
return
}
result, err := QueryModelConvertResultFileList(ctx, ID)
if err == nil {
ctx.JSON(http.StatusOK, map[string]interface{}{
"JobID": job.ID,
"VersionName": versionName,
"StatusOK": 0,
"Path": dirArray,
"Dirs": result,
"task": job,
"PageIsCloudBrain": true,
})
} else {
log.Error("GetCloudbrainByJobID failed:%v", err.Error())
ctx.ServerError("GetModelDirs failed:", err)
return
}
}

// QueryModelConvertResultFileList returns the output files of the model
// convert task with the given id: GPU tasks list the minio model dir
// (sorted newest first), NPU tasks list the OBS output directory.
func QueryModelConvertResultFileList(ctx *context.APIContext, id string) ([]storage.FileInfo, error) {
ID := id
parentDir := ctx.Query("parentDir")
job, err := models.QueryModelConvertById(ID)
if err != nil {
// NOTE(review): job is presumably nil here, so job.Name would panic —
// confirm and log the id instead.
log.Error("GetCloudbrainByJobID(%s) failed:%v", job.Name, err.Error())
return nil, err
}
if job.IsGpuTrainTask() {
//get dirs
dirs, err := routerRepo.GetModelDirs(job.ID, parentDir)
if err != nil {
log.Error("GetModelDirs failed:%v", err.Error(), ctx.Data["msgID"])
return nil, err
}

var fileInfos []storage.FileInfo
err = json.Unmarshal([]byte(dirs), &fileInfos)
if err != nil {
log.Error("json.Unmarshal failed:%v", err.Error(), ctx.Data["msgID"])
return nil, err
}

// Normalize modification times to the local time zone for display.
for i, fileInfo := range fileInfos {
temp, _ := time.Parse("2006-01-02 15:04:05", fileInfo.ModTime)
fileInfos[i].ModTime = temp.Local().Format("2006-01-02 15:04:05")
}

// Newest files first.
sort.Slice(fileInfos, func(i, j int) bool {
return fileInfos[i].ModTime > fileInfos[j].ModTime
})

return fileInfos, nil
} else {
var versionName = "V0001"
// NOTE(review): this local `models` shadows the models package for the
// rest of the branch — worth renaming.
models, err := storage.GetObsListObject(job.ID, "output/", parentDir, versionName)
if err != nil {
log.Info("get TrainJobListModel failed:", err)
return nil, err
}
return models, nil
}

}

func CloudBrainModelList(ctx *context.APIContext) {
var jobID = ctx.Params(":jobid")
var versionName = ctx.Query("version_name")


+ 264
- 0
routers/api/v1/repo/modelmanage.go View File

@@ -1,14 +1,27 @@
package repo

import (
"encoding/json"
"fmt"
"net/http"
"sort"
"strings"
"time"

"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/convert"
"code.gitea.io/gitea/modules/grampus"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/modelarts"
"code.gitea.io/gitea/modules/redis/redis_key"
"code.gitea.io/gitea/modules/redis/redis_lock"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/storage"
api "code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/timeutil"
routerRepo "code.gitea.io/gitea/routers/repo"
"code.gitea.io/gitea/services/ai_task_service/schedule"
)

type FileInfo struct {
@@ -204,3 +217,254 @@ func DeleteModelFile(ctx *context.APIContext) {
log.Info("DeleteModelFile by api.")
routerRepo.DeleteModelFile(ctx.Context)
}

// GetCloudbrainModelConvertTask returns the live status of a model convert
// task. GPU tasks are refreshed from grampus (and, on success, result
// migration is triggered); NPU tasks are refreshed from ModelArts. The
// stored record is updated before responding.
func GetCloudbrainModelConvertTask(ctx *context.APIContext) {
var (
err error
)
ID := ctx.Params(":id")
job, err := models.QueryModelConvertById(ID)
if err != nil {
ctx.NotFound(err)
log.Error("GetCloudbrainByID failed:", err)
return
}
if job.IsGpuTrainTask() {
jobResult, err := grampus.GetJob(job.CloudBrainTaskId)
if err != nil {
ctx.NotFound(err)
log.Error("GetJob failed:", err)
return
}
jobResultJson, _ := json.Marshal(jobResult)
log.Info("grampus jobResultJson=" + string(jobResultJson))

// Pending or unknown grampus status is surfaced as WAITING.
if jobResult.JobInfo.Status == models.GrampusStatusPending || jobResult.JobInfo.Status == "" {
job.Status = models.GrampusStatusWaiting
} else {
job.Status = strings.ToUpper(jobResult.JobInfo.Status)
}
if jobResult.JobInfo.CompletedAt > 0 {
job.EndTime = timeutil.TimeStamp(jobResult.JobInfo.CompletedAt)
}
// Persist duration/status once the task has left the waiting/pending
// states; update failures are only logged.
if strings.ToUpper(jobResult.JobInfo.Status) != models.GrampusStatusWaiting && jobResult.JobInfo.Status != models.GrampusStatusPending {
models.ModelConvertSetDuration(job)
err = models.UpdateModelConvert(job)
if err != nil {
log.Error("UpdateJob failed:", err)
}
}
// Kick off result migration for succeeded jobs (no-op otherwise);
// its error is intentionally ignored here.
doGrampusModelConvertMigrate(jobResult.JobInfo.Status, job)
ctx.JSON(http.StatusOK, map[string]interface{}{
"ID": ID,
"JobName": jobResult.JobInfo.Name,
"JobStatus": job.Status,
"SubState": "",
"CreatedTime": time.Unix(jobResult.JobInfo.CreatedAt, 0).Format("2006-01-02 15:04:05"),
"CompletedTime": time.Unix(jobResult.JobInfo.CompletedAt, 0).Format("2006-01-02 15:04:05"),
})
} else {

// NPU path: refresh status and duration from ModelArts.
result, err := modelarts.GetTrainJob(job.CloudBrainTaskId, job.ModelArtsVersionId)
if err != nil {
log.Error("get modelart job failed:", err)
ctx.NotFound(err)
return
}

job.Status = modelarts.TransTrainJobStatus(result.IntStatus)
job.RunTime = result.Duration / 1000
job.TrainJobDuration = models.ConvertDurationToStr(job.RunTime)
err = models.UpdateModelConvert(job)
if err != nil {
log.Error("UpdateJob failed:", err)
}

ctx.JSON(http.StatusOK, map[string]interface{}{
"ID": ID,
"JobStatus": job.Status,
})

}

}

// GrampusTaskGetLogByJobId fetches the training log of a grampus job and
// packages it with the job name. A fetch failure is logged and yields an
// empty Content rather than an error.
func GrampusTaskGetLogByJobId(jobId string, jobName string) map[string]interface{} {
	logText, err := grampus.GetTrainJobLog(jobId)
	if err != nil {
		log.Error("GetJobLog failed: %v", err)
		logText = ""
	}
	return map[string]interface{}{
		"JobName": jobName,
		"Content": logText,
	}
}

// GrampusForModelConvertGetLog serves the grampus log of the model convert
// task identified by the :id route parameter as JSON.
func GrampusForModelConvertGetLog(ctx *context.Context) {
ID := ctx.Params(":id")
job, err := models.QueryModelConvertById(ID)
if err != nil {
log.Error("GetCloudbrainByJobName failed: %v", err, ctx.Data["MsgID"])
ctx.ServerError(err.Error(), err)
return
}

result := GrampusTaskGetLogByJobId(job.CloudBrainTaskId, job.Name)
// NOTE(review): GrampusTaskGetLogByJobId always returns a non-nil map, so
// this branch looks unreachable; if it ever fired, err would be nil here
// and err.Error() would panic — confirm intent.
if result == nil {
log.Error("GetJobLog failed: %v", err, ctx.Data["MsgID"])
ctx.ServerError(err.Error(), err)
return
}
ctx.JSON(http.StatusOK, result)
}

// CloudBrainModelConvertList lists the result files of the model convert
// task identified by the :id route parameter. The response bundles the file
// listing produced by QueryModelConvertResultFileList together with the
// task record and the path split into segments for breadcrumb rendering.
func CloudBrainModelConvertList(ctx *context.APIContext) {
	ID := ctx.Params(":id")
	parentDir := ctx.Query("parentDir")
	dirArray := strings.Split(parentDir, "/")
	// Convert tasks expose a single fixed version.
	var versionName = "V0001"

	job, err := models.QueryModelConvertById(ID)
	if err != nil {
		// job must not be dereferenced here: it is nil when the lookup
		// fails (the old code logged job.Name and could panic).
		log.Error("QueryModelConvertById(%s) failed:%v", ID, err.Error())
		ctx.ServerError("GetModelDirs failed:", err)
		return
	}
	result, err := QueryModelConvertResultFileList(ctx, ID)
	if err != nil {
		log.Error("QueryModelConvertResultFileList(%s) failed:%v", ID, err.Error())
		ctx.ServerError("GetModelDirs failed:", err)
		return
	}
	ctx.JSON(http.StatusOK, map[string]interface{}{
		"JobID":            job.ID,
		"VersionName":      versionName,
		"StatusOK":         0,
		"Path":             dirArray,
		"Dirs":             result,
		"task":             job,
		"PageIsCloudBrain": true,
	})
}

// QueryModelConvertResultFileList returns the output files produced by the
// model convert task with the given id. GPU (grampus) results are read from
// the minio model directory and returned newest first; NPU (ModelArts)
// results are listed from the fixed-version output directory on OBS.
func QueryModelConvertResultFileList(ctx *context.APIContext, id string) ([]storage.FileInfo, error) {
	parentDir := ctx.Query("parentDir")
	job, err := models.QueryModelConvertById(id)
	if err != nil {
		// job is nil when the lookup fails; log the id instead of
		// dereferencing it (the old code logged job.Name and could panic).
		log.Error("QueryModelConvertById(%s) failed:%v", id, err.Error())
		return nil, err
	}
	if job.IsGpuTrainTask() {
		//get dirs
		dirs, err := routerRepo.GetModelDirs(job.ID, parentDir)
		if err != nil {
			log.Error("GetModelDirs failed:%v", err.Error(), ctx.Data["msgID"])
			return nil, err
		}

		var fileInfos []storage.FileInfo
		if err = json.Unmarshal([]byte(dirs), &fileInfos); err != nil {
			log.Error("json.Unmarshal failed:%v", err.Error(), ctx.Data["msgID"])
			return nil, err
		}

		// Normalize modification times to the local zone for display.
		for i, fileInfo := range fileInfos {
			temp, _ := time.Parse("2006-01-02 15:04:05", fileInfo.ModTime)
			fileInfos[i].ModTime = temp.Local().Format("2006-01-02 15:04:05")
		}

		// Newest files first.
		sort.Slice(fileInfos, func(i, j int) bool {
			return fileInfos[i].ModTime > fileInfos[j].ModTime
		})

		return fileInfos, nil
	}

	// NPU task: list the fixed-version output directory on OBS.
	// (Renamed from `models`, which shadowed the models package.)
	versionName := "V0001"
	fileList, err := storage.GetObsListObject(job.ID, "output/", parentDir, versionName)
	if err != nil {
		log.Info("get TrainJobListModel failed:", err)
		return nil, err
	}
	return fileList, nil
}

// doGrampusModelConvertMigrate starts result migration for a grampus model
// convert job that has finished successfully and has not yet been migrated.
// A redis distributed lock keyed by the cloudbrain task id prevents
// concurrent migrations of the same job; the completion polling runs in a
// background goroutine. Non-succeeded or already-migrated jobs are no-ops.
func doGrampusModelConvertMigrate(status string, job *models.AiModelConvert) error {
	if strings.ToUpper(status) != models.GrampusStatusSucceeded {
		return nil
	}
	if job.StatusResult == "MIGRATE_SUCCEED" {
		// Already migrated: nothing to do.
		return nil
	}
	lock := redis_lock.NewDistributeLock(redis_key.RecordHandleLock(job.CloudBrainTaskId))
	success, err := lock.Lock(60 * time.Second)
	if err != nil {
		// CloudBrainTaskId is a string, so %s is the matching verb
		// (the old %d printed "%!d(string=...)").
		log.Error("HandleUnfinishedMigrateRecord lock err.ID=%s %v", job.CloudBrainTaskId, err)
		return err
	}
	if !success {
		log.Error("HandleUnfinishedMigrateRecord lock failed.ID=%s ", job.CloudBrainTaskId)
		return nil
	}
	//todo migrate
	grampus.PostModelMigrate(job.CloudBrainTaskId)
	go dealModelConvertModelMigrate(job)
	return nil
}

// dealModelConvertModelMigrate polls the migration status of the given
// convert job — at most 21 attempts, 3 seconds apart — stopping as soon as
// updateModelMigrateStatus reports a terminal state.
func dealModelConvertModelMigrate(job *models.AiModelConvert) {
	for attempt := 0; attempt <= 20; attempt++ {
		log.Info("deal count= " + fmt.Sprint(attempt))
		if !updateModelMigrateStatus(job) {
			break
		}
		time.Sleep(3 * time.Second)
	}
}

// updateModelMigrateStatus checks the grampus migration status of the given
// convert job. Once the migration succeeds it moves the result bucket into
// the OpenI minio storage and marks the record MIGRATE_SUCCEED.
// It returns true while the caller should keep polling and false once the
// migration reached a terminal state (success, failure, or not needed).
func updateModelMigrateStatus(job *models.AiModelConvert) bool {
	res, err := grampus.ModelMigrateInfo(job.CloudBrainTaskId)
	if err != nil {
		// CloudBrainTaskId is a string, so %s is the matching verb
		// (the old %d printed "%!d(string=...)"). Treat fetch errors as
		// transient: keep polling.
		log.Error("ModelMigrateInfo err. r.ID=%s %v", job.CloudBrainTaskId, err)
		return true
	}
	log.Info("model migrate status=" + fmt.Sprint(res.Status))
	status := models.GrampusMigrateResponse(res.Status).ConvertToModelMigrateStep()
	if status == models.GrampusMigrateSuccess {
		log.Info("start to move grampus bucket to minio")
		//to move bucket
		if err := schedule.MoveBucketInOpenIMinio(res.DestObjectKey, grampus.GetGPUModelObjectKey(job.ID), res.DestBucket, setting.Attachment.Minio.Bucket); err != nil {
			log.Error("MoveBucketInOpenIMinio err.%v", err)
		} else {
			models.UpdateResultMigrateFlag(job.ID, "MIGRATE_SUCCEED")
		}
		return false
	}
	if status == models.GrampusMigrateFailed || status == models.GrampusMigrateNoNeed {
		return false
	}
	return true
}

// getModelMigrateStatusFromGrampus queries grampus for the migration status
// of the given job id and maps it to the internal ModelMigrateStep value.
// It returns -1 when the status cannot be fetched.
func getModelMigrateStatusFromGrampus(jobId string) models.ModelMigrateStep {
	res, err := grampus.ModelMigrateInfo(jobId)
	if err != nil {
		// jobId is a string, so %s is the matching verb
		// (the old %d printed "%!d(string=...)").
		log.Error("ModelMigrateInfo err. r.ID=%s %v", jobId, err)
		return -1
	}
	log.Info("model convert ModelMigrateInfo r.ID=%s res=%+v", jobId, res)
	return models.GrampusMigrateResponse(res.Status).ConvertToModelMigrateStep()
}

+ 348
- 78
routers/repo/ai_model_convert.go View File

@@ -12,6 +12,8 @@ import (
"path"
"strings"

"code.gitea.io/gitea/entity"
"code.gitea.io/gitea/manager/client/grampus"
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/cloudbrain"
"code.gitea.io/gitea/modules/context"
@@ -21,6 +23,7 @@ import (
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/storage"
"code.gitea.io/gitea/modules/timeutil"
"code.gitea.io/gitea/services/ai_task_service/storage_helper"
uuid "github.com/satori/go.uuid"
)

@@ -34,7 +37,7 @@ const (
MXNET_ENGINE = 6
ModelMountPath = "/model"
CodeMountPath = "/code"
DataSetMountPath = "/dataset"
DataSetMountPath = "/tmp/dataset"
LogFile = "log.txt"
DefaultBranchName = "master"
SubTaskName = "task1"
@@ -205,7 +208,6 @@ func createNpuTrainJob(modelConvert *models.AiModelConvert, ctx *context.Context
return err
}
deleteLocalDir(codeLocalPath)

intputshape := strings.Split(modelConvert.InputShape, ",")
n := "256"
c := "1"
@@ -416,12 +418,22 @@ func createGpuTrainJob(modelConvert *models.AiModelConvert, ctx *context.Context
}

log.Info("command=" + command)

codePath := setting.JobPath + modelConvert.ID + CodeMountPath
downloadConvertCode(setting.ModelConvert.ConvertRepoPath, codePath, DefaultBranchName)
codeTmpPath := setting.JobPath + modelConvert.ID + CodeMountPath + "tmp"
uploader := storage_helper.SelectUploaderFromStorageType(entity.MINIO)
codeRemoteDir := path.Join(uploader.GetJobDefaultObjectKeyPrefix(modelConvert.ID), "code")
log.Info("codePath=" + codePath)
log.Info("codeTmpPath=" + codeTmpPath)
log.Info("codeRemoteDir=" + codeRemoteDir)

downloadConvertCode(setting.ModelConvert.ConvertRepoPath, codeTmpPath, DefaultBranchName)

Zip(codePath+"/master.zip", codeTmpPath)

uploadCodeToMinio(codePath+"/", modelConvert.ID, CodeMountPath+"/")

deleteLocalDir(codePath)
deleteLocalDir(codeTmpPath)

minioCodePath := setting.Attachment.Minio.RealPath + setting.Attachment.Minio.Bucket + "/" + setting.CBCodePathPrefix + modelConvert.ID + "/code"
log.Info("minio codePath=" + minioCodePath)
@@ -436,73 +448,330 @@ func createGpuTrainJob(modelConvert *models.AiModelConvert, ctx *context.Context
minioModelPath := setting.Attachment.Minio.RealPath + setting.Attachment.Minio.Bucket + "/" + setting.CBCodePathPrefix + modelConvert.ID + "/model"
log.Info("minio model path=" + minioModelPath)

if TrainResourceSpecs == nil {
json.Unmarshal([]byte(setting.TrainResourceSpecs), &TrainResourceSpecs)
}
resourceSpec := TrainResourceSpecs.ResourceSpec[setting.ModelConvert.GPU_Resource_Specs_ID]
jobResult, err := cloudbrain.CreateJob(modelConvert.ID, models.CreateJobParams{
JobName: modelConvert.ID,
RetryCount: 1,
GpuType: setting.ModelConvert.GpuQueue,
Image: IMAGE_URL,
TaskRoles: []models.TaskRole{
{
Name: SubTaskName,
TaskNumber: 1,
MinSucceededTaskCount: 1,
MinFailedTaskCount: 1,
CPUNumber: resourceSpec.CpuNum,
GPUNumber: resourceSpec.GpuNum,
MemoryMB: resourceSpec.MemMiB,
ShmMB: resourceSpec.ShareMemMiB,
Command: command,
NeedIBDevice: false,
IsMainRole: false,
UseNNI: false,
},
},
Volumes: []models.Volume{
{
HostPath: models.StHostPath{
Path: minioCodePath,
MountPath: CodeMountPath,
ReadOnly: false,
datasetRemoteDir := path.Join(uploader.GetJobDefaultObjectKeyPrefix(modelConvert.ID), "dataset")

outputRemoteDir := path.Join(uploader.GetJobDefaultObjectKeyPrefix(modelConvert.ID), "model")

datasetDirectoryObjectKey := datasetRemoteDir
if !strings.HasSuffix(datasetRemoteDir, "/") {
datasetDirectoryObjectKey = datasetRemoteDir + "/"
}
codeObjectKey := codeRemoteDir + "/master.zip"
log.Info("codeObjectKey=" + codeObjectKey)
log.Info("uploader.GetRealPath(codeObjectKey)=" + uploader.GetRealPath(codeObjectKey))

req := entity.CreateTrainTaskRequest{
Name: modelConvert.ID,
DisplayJobName: modelConvert.Name,
Description: "",
TaskConfig: getGrampusTrainTaskConfig(),
Tasks: []entity.TrainTask{{
Command: command,
Name: modelConvert.ID,
ResourceSpecId: setting.ModelConvert.GPU_Resource_Specs_ID, //toDO
ImageId: "", //form.ImageID,
ImageUrl: IMAGE_URL,
Datasets: []entity.ContainerData{
entity.ContainerData{
ContainerPath: DataSetMountPath,
Name: "dataset",
ReadOnly: false,
ObjectKey: datasetDirectoryObjectKey,
RealPath: uploader.GetRealPath(datasetRemoteDir),
Bucket: uploader.GetBucket(),
EndPoint: uploader.GetEndpoint(),
GetBackEndpoint: uploader.GetEndpoint(),
IsDir: true,
StorageType: entity.MINIO,
},
},
{
HostPath: models.StHostPath{
Path: dataActualPath,
MountPath: DataSetMountPath,
ReadOnly: true,
Code: []entity.ContainerData{
entity.ContainerData{
Name: strings.ToLower(ctx.Repo.Repository.Name),
Bucket: uploader.GetBucket(),
EndPoint: uploader.GetEndpoint(),
ObjectKey: codeObjectKey,
ReadOnly: true,
ContainerPath: "/tmp/code/master.zip",
RealPath: uploader.GetRealPath(codeObjectKey),
IsDir: true,
S3DownloadUrl: uploader.GetS3DownloadUrl(codeObjectKey),
StorageType: entity.MINIO,
},
},
{
HostPath: models.StHostPath{
Path: minioModelPath,
MountPath: ModelMountPath,
ReadOnly: false,
Queues: []models.ResourceQueue{
models.ResourceQueue{
AiCenterCode: setting.ModelConvert.GPU_AiCenter_Code,
},
},
},
})
PreTrainModel: nil,
BootFile: "",
OutPut: []entity.ContainerData{{
ContainerPath: "/tmp/output",
ReadOnly: false,
ObjectKey: outputRemoteDir,
RealPath: uploader.GetRealPath(outputRemoteDir),
Bucket: uploader.GetBucket(),
EndPoint: uploader.GetEndpoint(),
GetBackEndpoint: uploader.GetEndpoint(),
IsDir: true,
StorageType: entity.MINIO,
}},
Params: models.Parameters{},
Spec: &models.Specification{
ID: setting.ModelConvert.GPU_Spec_ID,
SourceSpecId: setting.ModelConvert.GPU_Resource_Specs_ID,
AccCardsNum: 1,
AccCardType: setting.ModelConvert.GPU_Spec_AccCardType,
ComputeResource: "GPU",
AiCenterCode: setting.ModelConvert.GPU_AiCenter_Code,
},
RepoName: ctx.Repo.Repository.Name,
WorkServerNumber: 1,
}},
}

reqJson, _ := json.Marshal(req)
log.Info("reqJson=" + string(reqJson))

jobResult, err := createGrampusTrainJob(req, command)

if err != nil {
log.Error("CreateJob failed:", err.Error(), ctx.Data["MsgID"])
models.UpdateModelConvertFailed(modelConvert.ID, "FAILED", err.Error())
return err
}
if jobResult.Code != Success {
log.Error("CreateJob(%s) failed:%s", modelConvert.ID, jobResult.Msg, ctx.Data["MsgID"])
models.UpdateModelConvertFailed(modelConvert.ID, "FAILED", err.Error())
return errors.New(jobResult.Msg)
jobResultJson, _ := json.Marshal(jobResult)
log.Info("jobResultJson=" + string(jobResultJson))

if jobResult.ErrorCode != 0 {
log.Error("CreateJob(%s) failed:%s", modelConvert.ID, jobResult.ErrorMsg, ctx.Data["MsgID"])
models.UpdateModelConvertFailed(modelConvert.ID, "FAILED", jobResult.ErrorMsg)
return errors.New(jobResult.ErrorMsg)
}

var jobID = jobResult.Payload["jobId"].(string)
var jobID = jobResult.JobInfo.JobID
log.Info("jobId=" + jobID)
models.UpdateModelConvertCBTI(modelConvert.ID, jobID)

return nil
}

// getGrampusTrainTaskConfig builds the container layout used by GPU model
// convert jobs on the Grampus (C2Net) cluster: the in-container mount path
// for code, dataset, pretrained model and output, which storage backends
// each slot accepts, and the action type recorded for the task.
func getGrampusTrainTaskConfig() *entity.AITaskBaseConfig {
	const (
		codePath          = "/tmp/code"
		datasetPath       = "/tmp/dataset"
		pretrainModelPath = "/tmp/pretrainmodel"
		outputPath        = "/tmp/output"
	)

	steps := map[entity.ContainerDataType]*entity.ContainerBuildOpts{
		entity.ContainerCode: {
			ContainerPath:       codePath,
			StorageRelativePath: cloudbrain.CodeMountPath,
			ReadOnly:            false,
			AcceptStorageType:   []entity.StorageType{entity.MINIO, entity.OBS},
		},
		entity.ContainerDataset: {
			ContainerPath:     datasetPath,
			ReadOnly:          true,
			AcceptStorageType: []entity.StorageType{entity.MINIO, entity.OBS},
		},
		entity.ContainerPreTrainModel: {
			ContainerPath:     pretrainModelPath,
			ReadOnly:          true,
			AcceptStorageType: []entity.StorageType{entity.MINIO, entity.OBS},
		},
		// Output is writable and only backed by MinIO.
		entity.ContainerOutPutPath: {
			ContainerPath:       outputPath,
			StorageRelativePath: cloudbrain.ModelMountPath,
			ReadOnly:            false,
			AcceptStorageType:   []entity.StorageType{entity.MINIO},
			MKDIR:               false,
		},
	}

	cfg := &entity.AITaskBaseConfig{ContainerSteps: steps}
	cfg.ActionType = models.ActionCreateGrampusGPUTrainTask
	cfg.IsActionUseJobId = true
	return cfg
}

// createGrampusTrainJob converts the generic train-task request into the
// Grampus job format and submits it to the Grampus (C2Net) scheduler.
// exeCommand is the user command appended after the generated setup steps.
// It returns the raw scheduler response; the caller inspects ErrorCode.
func createGrampusTrainJob(req entity.CreateTrainTaskRequest, exeCommand string) (*models.CreateGrampusJobResponse, error) {
	jobResult, err := grampus.CreateJob(convertTrainReq2Grampus(req, exeCommand))
	if err != nil {
		// Fixed: the original message said "CreateNoteBook failed", a
		// copy-paste leftover — this path creates a train job.
		log.Error("create grampus train job failed: %v", err.Error())
		return nil, err
	}
	return jobResult, nil
}

// convertTrainReq2Grampus maps a CreateTrainTaskRequest onto the request
// payload expected by the Grampus API. Every sub-task shares the same
// generated shell command.
func convertTrainReq2Grampus(req entity.CreateTrainTaskRequest, exeCommand string) models.CreateGrampusJobRequest {
	command := generateGrampusTrainCommand(req, exeCommand)

	grampusTasks := make([]models.GrampusTasks, len(req.Tasks))
	for idx, task := range req.Tasks {
		grampusTasks[idx] = convertTrainTask2Grampus(task, command)
	}

	return models.CreateGrampusJobRequest{
		Name:  req.Name,
		Tasks: grampusTasks,
	}
}

// convertTrainTask2Grampus maps a single TrainTask onto the Grampus task
// wire format. command is the fully generated shell command for the task;
// only the first pre-train model (if any) and the first queue's AI center
// code are forwarded, matching the Grampus single-value fields.
func convertTrainTask2Grampus(t entity.TrainTask, command string) models.GrampusTasks {
	// Fixed: the original indexed t.Queues[0] unconditionally, which
	// panics when the task carries no queue. Fall back to an empty
	// center list instead.
	var centerIDs []string
	if len(t.Queues) > 0 {
		centerIDs = []string{t.Queues[0].AiCenterCode}
	}
	return models.GrampusTasks{
		Name:             t.Name,
		ResourceSpecId:   t.ResourceSpecId,
		ImageId:          t.ImageId,
		ImageUrl:         t.ImageUrl,
		Datasets:         convertContainerArray2GrampusArray(t.Datasets),
		Code:             convertContainerArray2Grampus(t.Code),
		Command:          command,
		CenterID:         centerIDs,
		ReplicaNum:       1,
		Models:           convertContainerArray2GrampusArray(t.PreTrainModel),
		BootFile:         t.BootFile,
		OutPut:           convertContainerArray2Grampus(t.OutPut),
		WorkServerNumber: t.WorkServerNumber,
	}
}

// convertContainerArray2GrampusArray converts every ContainerData entry to
// its GrampusDataset counterpart, preserving order.
func convertContainerArray2GrampusArray(containerDatas []entity.ContainerData) []models.GrampusDataset {
	converted := make([]models.GrampusDataset, len(containerDatas))
	for idx, data := range containerDatas {
		converted[idx] = convertContainer2Grampus(data)
	}
	return converted
}

// convertContainerArray2Grampus collapses a ContainerData slice to a single
// GrampusDataset: the first element when present, a zero value otherwise
// (the Grampus API models code/model/output as one entry, not a list).
func convertContainerArray2Grampus(containerDatas []entity.ContainerData) models.GrampusDataset {
	// len() is safe on a nil slice, so the original's extra nil check
	// (`containerDatas != nil && len(containerDatas) > 0`) was redundant
	// (staticcheck S1009).
	if len(containerDatas) > 0 {
		return convertContainer2Grampus(containerDatas[0])
	}
	return models.GrampusDataset{}
}

// convertContainer2Grampus copies the storage-location fields of a
// ContainerData into the GrampusDataset wire format. Fields Grampus does
// not know about (RealPath, IsDir, StorageType, S3DownloadUrl, ...) are
// intentionally dropped.
func convertContainer2Grampus(d entity.ContainerData) models.GrampusDataset {
	var out models.GrampusDataset
	out.Name = d.Name
	out.Bucket = d.Bucket
	out.EndPoint = d.EndPoint
	out.ObjectKey = d.ObjectKey
	out.ContainerPath = d.ContainerPath
	out.ReadOnly = d.ReadOnly
	out.GetBackEndpoint = d.GetBackEndpoint
	out.Size = d.Size
	return out
}

// generateGrampusTrainCommand assembles the full shell command executed in
// the Grampus container: mkdir of the mount points, code unzip, dataset
// unzip, environment exports for result upload, then the user command.
// Mount paths are taken from the first task's container config.
//
// NOTE(review): assumes req.Tasks and t.Code are non-empty — indexing [0]
// panics otherwise; confirm callers always populate both.
func generateGrampusTrainCommand(req entity.CreateTrainTaskRequest, exeCommand string) string {
	t := req.Tasks[0]
	containerConfig := req.TaskConfig
	computeResource := t.Spec.ComputeResource
	var codePath = containerConfig.GetContainerPath(entity.ContainerCode)
	var modelPath = containerConfig.GetContainerPath(entity.ContainerPreTrainModel)
	var datasetPath = containerConfig.GetContainerPath(entity.ContainerDataset)
	var outputPath = containerConfig.GetContainerPath(entity.ContainerOutPutPath)

	builder := &entity.CommandBuilder{}
	builder.
		//mkdir the container mount dirs
		Add(buildMkdirCommand(codePath, modelPath, datasetPath, outputPath)).
		//unzip the uploaded code archive
		Add(buildUnzipCodeCommand(codePath, t.Code[0].ContainerPath, computeResource)).
		//unzip datasets; NOTE(review): this helper can return nil when
		//t.Datasets is empty — confirm CommandBuilder.Add tolerates nil
		Add(buildUnzipDatasetCommand(t.Datasets, datasetPath, computeResource)).
		//export bucket/remote_path env vars for result upload
		Add(buildExportCommand(req.Name, computeResource)).
		//finally run the user command
		Add(buildExeCommand(exeCommand))

	return builder.ToString()
}
// buildExeCommand chains each given shell command string into a
// CommandBuilder, in the order provided.
func buildExeCommand(exeCommand ...string) *entity.CommandBuilder {
	b := &entity.CommandBuilder{}
	for _, cmd := range exeCommand {
		b.Next(entity.NewCommand(cmd))
	}
	return b
}

// buildMkdirCommand emits one `mkdir -p <dir>` step per directory so all
// container mount points exist before code/dataset preparation runs.
func buildMkdirCommand(dirs ...string) *entity.CommandBuilder {
	b := &entity.CommandBuilder{}
	for _, d := range dirs {
		b.Next(entity.NewCommand("mkdir", "-p", d))
	}
	return b
}

// buildUnzipCodeCommand emits the shell steps that extract the uploaded
// code archive (codeFilePath) into codeConfigPath. NPU jobs get an empty
// builder because the platform provisions code there without an archive.
func buildUnzipCodeCommand(codeConfigPath, codeFilePath, computeSource string) *entity.CommandBuilder {
	builder := &entity.CommandBuilder{}
	if computeSource == models.NPU {
		return builder
	}
	builder.
		Next(entity.NewCommand("echo", "'start to unzip code'")).
		Next(entity.NewCommand("cd", codeConfigPath)).
		Next(entity.NewCommand("unzip", "-q", codeFilePath)).
		Next(entity.NewCommand("echo", "'unzip code finished'")).
		Next(entity.NewCommand("ls", "-l")).
		// NOTE(review): `ls -l mnist_pytorchexample_gpu` looks like a
		// debugging leftover tied to one example repo — confirm it can
		// be removed (it merely lists a dir; `ls` failure is harmless).
		Next(entity.NewCommand("ls", "-l", "mnist_pytorchexample_gpu"))
	return builder
}
func buildUnzipDatasetCommand(datasets []entity.ContainerData, datasetPath, computeSource string) *entity.CommandBuilder {
builder := &entity.CommandBuilder{}
if computeSource == models.NPU {
return builder
}
if len(datasets) == 0 {
return nil
}
builder.Next(entity.NewCommand("cd", datasetPath)).
Next(entity.NewCommand("echo", "'start to unzip datasets'"))

fileDatasets := make([]entity.ContainerData, 0)
for _, dataset := range datasets {
if !dataset.IsDir {
fileDatasets = append(fileDatasets, dataset)
}
}
//单数据集
if len(fileDatasets) == 1 {
if strings.HasSuffix(fileDatasets[0].Name, ".tar.gz") {
builder.Next(entity.NewCommand("tar", "--strip-components=1", "-zxvf", "'"+fileDatasets[0].Name+"'"))
} else {
builder.Next(entity.NewCommand("unzip", "-q", "'"+fileDatasets[0].Name+"'"))
}
builder.Next(entity.NewCommand("ls", "-l"))
builder.Next(entity.NewCommand("echo", "'unzip datasets finished'"))
return builder
}
//多数据集
for i := 0; i < len(fileDatasets); i++ {
name := fileDatasets[i].Name
if strings.HasSuffix(name, ".tar.gz") {
builder.Next(entity.NewCommand("tar", "-zxvf", name))
} else {
builder.Next(entity.NewCommand("unzip", "-q", "'"+name+"'", "-d", "'./"+strings.TrimSuffix(name, ".zip")+"'"))
}
}
builder.Next(entity.NewCommand("ls", "-l"))
builder.Next(entity.NewCommand("echo", "'unzip datasets finished'"))
return builder
}

// buildExportCommand exports the environment variables the convert script
// reads when uploading its results: for NPU jobs the OBS bucket and remote
// path, for all other (GPU) jobs the environment name and the MinIO
// remote path.
func buildExportCommand(jobName, computeResource string) *entity.CommandBuilder {
	b := &entity.CommandBuilder{}
	if computeResource == models.NPU {
		remote := setting.CodePathPrefix + jobName + modelarts.OutputPath
		b.Next(entity.NewCommand("export", "bucket="+setting.Grampus.Env, "&&", "export", "remote_path="+remote))
		return b
	}
	remote := setting.CBCodePathPrefix + jobName + cloudbrain.ModelMountPath + "/"
	b.Next(entity.NewCommand("export", "env="+setting.Grampus.Env, "&&", "export", "remote_path="+remote))
	return b
}

func deleteLocalDir(dirpath string) {
//TODO delete
_err := os.RemoveAll(dirpath)
@@ -527,15 +796,13 @@ func getGpuModelConvertCommand(name string, modelFile string, modelConvert *mode
h = inputshape[2]
w = inputshape[3]
}
command += "python3 /code/" + bootfile + " --model " + modelFile + " --n " + n + " --c " + c + " --h " + h + " --w " + w
command += "list -all /tmp/code;list -all /tmp/dataset;python3 /tmp/code/" + bootfile + " --model " + modelFile + " --n " + n + " --c " + c + " --h " + h + " --w " + w
if modelConvert.DestFormat == CONVERT_FORMAT_TRT {
if modelConvert.NetOutputFormat == NetOutputFormat_FP16 {
command += " --fp16 True"
} else {
command += " --fp16 False"
}
}
command += " > " + ModelMountPath + "/" + name + "-" + LogFile
command += " > /tmp/output/" + name + "-" + LogFile
return command
}

@@ -557,9 +824,13 @@ func DeleteModelConvert(ctx *context.Context) {

func deleteCloudBrainTask(task *models.AiModelConvert) {
if task.IsGpuTrainTask() {
log.Info("delete cloudbrain one resource.")
log.Info("delete grampus model convert task.")
_, err := grampus.DeleteJob(task.CloudBrainTaskId)
if err != nil {
log.Error("Delete grampus job failed:%v", err)
}
dirPath := setting.CBCodePathPrefix + task.ID + "/"
err := storage.Attachments.DeleteDir(dirPath)
err = storage.Attachments.DeleteDir(dirPath)
if err != nil {
log.Error("DeleteDir(%s) failed:%v", dirPath, err)
}
@@ -579,7 +850,7 @@ func stopModelConvert(id string) error {
return err
}
if job.IsGpuTrainTask() {
err = cloudbrain.StopJob(job.CloudBrainTaskId)
_, err = grampus.StopJob(job.CloudBrainTaskId)
if err != nil {
log.Error("Stop cloudbrain Job(%s) failed:%v", job.CloudBrainTaskId, err)
}
@@ -662,34 +933,33 @@ func ShowModelConvertInfo(ctx *context.Context) {
ctx.HTML(200, tplModelConvertInfo)
return
}
result, err := cloudbrain.GetJob(job.CloudBrainTaskId)
jobResult, err := grampus.GetJob(job.CloudBrainTaskId)
if err != nil {
log.Info("error:" + err.Error())
ctx.Data["error"] = err.Error()
ctx.HTML(200, tplModelConvertInfo)
return
}
if result != nil {
jobRes, _ := models.ConvertToJobResultPayload(result.Payload)
ctx.Data["result"] = jobRes
taskRoles := jobRes.TaskRoles
taskRes, _ := models.ConvertToTaskPod(taskRoles[cloudbrain.SubTaskName].(map[string]interface{}))
ctx.Data["taskRes"] = taskRes
ctx.Data["ExitDiagnostics"] = taskRes.TaskStatuses[0].ExitDiagnostics
ctx.Data["AppExitDiagnostics"] = jobRes.JobStatus.AppExitDiagnostics

job.Status = jobRes.JobStatus.State

if jobRes.JobStatus.State != string(models.JobWaiting) && jobRes.JobStatus.State != string(models.JobFailed) {
job.ContainerIp = taskRes.TaskStatuses[0].ContainerIP
job.ContainerID = taskRes.TaskStatuses[0].ContainerID
job.Status = taskRes.TaskStatuses[0].State
jobEvent, err := grampus.GetTrainJobEvents(job.CloudBrainTaskId)
jobEventJson, _ := json.Marshal(jobEvent)
log.Info("jobEventJson=" + string(jobEventJson))
if jobEvent != nil {
ctx.Data["AppExitDiagnostics"] = string(jobEventJson)
}
if jobResult != nil {
ctx.Data["ExitDiagnostics"] = jobResult.ExitDiagnostics
if jobResult.JobInfo.Status == models.GrampusStatusPending {
job.Status = models.GrampusStatusWaiting
} else {
job.Status = strings.ToUpper(jobResult.JobInfo.Status)
}
if jobRes.JobStatus.State != string(models.JobWaiting) {
models.ModelComputeAndSetDuration(job, jobRes)
job.StartTime = timeutil.TimeStamp(jobResult.JobInfo.StartedAt)
job.EndTime = timeutil.TimeStamp(jobResult.JobInfo.CompletedAt)
if strings.ToUpper(jobResult.JobInfo.Status) != models.GrampusStatusWaiting && jobResult.JobInfo.Status != models.GrampusStatusPending {
models.ModelConvertSetDuration(job)
err = models.UpdateModelConvert(job)
if err != nil {
log.Error("UpdateModelConvert failed:", err)
log.Error("UpdateJob failed:", err)
}
}
}


+ 6
- 5
routers/repo/cloudbrain.go View File

@@ -114,10 +114,10 @@ func cloudBrainNewDataPrepare(ctx *context.Context, jobType string) error {
ctx.Data["benchmark_categories"] = categories.Category

ctx.Data["benchmark_types"] = GetBenchmarkTypes(ctx).BenchmarkType
queuesDetail, _ := cloudbrain.GetQueuesDetail()
if queuesDetail != nil {
ctx.Data["QueuesDetail"] = queuesDetail
}
// queuesDetail, _ := cloudbrain.GetQueuesDetail()
// if queuesDetail != nil {
// ctx.Data["QueuesDetail"] = queuesDetail
// }

prepareCloudbrainOneSpecs(ctx)

@@ -2438,7 +2438,8 @@ func CloudBrainBenchmarkNew(ctx *context.Context) {
ctx.ServerError("get new cloudbrain info failed", err)
return
}
ctx.HTML(200, tplCloudBrainBenchmarkNew)
//ctx.HTML(200, tplCloudBrainBenchmarkNew)
ctx.HTML(200, tplCloudBrainModelSafetyNewNpu)
}

func getBenchmarkAttachment(benchmarkTypeID, benchmarkChildTypeID int, ctx *context.Context) (*models.BenchmarkDataset, error) {


+ 2
- 2
routers/repo/repo_statistic.go View File

@@ -15,8 +15,8 @@ import (
)

// StatisticAuto kicks off the periodic statistics jobs. Each job runs in
// its own goroutine so the caller (the scheduler) is not blocked.
func StatisticAuto() {
	// Fixed: the merged text invoked each job twice — once synchronously
	// and once via `go`; keep only the asynchronous calls.
	go RepoStatisticAuto()
	go TimingCountData()
}

//auto daily


+ 97
- 1
routers/repo/util.go View File

@@ -1,7 +1,103 @@
package repo

import "regexp"
import (
"archive/zip"
"io"
"io/fs"
"io/ioutil"
"os"
"path/filepath"
"regexp"
)

// NamePattern validates user-supplied resource names: 1-100 characters
// drawn from ASCII letters, digits, '-', '_' and '.'.
// Fixed: the original class `[A-Za-z0-9-_\\.]` contained `\\`, which made
// a literal backslash a legal name character.
var NamePattern = regexp.MustCompile(`^[A-Za-z0-9_.-]{1,100}$`)

// GrampusNamePattern validates Grampus job names: a leading ASCII letter
// followed by up to 49 word characters, '-' or '.'.
// Fixed: the original class `[\w|\-|\\.]` also admitted '|' and '\' —
// artifacts of writing alternation syntax inside a character class.
var GrampusNamePattern = regexp.MustCompile(`^[A-Za-z][\w.-]{0,49}$`)

// Zip compresses the specified files or dirs to zip archive.
// If a path is a dir don't need to specify the trailing path separator.
// For example calling Zip("archive.zip", "dir", "csv/baz.csv") will get archive.zip and the content of which is
// baz.csv
// dir
// ├── bar.txt
// └── foo.txt
// Note that if a file is a symbolic link it will be skipped.
func Zip(zipPath string, paths string) error {
// Create zip file and it's parent dir.
if err := os.MkdirAll(filepath.Dir(zipPath), os.ModePerm); err != nil {
return err
}
archive, err := os.Create(zipPath)
if err != nil {
return err
}
defer archive.Close()

// New zip writer.
zipWriter := zip.NewWriter(archive)
defer zipWriter.Close()

files, err := ioutil.ReadDir(paths)
if err != nil {
return err
}
// Traverse the file or directory.
for _, file := range files {
// Remove the trailing path separator if path is a directory.
rootPath := filepath.Join(paths, file.Name())

// Visit all the files or directories in the tree.
err = filepath.Walk(rootPath, walkFunc(rootPath, zipWriter))
if err != nil {
return err
}
}
return nil
}

func walkFunc(rootPath string, zipWriter *zip.Writer) filepath.WalkFunc {
return func(path string, info fs.FileInfo, err error) error {
if err != nil {
return err
}

// If a file is a symbolic link it will be skipped.
if info.Mode()&os.ModeSymlink != 0 {
return nil
}

// Create a local file header.
header, err := zip.FileInfoHeader(info)
if err != nil {
return err
}

// Set compression method.
header.Method = zip.Deflate

// Set relative path of a file as the header name.
header.Name, err = filepath.Rel(filepath.Dir(rootPath), path)
if err != nil {
return err
}
if info.IsDir() {
header.Name += string(os.PathSeparator)
}

// Create writer for the file header and save content of the file.
headerWriter, err := zipWriter.CreateHeader(header)
if err != nil {
return err
}
if info.IsDir() {
return nil
}
f, err := os.Open(path)
if err != nil {
return err
}
defer f.Close()
_, err = io.Copy(headerWriter, f)
return err
}
}

+ 2
- 5
routers/user/auth.go View File

@@ -76,12 +76,10 @@ func AutoSignIn(ctx *context.Context) (bool, error) {
if !models.HasEngine {
return false, nil
}

uname := ctx.GetCookie(setting.CookieUserName)
if len(uname) == 0 {
return false, nil
}

isSucceed := false
defer func() {
if !isSucceed {
@@ -98,12 +96,10 @@ func AutoSignIn(ctx *context.Context) (bool, error) {
}
return false, nil
}

if val, ok := ctx.GetSuperSecureCookie(
base.EncodeMD5(u.Rands+u.Passwd), setting.CookieRememberName); !ok || val != u.Name {
return false, nil
}

isSucceed = true

// Set session IDs
@@ -116,7 +112,8 @@ func AutoSignIn(ctx *context.Context) (bool, error) {
if err := ctx.Session.Release(); err != nil {
return false, err
}

log.Info("Auto login succeed.")
models.SaveLoginInfoToDb(ctx.Req.Request, u)
ctx.SetCookie(setting.CSRFCookieName, "", -1, setting.AppSubURL, setting.SessionConfig.Domain, setting.SessionConfig.Secure, true)
return true, nil
}


+ 11
- 11
services/cloudbrain/clear.go View File

@@ -53,16 +53,23 @@ func ClearCloudbrainResultSpace() {
var ids []int64
for _, task := range tasks {
//(type=0 or (type =2 and compute_resource='CPU/GPU')) and
if task.ComputeResource == "CPU/GPU" || task.ComputeResource == "GCU" {
if task.Type == models.TypeCloudBrainOne {
log.Info("clear cloud brain one,name=" + task.JobName)
err := DeleteCloudbrainOneJobStorage(task.JobName)
if err == nil {
log.Info("clear job in cloudbrain table:" + task.JobName)
ids = append(ids, task.ID)
}
if task.Type == models.TypeC2Net {
deleteC2NetTask(task)
} else if task.Type == models.TypeC2Net {
DeleteCloudbrainOneJobStorage(task.JobName)
deleteC2NetTask(task)
if task.ComputeResource == "NPU" {
deleteModelArtsStorage(task.JobName, task.VersionName)
}
log.Info("clear TypeC2Net,name=" + task.JobName)
ids = append(ids, task.ID)
} else {
log.Info("clear npu,name=" + task.JobName)
id := DeleteNPUJobStorage(task)
if id > 0 {
ids = append(ids, id)
@@ -77,7 +84,6 @@ func ClearCloudbrainResultSpace() {
if len(tasks) < setting.ClearStrategy.BatchSize+setting.ClearStrategy.DebugJobSize {
clearLocalHistoryTrashFile()
clearMinioHistoryTrashFile()

}
log.Info("clear cloudbrain one result space end.")

@@ -201,13 +207,7 @@ func DeleteNPUJobStorage(taskInfo *models.Cloudbrain) int64 {
}
return taskInfo.ID
}
if taskInfo.Type == models.TypeC2Net {
deleteC2NetTask(taskInfo)
if taskInfo.ComputeResource == "NPU" {
deleteModelArtsStorage(taskInfo.JobName, taskInfo.VersionName)
return taskInfo.ID
}
}

return 0
}



+ 2
- 2
templates/repo/cloudbrain/benchmark/index.tmpl View File

@@ -45,9 +45,9 @@
</div>
</div>
<div class="column right aligned">
<a class="ui compact orange basic icon button" href="https://openi.org.cn/projects/Benchmark/#algType" style="box-shadow: none;" target="_blank"><i class="large ri-trophy-fill middle aligned icon"></i>{{$.i18n.Tr "repo.benchmark_leaderboards"}}</a>
{{if .Permission.CanWrite $.UnitTypeCloudBrain}}
<a class="ui green button" href="{{.RepoLink}}/cloudbrain/benchmark/create">{{$.i18n.Tr "repo.modelarts.evaluate_job.new_job"}}</a>
<a class="ui green button" href="{{.RepoLink}}/modelsafety/create_npu">{{$.i18n.Tr "repo.modelarts.evaluate_job.new_job"}}</a>
{{else}}
<a class="ui disabled button" >{{$.i18n.Tr "repo.modelarts.evaluate_job.new_job"}}</a>
{{end}}


+ 2
- 10
templates/repo/cloudbrain/benchmark/new.tmpl View File

@@ -21,12 +21,8 @@
<div class="required min_title inline field">
<label class="label-fix-width" style="font-weight: normal;">{{.i18n.Tr "repo.cloudbrain.benchmark.evaluate_scenes"}}</label>
<div class="ui blue small menu compact selectcloudbrain">
<a class="item alogrithm_benchmark"
href="{{.Link}}?benchmarkMode=alogrithm">{{.i18n.Tr "repo.cloudbrain.benchmark.algorithm"}}</a>
<a class="active item model_benchmark"
href="{{.Link}}?benchmarkMode=model">{{.i18n.Tr "repo.cloudbrain.benchmark.model"}}</a>
<a class="item aisafety_benchmark"
href="{{.RepoLink}}/modelsafety/create_gpu">{{.i18n.Tr "modelsafety.model_security_evaluation"}}</a>
href="{{.RepoLink}}/modelsafety/create_npu">{{.i18n.Tr "modelsafety.model_security_evaluation"}}</a>
</div>
</div>
<div>
@@ -133,12 +129,8 @@
<div class="required min_title inline field">
<label class="label-fix-width" style="font-weight: normal;">{{.i18n.Tr "repo.cloudbrain.benchmark.evaluate_scenes"}}</label>
<div class="ui blue small menu compact selectcloudbrain">
<a class="active item alogrithm_benchmark"
href="{{.Link}}?benchmarkMode=alogrithm">{{.i18n.Tr "repo.cloudbrain.benchmark.algorithm"}}</a>
<a class="item model_benchmark"
href="{{.Link}}?benchmarkMode=model">{{.i18n.Tr "repo.cloudbrain.benchmark.model"}}</a>
<a class="item aisafety_benchmark"
href="{{.RepoLink}}/modelsafety/create_gpu">{{.i18n.Tr "modelsafety.model_security_evaluation"}}</a>
href="{{.RepoLink}}/modelsafety/create_npu">{{.i18n.Tr "modelsafety.model_security_evaluation"}}</a>
</div>
</div>



+ 10
- 25
templates/repo/modelmanage/convertshowinfo.tmpl View File

@@ -273,7 +273,9 @@ td, th {
<td class="ti-text-form-content">
<div class="text-span text-span-w">
<span style="font-size: 12px;" class="">{{TimeSinceUnix1 .CreatedUnix}}</span>
<span style="font-size: 12px;" class="">
{{if eq .StartTime 0}}--{{else}}{{TimeSinceUnix1 .StartTime}} {{end}}
</span>
</div>
</td>
</tr>
@@ -686,33 +688,17 @@ td, th {
function parseInfo(){
let jsonValue = document.getElementById("json_value").value;
let jsonObj = JSON.parse(jsonValue);
let podRoleName = jsonObj["podRoleName"];
let jobEvents = jsonObj["jobEvents"];
let html = "";
if (podRoleName != null){
let task0 = podRoleName["task1-0"];
let podEvents = jsonObj["podEvents"];
let podEventArray = podEvents[task0];
if(podEventArray != null){
for(var i=0; i < podEventArray.length;i++){
if (podEventArray[i]["reason"]!="") {
html +="<p><b>[" +podEventArray[i]["reason"] + "]</b></p>";
html +="<p>" +podEventArray[i]["message"] + "</p>";
html +="<p>" +podEventArray[i]["action"] + "</p>";
if (jobEvents != null){
for(var i=0; i < jobEvents.length;i++){
if (jobEvents[i]["reason"]!="") {
let time = jobEvents[i]["timestamp"] && new Date(jobEvents[i]["timestamp"])
html +="<p><b>[" +jobEvents[i]["reason"] + "]</b> <span>"+time.toLocaleString()+"</span></p>";
html +="<p>" +jobEvents[i]["message"] + "</p>";
}
}
}
let extras= jsonObj["extras"];
if(extras != null){
for(var i=0; i < extras.length;i++){
if (extras[i]["reason"]!="") {
html +="<p><b>[" +extras[i]["reason"] + "]</b></p>";
html +="<p>" +extras[i]["message"] + "</p>";
html +="<p>" +extras[i]["action"] + "</p>";
}
}
}
}

let string = document.getElementById("ExitDiagnostics").value;
string = string.replace(/\r\n/g,"<br>")
string = string.replace(/\n/g,"<br>");
@@ -722,7 +708,6 @@ td, th {
html +="<p><b>[ExitDiagnostics]</b></p>";
html +="<p>" +string + "</p>";
}
document.getElementById("info_display").innerHTML=html;
}


+ 2
- 27
templates/repo/modelsafety/new.tmpl View File

@@ -68,39 +68,14 @@
<div class="required min_title inline field">
<label class="label-fix-width" style="font-weight: normal;">{{.i18n.Tr "repo.cloudbrain.benchmark.evaluate_scenes"}}</label>
<div class="ui blue small menu compact selectcloudbrain">
<a class="item alogrithm_benchmark"
href="{{.RepoLink}}/cloudbrain/benchmark/create?benchmarkMode=alogrithm">{{.i18n.Tr "repo.cloudbrain.benchmark.algorithm"}}</a>
<a class="item model_benchmark"
href="{{.RepoLink}}/cloudbrain/benchmark/create?benchmarkMode=model">{{.i18n.Tr "repo.cloudbrain.benchmark.model"}}</a>
<a class="item active model_safe_benchmark"
<a class="item active model_safe_benchmark"
href="{{.Link}}">{{.i18n.Tr "modelsafety.model_security_evaluation"}}</a>
</div>
</div>
<!-- <div class="required min_title inline field">
<label class="label-fix-width" style="font-weight: normal;">{{.i18n.Tr "cloudbrain.resource_cluster"}}</label>
<div class="ui blue mini menu compact selectcloudbrain">
<a class="item {{if not $Grampus}}active{{end}}" href="{{.RepoLink}}/modelsafety/create_gpu">
<svg class="svg" sxmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" width="16" height="16"><path fill="none" d="M0 0h24v24H0z"></path><path d="M12 22C6.477 22 2 17.523 2 12S6.477 2 12 2s10 4.477 10 10-4.477 10-10 10zm-2.29-2.333A17.9 17.9 0 0 1 8.027 13H4.062a8.008 8.008 0 0 0 5.648 6.667zM10.03 13c.151 2.439.848 4.73 1.97 6.752A15.905 15.905 0 0 0 13.97 13h-3.94zm9.908 0h-3.965a17.9 17.9 0 0 1-1.683 6.667A8.008 8.008 0 0 0 19.938 13zM4.062 11h3.965A17.9 17.9 0 0 1 9.71 4.333 8.008 8.008 0 0 0 4.062 11zm5.969 0h3.938A15.905 15.905 0 0 0 12 4.248 15.905 15.905 0 0 0 10.03 11zm4.259-6.667A17.9 17.9 0 0 1 15.973 11h3.965a8.008 8.008 0 0 0-5.648-6.667z"></path></svg>
{{.i18n.Tr "cloudbrain.resource_cluster_openi"}}
</a>
<a class="item {{if $Grampus}}active{{end}}" href="{{.RepoLink}}/modelsafety/create_grampus_gpu">
<svg class="svg" sxmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" width="16" height="16"><path fill="none" d="M0 0h24v24H0z"></path><path d="M12 22C6.477 22 2 17.523 2 12S6.477 2 12 2s10 4.477 10 10-4.477 10-10 10zm-2.29-2.333A17.9 17.9 0 0 1 8.027 13H4.062a8.008 8.008 0 0 0 5.648 6.667zM10.03 13c.151 2.439.848 4.73 1.97 6.752A15.905 15.905 0 0 0 13.97 13h-3.94zm9.908 0h-3.965a17.9 17.9 0 0 1-1.683 6.667A8.008 8.008 0 0 0 19.938 13zM4.062 11h3.965A17.9 17.9 0 0 1 9.71 4.333 8.008 8.008 0 0 0 4.062 11zm5.969 0h3.938A15.905 15.905 0 0 0 12 4.248 15.905 15.905 0 0 0 10.03 11zm4.259-6.667A17.9 17.9 0 0 1 15.973 11h3.965a8.008 8.008 0 0 0-5.648-6.667z"></path></svg>
{{.i18n.Tr "cloudbrain.resource_cluster_c2net"}}(Beta)
</a>
</div>
</div> -->
<div class="inline min_title required field">
<label class="label-fix-width" style="font-weight: normal;">{{.i18n.Tr "cloudbrain.compute_resource"}}</label>
<div class="ui blue mini menu compact selectcloudbrain">
<a class="{{if eq .datasetType 0}}active{{end}} item" href="{{.RepoLink}}/modelsafety/create_{{if $Grampus}}grampus_{{end}}gpu">
<svg class="svg" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" width="16"
height="16">
<path fill="none" d="M0 0h24v24H0z" />
<path
d="M3 2.992C3 2.444 3.445 2 3.993 2h16.014a1 1 0 0 1 .993.992v18.016a.993.993 0 0 1-.993.992H3.993A1 1 0 0 1 3 21.008V2.992zM19 11V4H5v7h14zm0 2H5v7h14v-7zM9 6h6v2H9V6zm0 9h6v2H9v-2z" />
</svg>
CPU/GPU
</a>
<a class="{{if eq .datasetType 1}}active{{end}} item" href="{{.RepoLink}}/modelsafety/create_{{if $Grampus}}grampus_{{end}}npu">
<svg class="svg" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" width="16"
height="16">


+ 2
- 0
web_src/js/features/i18nVue.js View File

@@ -5,6 +5,7 @@ export const i18nVue = {
speech_processing: "语音处理",
computer_vision_natural_language_processing: "计算机视觉、自然语言处理",
machine_translation: "机器翻译",
medical_imaging: "医学影像",
question_answering_system: "问答系统",
information_retrieval: "信息检索",
knowledge_graph: "知识图谱",
@@ -226,6 +227,7 @@ export const i18nVue = {
computer_vision_natural_language_processing:
"computer vision and natural language processing",
machine_translation: "machine translation",
medical_imaging: "medical imaging",
question_answering_system: "question answering system",
information_retrieval: "information retrieval",
knowledge_graph: "knowledge graph",


+ 1
- 0
web_src/vuepages/langs/config/en-US.js View File

@@ -476,6 +476,7 @@ const en = {
computer_vision_natural_language_processing:
"computer vision and natural language processing",
machine_translation: "machine translation",
medical_imaging: "medical imaging",
question_answering_system: "question answering system",
information_retrieval: "information retrieval",
knowledge_graph: "knowledge graph",


+ 1
- 0
web_src/vuepages/langs/config/zh-CN.js View File

@@ -491,6 +491,7 @@ const zh = {
speech_processing: "语音处理",
computer_vision_natural_language_processing: "计算机视觉、自然语言处理",
machine_translation: "机器翻译",
medical_imaging: "医学影像",
question_answering_system: "问答系统",
information_retrieval: "信息检索",
knowledge_graph: "知识图谱",


+ 1
- 0
web_src/vuepages/pages/dataset/square/constant.js View File

@@ -4,6 +4,7 @@ export const Category = [
{ name: "speech_processing", active: false },
{ name: "computer_vision_natural_language_processing", active: false },
{ name: "machine_translation", active: false },
{ name: "medical_imaging", active: false },
]
export const Task = [
{ name: "machine_translation", active: false },


Loading…
Cancel
Save