#4131 合入: #2707 训练任务输出结果可导出至数据集 #4065 模型详情页面演化图谱中增加模型训练时用到的数据集文件

Merged
ychao_1983 merged 53 commits from zouap_static into V20230517 1 year ago
  1. +2
    -2
      models/ai_model_manage.go
  2. +15
    -8
      options/locale/locale_en-US.ini
  3. +8
    -1
      options/locale/locale_zh-CN.ini
  4. +11
    -8
      routers/repo/ai_model_manage.go
  5. +22
    -6
      routers/repo/ai_model_square.go
  6. +28
    -16
      routers/repo/attachment.go
  7. +287
    -0
      routers/repo/dataset.go
  8. +7
    -0
      routers/routes/routes.go
  9. +3
    -0
      routers/search.go
  10. +72
    -0
      templates/custom/export_dataset.tmpl
  11. +12
    -2
      templates/repo/cloudbrain/trainjob/show.tmpl
  12. +12
    -2
      templates/repo/grampus/trainjob/show.tmpl
  13. +13
    -4
      templates/repo/modelarts/trainjob/show.tmpl
  14. +311
    -1
      web_src/js/features/cloudbrainShow.js
  15. +14
    -0
      web_src/js/features/i18nVue.js
  16. +1
    -1
      web_src/js/index.js
  17. +126
    -0
      web_src/less/_dataset.less
  18. +1
    -1
      web_src/less/openi.less
  19. +17
    -1
      web_src/vuepages/apis/modules/dataset.js
  20. +2
    -0
      web_src/vuepages/langs/config/en-US.js
  21. +2
    -0
      web_src/vuepages/langs/config/zh-CN.js
  22. +68
    -0
      web_src/vuepages/pages/dataset/exportDataset/index.vue
  23. +17
    -0
      web_src/vuepages/pages/dataset/exportDataset/vp-export-dataset.js
  24. +10
    -8
      web_src/vuepages/pages/modelmanage/graph/model-graph.css
  25. +17
    -1
      web_src/vuepages/pages/modelmanage/graph/model-graph.js
  26. +8
    -8
      webpack.config.js

+ 2
- 2
models/ai_model_manage.go View File

@@ -49,6 +49,7 @@ type AiModelManage struct {
RepoName string `xorm:"-" json:"repoName"`
RepoDisplayName string `xorm:"-" json:"repoDisplayName"`
RepoOwnerName string `xorm:"-" json:"repoOwnerName"`
DatasetInfo []*DatasetDownload `xorm:"-" json:"datasetInfo"`
ReferenceCount int `xorm:"NOT NULL DEFAULT 0" json:"referenceCount"`
CollectedCount int `xorm:"NOT NULL DEFAULT 0" json:"collectedCount"`
ModelFileList []storage.FileInfo `xorm:"-" json:"modelFileList"`
@@ -762,13 +763,12 @@ func DeleteModelFile(modelFile *AiModelFile) error {
func QueryModelFileByModelId(modelId string) []*AiModelFile {
sess := x.NewSession()
defer sess.Close()
modelFileList := make([]*AiModelFile, 0)
var cond = builder.NewCond()
cond = cond.And(
builder.Eq{"model_id": modelId},
)
result := make([]*AiModelFile, 0)
err := sess.Table(new(AiModelFile)).Where(cond).Find(&modelFileList)
err := sess.Table(new(AiModelFile)).Where(cond).Find(&result)
if err != nil {
log.Info("query AiModelFile failed, err=" + err.Error())
}


+ 15
- 8
options/locale/locale_en-US.ini View File

@@ -994,6 +994,11 @@ favorite=Like
disassociate=Disassociate
benchmark_dataset_tip=Note: first use the dataset function to upload the model, and then select the model from the dataset list.
file_deleted=The file has been deleted
select_result_file= Select result file
select_file = Select file
export_file = Export
go_new_dataset= to create the dataset
export_tips = Only <span style="color:red">zip/tar.gz</span> type result file export is supported, and the exported file can finally be viewed on the Datasets tab of the current project.

[repo]
owner = Owner
@@ -1071,7 +1076,7 @@ notebook_select_wrong=Please select a Notebook(.ipynb) file first.
notebook_path_too_long=The total length of selected file or files path exceed 255 characters, please select a shorter path file or change the file path.
notebook_branch_name_too_long=The total length of branch or branches name exceed 255 characters, please select a file in other branch.
notebook_file_no_right=You have no right to access the Notebook(.ipynb) file.
notebook_branch_name_not_support=The branch name contains < > ' " ; \ ` = # $ % ^ ( ), can not run online.
notebook_branch_name_not_support=The branch name contains < > ' " ; \ ` = # $ % ^ ( ), can not run online.
debug_again_fail=Fail to restart debug task, please try again later.
debug_again_fail_forever=The task was scheduled failed last time, can not restart.

@@ -1114,6 +1119,8 @@ images.public_tooltips = After the image is set to public, it can be seen by oth
images.name_rule = Please enter letters, numbers, _ and - up to 64 characters and cannot end with a dash (-).
images.delete_task = Delete image
images.task_delete_confirm = Are you sure you want to delete this image? Once this image is deleted, it cannot be recovered.
export_result_to_dataset = Export the results to a dataset
loader_result_file = Loading results file...

cloudbrain=Cloudbrain
cloudbrain.task = Cloudbrain Task
@@ -1318,7 +1325,7 @@ model.manage.import_online_model=Import Online Model
model.manage.notcreatemodel=No model has been created
model.manage.init1=Code version: You have not initialized the code repository, please
model.manage.init2=initialized first ;
model.manage.createtrainjob_tip=Training task: you haven't created a training task, please create it first
model.manage.createtrainjob_tip=Training task: you have not created a training task, please create it first
model.manage.createmodel_tip=You can import local model or online model. Import online model should
model.manage.createtrainjob=Create training task.
model.manage.delete=Delete Model
@@ -1541,7 +1548,7 @@ editor.file_deleting_no_longer_exists = The file being deleted, '%s', no longer
editor.file_changed_while_editing = The file contents have changed since you started editing. <a target="_blank" rel="noopener noreferrer" href="%s">Click here</a> to see them or <strong>Commit Changes again</strong> to overwrite them.
editor.file_already_exists = A file named '%s' already exists in this repository.
editor.commit_empty_file_header = Commit an empty file
editor.commit_empty_file_text = The file you're about commit is empty. Proceed?
editor.commit_empty_file_text = The file you are about to commit is empty. Proceed?
editor.no_changes_to_show = There are no changes to show.
editor.fail_to_update_file = Failed to update/create file '%s' with error: %v
editor.push_rejected_no_message = The change was rejected by the server without a message. Please check githooks.
@@ -1557,7 +1564,7 @@ editor.require_signed_commit = Branch requires a signed commit
editor.repo_too_large = Repository can not exceed %d MB
editor.repo_file_invalid = Upload files are invalid
editor.upload_file_too_much = Can not upload more than %d files at a time
editor.rename = rename "%s" to %s"
editor.rename = rename "%s" to "%s"
editor.file_changed_while_renaming=The version of the file or folder to be renamed has changed. Please refresh the page and try again


@@ -1717,7 +1724,7 @@ issues.collaborator = Collaborator
issues.owner = Owner
issues.re_request_review=Re-request review
issues.remove_request_review=Remove review request
issues.remove_request_review_block=Can't remove review request
issues.remove_request_review_block=Can not remove review request
issues.sign_in_require_desc = <a href="%s">Sign in</a> to join this conversation.
issues.edit = Edit
issues.cancel = Cancel
@@ -1873,7 +1880,7 @@ pulls.merged_as = The pull request has been merged as <a rel="nofollow" class="u
pulls.is_closed = The pull request has been closed.
pulls.has_merged = The pull request has been merged.
pulls.title_wip_desc = `<a href="#">Start the title with <strong>%s</strong></a> to prevent the pull request from being merged accidentally.`
pulls.cannot_merge_work_in_progress = This pull request is marked as a work in progress. Remove the <strong>%s</strong> prefix from the title when it's ready
pulls.cannot_merge_work_in_progress = This pull request is marked as a work in progress. Remove the <strong>%s</strong> prefix from the title when it is ready
pulls.data_broken = This pull request is broken due to missing fork information.
pulls.files_conflicted = This pull request has changes conflicting with the target branch.
pulls.is_checking = "Merge conflict checking is in progress. Try again in few moments."
@@ -3235,10 +3242,10 @@ mark_all_as_read = Mark all as read
default_key=Signed with default key
error.extract_sign = Failed to extract signature
error.generate_hash = Failed to generate hash of commit
error.no_committer_account = No account linked to committer's email address
error.no_committer_account = No account linked to committers email address
error.no_gpg_keys_found = "No known key found for this signature in database"
error.not_signed_commit = "Not a signed commit"
error.failed_retrieval_gpg_keys = "Failed to retrieve any key attached to the committer's account"
error.failed_retrieval_gpg_keys = "Failed to retrieve any key attached to the committers account"
error.probable_bad_signature = "WARNING! Although there is a key with this ID in the database it does not verify this commit! This commit is SUSPICIOUS."
error.probable_bad_default_signature = "WARNING! Although the default key has this ID it does not verify this commit! This commit is SUSPICIOUS."



+ 8
- 1
options/locale/locale_zh-CN.ini View File

@@ -999,6 +999,11 @@ favorite=收藏
disassociate=取消关联
benchmark_dataset_tip=说明:先使用数据集功能上传模型,然后从数据集列表选模型。
file_deleted=文件已经被删除
select_result_file=选择结果文件
select_file=选择文件
export_file =导出
go_new_dataset=去创建数据集
export_tips = 仅支持 <span style="color:red">zip/tar.gz</span> 类型的结果文件导出,导出的文件最终可以在当前项目的数据集页签下查看。

[repo]
owner=拥有者
@@ -1070,7 +1075,7 @@ notebook_select_wrong=请先选择Notebook(.ipynb)文件。
notebook_path_too_long=选择的一个或多个Notebook文件路径总长度超过255个字符,请选择路径较短的文件或调整文件路径。
notebook_branch_name_too_long=选择的一个或多个Notebook文件分支名总长度超过255个字符,请选择其他分支的文件。
notebook_file_no_right=您没有这个Notebook文件的读权限。
notebook_branch_name_not_support=分支名包含< > ' " ; \ ` = # $ % ^ ( )字符,不支持在线运行。
notebook_branch_name_not_support=分支名包含< > ' " ; \ ` = # $ % ^ ( )字符,不支持在线运行。
debug_again_fail=再次调试失败,请稍后再试。
debug_again_fail_forever=这个任务之前没有调度成功,不能再次调试。

@@ -1113,6 +1118,8 @@ images.public_tooltips = 镜像设置为公开后,可被其他用户看到。
images.name_rule = 请输入字母、数字、_和-,最长100个字符,且不能以中划线(-)结尾。
images.delete_task = 删除镜像
images.task_delete_confirm = 你确认删除该镜像么?此镜像一旦删除不可恢复。
export_result_to_dataset = 导出结果至数据集
loader_result_file = 正在加载结果文件中...

cloudbrain=云脑
cloudbrain.task = 云脑任务


+ 11
- 8
routers/repo/ai_model_manage.go View File

@@ -2,7 +2,6 @@ package repo

import (
"archive/zip"
"code.gitea.io/gitea/services/cloudbrain/resource"
"encoding/json"
"errors"
"fmt"
@@ -12,6 +11,8 @@ import (
"regexp"
"strings"

"code.gitea.io/gitea/services/cloudbrain/resource"

"code.gitea.io/gitea/services/cloudbrain/modelmanage"

"code.gitea.io/gitea/services/repository"
@@ -151,8 +152,10 @@ func saveModelByParameters(jobId string, versionName string, name string, versio
}

func asyncToCopyModel(aiTask *models.Cloudbrain, id string, modelSelectedFile string) {
destKeyNamePrefix := Model_prefix + models.AttachmentRelativePath(id) + "/"
if aiTask.ComputeResource == models.NPUResource {
modelPath, modelSize, err := downloadModelFromCloudBrainTwo(id, aiTask.JobName, "", aiTask.TrainUrl, modelSelectedFile)
//destKeyNamePrefix := Model_prefix + models.AttachmentRelativePath(modelUUID) + "/"
modelPath, modelSize, err := downloadModelFromCloudBrainTwo(aiTask.JobName, "", aiTask.TrainUrl, modelSelectedFile, destKeyNamePrefix)
if err != nil {
updateStatus(id, 0, STATUS_ERROR, modelPath, err.Error())
log.Info("download model from CloudBrainTwo faild." + err.Error())
@@ -160,9 +163,9 @@ func asyncToCopyModel(aiTask *models.Cloudbrain, id string, modelSelectedFile st
updateStatus(id, modelSize, STATUS_FINISHED, modelPath, "")
insertModelFile(id)
}
} else if aiTask.ComputeResource == models.GPUResource {
} else if aiTask.ComputeResource == models.GPUResource || aiTask.ComputeResource == models.GCUResource {

modelPath, modelSize, err := downloadModelFromCloudBrainOne(id, aiTask.JobName, "", aiTask.TrainUrl, modelSelectedFile)
modelPath, modelSize, err := downloadModelFromCloudBrainOne(aiTask.JobName, "", modelSelectedFile, destKeyNamePrefix)
if err != nil {
updateStatus(id, 0, STATUS_ERROR, modelPath, err.Error())
log.Info("download model from CloudBrainOne faild." + err.Error())
@@ -438,7 +441,7 @@ func SaveModel(ctx *context.Context) {
log.Info("save model end.")
}

func downloadModelFromCloudBrainTwo(modelUUID string, jobName string, parentDir string, trainUrl string, modelSelectedFile string) (string, int64, error) {
func downloadModelFromCloudBrainTwo(jobName string, parentDir string, trainUrl string, modelSelectedFile string, destKeyNamePrefix string) (string, int64, error) {
objectkey := strings.TrimPrefix(path.Join(setting.TrainJobModelPath, jobName, setting.OutPutPath, parentDir), "/")
if trainUrl != "" {
objectkey = strings.Trim(trainUrl[len(setting.Bucket)+1:], "/")
@@ -465,15 +468,15 @@ func downloadModelFromCloudBrainTwo(modelUUID string, jobName string, parentDir
return "", 0, errors.New("Cannot create model, as model is empty.")
}

destKeyNamePrefix := Model_prefix + models.AttachmentRelativePath(modelUUID) + "/"
//destKeyNamePrefix := Model_prefix + models.AttachmentRelativePath(modelUUID) + "/"
size, err := storage.ObsCopyManyFile(setting.Bucket, prefix, setting.Bucket, destKeyNamePrefix, filterFiles)

dataActualPath := setting.Bucket + "/" + destKeyNamePrefix
return dataActualPath, size, nil
}

func downloadModelFromCloudBrainOne(modelUUID string, jobName string, parentDir string, trainUrl string, modelSelectedFile string) (string, int64, error) {
destKeyNamePrefix := Model_prefix + models.AttachmentRelativePath(modelUUID) + "/"
func downloadModelFromCloudBrainOne(jobName string, parentDir string, modelSelectedFile string, destKeyNamePrefix string) (string, int64, error) {
//destKeyNamePrefix := Model_prefix + models.AttachmentRelativePath(modelUUID) + "/"

modelSrcPrefix := setting.CBCodePathPrefix + jobName + "/model/"
//destKeyNamePrefix := Model_prefix + models.AttachmentRelativePath(modelUUID) + "/"


+ 22
- 6
routers/repo/ai_model_square.go View File

@@ -252,13 +252,15 @@ func ModelEvolutionMapData(ctx *context.Context) {
"code": "-1",
}
if err == nil {

removeIpInfo(model)
repo, err := models.GetRepositoryByID(model.RepoId)
model.RepoName = repo.Name
model.RepoOwnerName = repo.OwnerName
model.RepoDisplayName = repo.DisplayName()
if err == nil {
model.RepoName = repo.Name
model.RepoOwnerName = repo.OwnerName
model.RepoDisplayName = repo.DisplayName()
setModelUser(model)
setModelDataSet(model)
currentNode := &ModelMap{
Type: 1,
IsCurrent: true,
@@ -287,6 +289,16 @@ func ModelEvolutionMapData(ctx *context.Context) {
}
}

// setModelDataSet fills model.DatasetInfo with the dataset files that were
// used by the training task recorded in model.TrainTaskInfo (a JSON-encoded
// Cloudbrain task). Models without training-task info, or with info that does
// not parse, are left untouched.
func setModelDataSet(model *models.AiModelManage) {
	if model.TrainTaskInfo == "" {
		return
	}
	var task models.Cloudbrain
	if err := json.Unmarshal([]byte(model.TrainTaskInfo), &task); err != nil {
		return
	}
	model.DatasetInfo = GetCloudBrainDataSetInfo(task.Uuid, task.DatasetName, false)
}

func findParent(model *models.AiModelManage) *ModelMap {
if model.TrainTaskInfo != "" {
var task models.Cloudbrain
@@ -300,6 +312,7 @@ func findParent(model *models.AiModelManage) *ModelMap {
parentModel, err := models.QueryModelById(task.ModelId)
setModelRepo(parentModel)
setModelUser(parentModel)
setModelDataSet(parentModel)
if err == nil {
re := &ModelMap{
Type: 1,
@@ -314,6 +327,7 @@ func findParent(model *models.AiModelManage) *ModelMap {
for _, parentModel := range modelList {
setModelUser(parentModel)
setModelRepo(parentModel)
setModelDataSet(parentModel)
if parentModel.Version == task.ModelVersion {
re := &ModelMap{
Type: 1,
@@ -362,6 +376,7 @@ func findChild(currentNode *ModelMap) {
log.Info("task.ModelId=%v,currentModel.ID=%v", task.ModelId, currentModel.ID)
if task.ModelId != "" && task.ModelId == currentModel.ID {
setModelUser(childModel)
setModelDataSet(childModel)
modelMap := &ModelMap{
Type: 1,
Model: childModel,
@@ -372,6 +387,7 @@ func findChild(currentNode *ModelMap) {
log.Info("task.ModelVersion=%v,currentModel.Version=%v", task.ModelVersion, currentModel.Version)
if task.ModelName == currentModel.Name && task.ModelVersion == currentModel.Version {
setModelUser(childModel)
setModelDataSet(childModel)
modelMap := &ModelMap{
Type: 1,
Model: childModel,
@@ -469,14 +485,14 @@ func QueryModelReadMe(ctx *context.Context) {
}
}
}
metas:=map[string]string{"include_toc":"true"}
metas := map[string]string{"include_toc": "true"}
if find {
re["isExistMDFile"] = "true"
re["fileName"] = README_FILE_NAME
strc := string(content)
re["content"] = strc

re["htmlcontent"] = string(markdown.RenderRaw([]byte(strc), "", false,metas))
re["htmlcontent"] = string(markdown.RenderRaw([]byte(strc), "", false, metas))
} else {
re["isExistMDFile"] = "false"
re["fileName"] = README_FILE_NAME
@@ -484,7 +500,7 @@ func QueryModelReadMe(ctx *context.Context) {
result, err := repository.RecommendContentFromPromote(url)
if err == nil {
re["content"] = result
re["htmlcontent"] = string(markdown.RenderRaw([]byte(result), "", false,metas))
re["htmlcontent"] = string(markdown.RenderRaw([]byte(result), "", false, metas))
}
}
re["code"] = "0"


+ 28
- 16
routers/repo/attachment.go View File

@@ -190,8 +190,9 @@ func DeleteAttachment(ctx *context.Context) {

_, err = models.DeleteFileChunkById(attach.UUID)
if err != nil {
ctx.Error(500, fmt.Sprintf("DeleteFileChunkById: %v", err))
return
log.Info("delete from file chunk failed.")
//ctx.Error(500, fmt.Sprintf("DeleteFileChunkById: %v", err))
//return
}
ctx.JSON(200, map[string]string{
"uuid": attach.UUID,
@@ -878,27 +879,39 @@ func CompleteMultipart(ctx *context.Context) {
ctx.Error(500, fmt.Sprintf("UpdateFileChunk: %v", err))
return
}
dataset, _ := models.GetDatasetByID(ctx.QueryInt64("dataset_id"))

err = finishedUploadAttachment(ctx.QueryInt64("dataset_id"), ctx.QueryInt64("size"), uuid, fileName, ctx.Query("description"), typeCloudBrain, ctx.User)
if err == nil {
ctx.JSON(200, map[string]string{
"result_code": "0",
})
} else {
ctx.JSON(200, map[string]string{
"result_code": "-1",
"msg": err.Error(),
})
}

}

func finishedUploadAttachment(datasetId, size int64, uuid, fileName, description string, typeCloudBrain int, User *models.User) error {
dataset, _ := models.GetDatasetByID(datasetId)
log.Warn("insert attachment to datasetId:" + strconv.FormatInt(dataset.ID, 10))
attachment, err := models.InsertAttachment(&models.Attachment{
UUID: uuid,
UploaderID: ctx.User.ID,
UploaderID: User.ID,
IsPrivate: dataset.IsPrivate(),
Name: fileName,
Size: ctx.QueryInt64("size"),
DatasetID: ctx.QueryInt64("dataset_id"),
Description: ctx.Query("description"),
Size: size,
DatasetID: datasetId,
Description: description,
Type: typeCloudBrain,
})

if err != nil {
ctx.Error(500, fmt.Sprintf("InsertAttachment: %v", err))
return
return errors.New("InsertAttachment: " + err.Error())
}
attachment.UpdateDatasetUpdateUnix()
go repo_service.IncreaseRepoDatasetNum(dataset.ID)
repository, _ := models.GetRepositoryByID(dataset.RepoID)
notification.NotifyOtherTask(ctx.User, repository, fmt.Sprint(repository.IsPrivate, attachment.IsPrivate), attachment.Name, models.ActionUploadAttachment)
if attachment.DatasetID != 0 {
if isCanDecompress(attachment.Name) {
if typeCloudBrain == models.TypeCloudBrainOne {
@@ -930,10 +943,9 @@ func CompleteMultipart(ctx *context.Context) {
labelmsg.SendAddAttachToLabelSys(string(attachjson))
}
}

ctx.JSON(200, map[string]string{
"result_code": "0",
})
repository, _ := models.GetRepositoryByID(dataset.RepoID)
notification.NotifyOtherTask(User, repository, fmt.Sprint(repository.IsPrivate, attachment.IsPrivate), attachment.Name, models.ActionUploadAttachment)
return nil
}

func HandleUnDecompressAttachment() {


+ 287
- 0
routers/repo/dataset.go View File

@@ -4,12 +4,16 @@ import (
"encoding/json"
"fmt"
"net/http"
"path"
"sort"
"strconv"
"strings"
"time"
"unicode/utf8"

"code.gitea.io/gitea/modules/redis/redis_client"
"code.gitea.io/gitea/services/repository"
uuid "github.com/satori/go.uuid"

"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/auth"
@@ -17,6 +21,7 @@ import (
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/storage"
)

const (
@@ -541,3 +546,285 @@ func GetDatasetStatus(ctx *context.Context) {
"AttachmentStatus": fmt.Sprint(attachment.DecompressState),
})
}

// GetDataSetSelectItemByJobId lists the exportable result files (.zip /
// .tar.gz only) produced by a training task, identified by the "jobId" and
// "versionName" query parameters. It responds with HTTP 200 and a JSON map:
// code "-1" plus "msg" on failure, code 0 plus "files" on success.
func GetDataSetSelectItemByJobId(ctx *context.Context) {
	jobID := ctx.Query("jobId")
	version := ctx.Query("versionName")
	re := map[string]interface{}{
		"code": "-1",
	}
	// Look the task up by job id + version first; fall back to a repo-scoped
	// lookup for tasks that have no version name.
	task, err := models.GetCloudbrainByJobIDAndVersionName(jobID, version)
	if err != nil {
		if task, err = models.GetRepoCloudBrainByJobID(ctx.Repo.Repository.ID, jobID); err != nil {
			log.Info("query task error." + err.Error())
			re["msg"] = "Query cloudbrain task error." + err.Error()
			ctx.JSON(200, re)
			return
		}
	}
	// Map the compute resource to the storage backend the results live in.
	var taskType int
	switch task.ComputeResource {
	case models.NPUResource:
		taskType = models.TypeCloudBrainTwo
	case models.GPUResource, models.GCUResource:
		taskType = models.TypeCloudBrainOne
	}
	result, err := getModelFromObjectSave(task.JobName, taskType, task.VersionName)
	if err != nil {
		re["msg"] = "Query model file error, " + err.Error()
		ctx.JSON(200, re)
		return
	}
	// Only archive files may be exported to a dataset.
	archives := make([]storage.FileInfo, 0)
	for _, file := range result {
		lower := strings.ToLower(file.FileName)
		if strings.HasSuffix(lower, ".zip") || strings.HasSuffix(lower, ".tar.gz") {
			archives = append(archives, file)
		}
	}
	re["code"] = 0
	re["files"] = archives
	ctx.JSON(200, re)
}

// GetExportDataSetByMsgId returns the export-progress record stored in redis
// under the "progressId" query parameter (a JSON string written by
// setProgress), or -1 when the key cannot be read.
func GetExportDataSetByMsgId(ctx *context.Context) {
	key := ctx.Query("progressId")
	if progress, err := redis_client.Get(key); err == nil {
		ctx.JSON(200, progress)
		return
	}
	ctx.JSON(200, -1)
}

// GetCurrentDataSet returns the dataset attached to the current repository.
// On success the response carries code 0 and the dataset; otherwise only the
// initial code "-1" is returned. Either way the HTTP status is 200.
// NOTE(review): success writes code as int 0 while failure keeps the string
// "-1" — confirm the frontend compares loosely before normalizing.
func GetCurrentDataSet(ctx *context.Context) {
	re := map[string]interface{}{
		"code": "-1",
	}
	if dataset, err := models.GetDatasetByRepo(ctx.Repo.Repository); err == nil {
		re["code"] = 0
		re["dataset"] = dataset
	}
	ctx.JSON(200, re)
}

// ExportModelToExistDataSet starts an asynchronous export of selected training
// result files into an existing dataset.
//
// Query parameters: modelSelectedFile (";"-separated file paths), datasetId,
// description, jobId, type (target storage type), versionName.
// It validates the dataset and the training task, seeds a per-file progress
// map in redis (value 0 = pending), launches the copy in a goroutine, and
// immediately returns code "0" together with the redis progress key, which the
// client polls via GetExportDataSetByMsgId. All responses use HTTP 200.
func ExportModelToExistDataSet(ctx *context.Context) {
	modelSelectedFile := ctx.Query("modelSelectedFile")
	log.Info("modelSelectedFile=" + modelSelectedFile)
	datasetId := ctx.QueryInt64("datasetId")
	log.Info("datasetId=" + fmt.Sprint(datasetId))

	re := map[string]string{
		"code": "-1",
	}
	description := ctx.Query("description")
	jobId := ctx.Query("jobId")
	storeType := ctx.QueryInt("type")
	versionName := ctx.Query("versionName")
	dataset, err := models.GetDatasetByID(datasetId)
	if err != nil || dataset == nil {
		log.Info("Not found dataset.")
		re["msg"] = "Not found dataset."
		ctx.JSON(200, re)
		return
	}
	// Same two-step task lookup as GetDataSetSelectItemByJobId: by job id and
	// version first, then repo-scoped by job id alone.
	aiTask, err := models.GetCloudbrainByJobIDAndVersionName(jobId, versionName)
	if err != nil {
		aiTask, err = models.GetRepoCloudBrainByJobID(ctx.Repo.Repository.ID, jobId)
		if err != nil {
			log.Info("query task error." + err.Error())
			re["msg"] = "Query cloudbrain task error." + err.Error()
			ctx.JSON(200, re)
			return
		}
	}
	// Progress key is dataset+job+version; a second export of the same triple
	// reuses (and overwrites) the same redis entry — presumably intended, but
	// concurrent exports of the same task would share state; confirm.
	msgKey := fmt.Sprint(datasetId) + "_" + jobId + "_" + versionName
	msgMap := make(map[string]int, 0)
	// "##type##" is a reserved key carrying the storage type alongside the
	// per-file progress entries; the frontend must skip it when rendering.
	msgMap["##type##"] = storeType
	filterFiles := strings.Split(modelSelectedFile, ";")
	for _, shortFile := range filterFiles {
		msgMap[shortFile] = 0
	}
	setProgress(msgKey, msgMap)
	// The goroutine owns msgMap from here on; the handler does not touch it
	// again, so there is no data race on the map.
	go asyncToExportDataset(dataset, storeType, modelSelectedFile, aiTask, ctx.User, msgKey, msgMap, versionName, description)
	ctx.JSON(200, map[string]string{
		"code": "0",
		"progressId": msgKey,
	})
}

// setProgress serializes msgMap to JSON and stores it in redis under msgKey
// with a 24-hour expiry. Failures are logged and otherwise ignored — progress
// reporting is best effort and must not abort the export.
func setProgress(msgKey string, msgMap map[string]int) {
	payload, _ := json.Marshal(msgMap)
	redisValue := string(payload)
	log.Info("set redis key=" + msgKey + " value=" + redisValue)
	if re, err := redis_client.Setex(msgKey, redisValue, 3600*24*time.Second); err != nil {
		log.Info("set redis error:" + err.Error())
	} else {
		log.Info("re =" + fmt.Sprint(re))
	}
}

// asyncToExportDataset copies each selected result file of a finished training
// task into the given dataset and records per-file progress in redis under
// msgKey after every file. It is meant to run in its own goroutine.
//
// Progress values per file: 0 pending, 100 done, -1 failed, -2 skipped because
// an attachment with the same name and size already exists in the dataset.
//
// Fix vs. previous version: the error returned by finishedUploadAttachment was
// ignored, so a file whose storage copy succeeded but whose attachment record
// failed to insert was still reported as 100 (success). The attachment insert
// is now checked before marking the file done.
func asyncToExportDataset(dataset *models.Dataset, storeType int, modelSelectedFile string, aiTask *models.Cloudbrain, user *models.User, msgKey string, msgMap map[string]int, versionName string, description string) {
	for _, shortFile := range strings.Split(modelSelectedFile, ";") {
		// Fresh attachment UUID per exported file.
		id := uuid.NewV4().String()
		fileName := getFileName(shortFile)
		log.Info("shortSrcFile=" + shortFile + " fileName=" + fileName)

		switch aiTask.ComputeResource {
		case models.GPUResource, models.GCUResource:
			// GPU/GCU results live in minio.
			size := getFileSizeFromMinio(aiTask.JobName, shortFile)
			if isExistInAttachment(fileName, size, dataset, storeType) {
				msgMap[shortFile] = -2
			} else if err := exportModelToDataFromMinio(id, aiTask.JobName, shortFile, storeType); err != nil {
				msgMap[shortFile] = -1
			} else if err := finishedUploadAttachment(dataset.ID, size, id, fileName, description, storeType, user); err != nil {
				// Copy succeeded but attachment record failed: report failure.
				msgMap[shortFile] = -1
			} else {
				msgMap[shortFile] = 100
			}
		case models.NPUResource:
			// NPU results live in OBS and are namespaced by version.
			size := getFileSizeFromObs(aiTask.JobName, shortFile, versionName)
			if isExistInAttachment(fileName, size, dataset, storeType) {
				msgMap[shortFile] = -2
			} else if err := exportModelToDataFromObs(id, aiTask.JobName, shortFile, storeType, versionName); err != nil {
				msgMap[shortFile] = -1
			} else if err := finishedUploadAttachment(dataset.ID, size, id, fileName, description, storeType, user); err != nil {
				msgMap[shortFile] = -1
			} else {
				msgMap[shortFile] = 100
			}
		default:
			log.Info("ExportModelToExistDataSet cannot support the ComputeResource" + aiTask.ComputeResource)
			msgMap[shortFile] = -1
		}
		setProgress(msgKey, msgMap)
	}
}

// isExistInAttachment reports whether the dataset already contains an
// attachment of the given storage type with the same name and size, in which
// case the export of that file is skipped. Lookup failures are logged and
// treated as "not present".
func isExistInAttachment(fileName string, size int64, dataset *models.Dataset, storeType int) bool {
	err := models.GetDatasetAttachments(storeType, false, nil, dataset)
	if err != nil || len(dataset.Attachments) == 0 {
		log.Info("Not found attachments....")
		if err != nil {
			log.Info("error=" + err.Error())
		}
		return false
	}
	for _, attach := range dataset.Attachments {
		if attach.Name == fileName && attach.Size == size {
			log.Info("Has found same file. so not to create.")
			return true
		}
	}
	return false
}

// getFileSizeFromObs returns the size in bytes of one NPU training result file
// stored in OBS under the training-output path for jobName/versionName.
func getFileSizeFromObs(jobName string, modelSelectedFile string, versionName string) int64 {
	srcPrefix := strings.TrimPrefix(path.Join(setting.TrainJobModelPath, jobName, setting.OutPutPath, versionName), "/") + "/"
	objectKey := srcPrefix + modelSelectedFile
	log.Info("modelfile=" + objectKey)
	return storage.ObsGetFilesSize(setting.Bucket, []string{objectKey})
}

// exportModelToDataFromObs copies one result file of an NPU training job out
// of the OBS training-output area into attachment storage under the new
// attachment uuid `id`. For TypeCloudBrainOne targets the file is streamed
// from OBS into minio; for any other storeType it is copied within OBS.
// Returns nil on success, otherwise the storage error (already logged).
func exportModelToDataFromObs(id string, jobName string, modelSelectedFile string, storeType int, versionName string) error {
	objectkey := strings.TrimPrefix(path.Join(setting.TrainJobModelPath, jobName, setting.OutPutPath, versionName), "/")
	modelSrcPrefix := objectkey + "/"
	modelFile := modelSrcPrefix + modelSelectedFile
	log.Info("modelfile=" + modelFile)
	//totalSize := storage.ObsGetFilesSize(setting.Bucket, []string{modelFile})

	fileName := getFileName(modelSelectedFile)

	if storeType == models.TypeCloudBrainOne {
		// Target is minio: download from OBS and re-upload the stream.
		reader, err := storage.ObsDownloadAFile(setting.Bucket, modelFile)
		if err == nil {
			defer reader.Close()
			// Attachments are sharded by the first two uuid characters.
			// NOTE(review): the minio key ends at the uuid (no fileName),
			// while the OBS branch below appends fileName — presumably the
			// attachment layout differs per backend; confirm intended.
			destPath := strings.TrimPrefix(path.Join(setting.Attachment.Minio.BasePath, path.Join(id[0:1], id[1:2], id)), "/")
			log.Info("destPath=" + destPath)
			bucketName := setting.Attachment.Minio.Bucket
			_, minioErr := storage.Attachments.UploadContent(bucketName, destPath, reader)
			if minioErr != nil {
				log.Info("upload to minio failed 2.err=" + minioErr.Error())
				return minioErr
			}
		} else {
			// Download from OBS failed (log label "upload" is historical).
			log.Info("upload to minio failed 1.err=" + err.Error())
			return err
		}
	} else {
		// Target is OBS: server-side copy within the same bucket.
		destObjectKey := strings.TrimPrefix(path.Join(setting.BasePath, path.Join(id[0:1], id[1:2], id, fileName)), "/")
		log.Info("destPath=" + destObjectKey)
		log.Info("copy to bucket=" + setting.Bucket + " objectKey=" + destObjectKey)
		obsErr := storage.ObsCopyFile(setting.Bucket, modelFile, setting.Bucket, destObjectKey)
		if obsErr != nil {
			log.Info("upload to obs failed.err=" + obsErr.Error())
			return obsErr
		}
	}
	return nil
}

// getFileSizeFromMinio returns the size in bytes of one GPU/GCU training
// result file stored in minio under the job's model output prefix.
func getFileSizeFromMinio(jobName string, modelSelectedFile string) int64 {
	srcPrefix := setting.CBCodePathPrefix + jobName + "/model/"
	bucket := setting.Attachment.Minio.Bucket
	log.Info(" modelSrcPrefix=" + srcPrefix + " bucket=" + bucket)
	return storage.MinioGetFilesSize(bucket, []string{srcPrefix + modelSelectedFile})
}

// exportModelToDataFromMinio copies one result file of a GPU/GCU training job
// out of minio into attachment storage under the new attachment uuid `id`.
// For TypeCloudBrainOne targets the file is copied within minio; for any
// other storeType it is streamed from minio into OBS. Mirrors
// exportModelToDataFromObs. Returns nil on success, otherwise the storage
// error (already logged).
func exportModelToDataFromMinio(id string, jobName string, modelSelectedFile string, storeType int) error {
	modelSrcPrefix := setting.CBCodePathPrefix + jobName + "/model/"
	bucketName := setting.Attachment.Minio.Bucket
	log.Info(" modelSrcPrefix=" + modelSrcPrefix + " bucket=" + bucketName)
	modelFile := modelSrcPrefix + modelSelectedFile
	//totalSize := storage.MinioGetFilesSize(bucketName, []string{modelFile})

	fileName := getFileName(modelSelectedFile)
	if storeType == models.TypeCloudBrainOne {
		// Target is minio: server-side copy within the same bucket.
		// Attachments are sharded by the first two uuid characters; the minio
		// key ends at the uuid (no fileName), matching the OBS->minio branch
		// of exportModelToDataFromObs.
		destPath := strings.TrimPrefix(path.Join(setting.Attachment.Minio.BasePath, path.Join(id[0:1], id[1:2], id)), "/")
		log.Info("destPath=" + destPath)
		_, minioErr := storage.MinioCopyAFile(bucketName, modelFile, bucketName, destPath)
		if minioErr != nil {
			log.Info("upload to minio failed.err=" + minioErr.Error())
			return minioErr
		}
	} else {
		// Target is OBS: download from minio and re-upload the stream.
		reader, err := storage.Attachments.DownloadAFile(bucketName, modelFile)
		if err == nil {
			defer reader.Close()
			destObjectKey := strings.TrimPrefix(path.Join(setting.BasePath, path.Join(id[0:1], id[1:2], id, fileName)), "/")
			log.Info("destPath=" + destObjectKey)
			log.Info("upload to bucket=" + setting.Bucket + " objectKey=" + destObjectKey)
			obsErr := storage.PutReaderToObs(setting.Bucket, destObjectKey, reader)
			if obsErr != nil {
				log.Info("upload to obs failed 1.err=" + obsErr.Error())
				return obsErr
			}
		} else {
			// Download from minio failed (log label "upload to obs" is historical).
			log.Info("upload to obs failed 2.err=" + err.Error())
			return err
		}
	}
	return nil
}

// getFileName returns the last path segment of shortFile (the bare file name).
// If shortFile contains no "/", it is returned unchanged; an empty string
// yields an empty string.
//
// Fix vs. previous version: the check was `index > 0`, so an input with a
// leading slash and no other separator (e.g. "/a.zip") kept the slash in the
// returned name. `index >= 0` strips the separator in that case too.
func getFileName(shortFile string) string {
	if index := strings.LastIndex(shortFile, "/"); index >= 0 {
		return shortFile[index+1:]
	}
	return shortFile
}

+ 7
- 0
routers/routes/routes.go View File

@@ -1226,6 +1226,13 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Group("/label", func() {
m.Get("/:uuid", reqRepoDatasetReader, repo.LabelIndex)
})
m.Group("/model", func() {
m.Get("/getcurrentdataset", reqRepoDatasetReader, repo.GetCurrentDataSet)
m.Get("/getmodelfile", reqRepoDatasetReader, repo.GetDataSetSelectItemByJobId)
m.Get("/getprogress", reqRepoDatasetReader, repo.GetExportDataSetByMsgId)
m.Post("/export_exist_dataset", reqRepoDatasetWriterJson, repo.ExportModelToExistDataSet)
})

}, context.RepoRef())

m.Group("/cloudbrain", func() {


+ 3
- 0
routers/search.go View File

@@ -1263,6 +1263,9 @@ func searchModel(ctx *context.Context, TableName string, Key string, Page int, P
log.Info("actor is null?:" + fmt.Sprint(ctx.User == nil))
sortBy := "ai_model_manage.reference_count desc,ai_model_manage.download_count desc,ai_model_manage.created_unix desc"
if SortBy != "" && SortBy != "default" {
if strings.HasSuffix(SortBy, ".keyword") {
SortBy = SortBy[0:(len(SortBy) - len(".keyword"))]
}
sortBy = "ai_model_manage." + SortBy
if ascending {
sortBy += " asc"


+ 72
- 0
templates/custom/export_dataset.tmpl View File

@@ -0,0 +1,72 @@
<div id="newdataset">
<div class="ui modal export_dataset">
<div id="container">
<div class="header" style="padding: 1rem;background-color: rgba(240, 240, 240, 100);">
<h4 id="model_header">{{$.i18n.Tr "repo.export_result_to_dataset"}}</h4>
</div>
<div class="content content-padding">
<div style="text-align: center;padding-bottom: 2rem;">
<span class="text-tip">{{$.i18n.Tr "dataset.export_tips" | Safe}}</span>
</div>
<form class="ui form">
<div class="ui error message"></div>
{{.CsrfTokenHtml}}
<input type="hidden" name="type" value="0">
<input type="hidden" name="modelSelectedFile">
<div class="inline field">
<label class="label-fix-width">{{$.i18n.Tr "dataset.dataset_available_clusters"}}</label>
<div class="ui blue mini menu compact selectcloudbrain" id="export-dataset-type">
<a class="active item" data-type="0">
<svg class="svg" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" width="16" height="16">
<path fill="none" d="M0 0h24v24H0z"/>
<path d="M3 2.992C3 2.444 3.445 2 3.993 2h16.014a1 1 0 0 1 .993.992v18.016a.993.993 0 0 1-.993.992H3.993A1 1 0 0 1 3 21.008V2.992zM19 11V4H5v7h14zm0 2H5v7h14v-7zM9 6h6v2H9V6zm0 9h6v2H9v-2z"/>
</svg>
CPU/GPU
</a>
<a class="item" data-type="1">
<svg class="svg" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" width="16" height="16">
<path fill="none" d="M0 0h24v24H0z"/>
<path d="M3 2.992C3 2.444 3.445 2 3.993 2h16.014a1 1 0 0 1 .993.992v18.016a.993.993 0 0 1-.993.992H3.993A1 1 0 0 1 3 21.008V2.992zM19 11V4H5v7h14zm0 2H5v7h14v-7zM9 6h6v2H9V6zm0 9h6v2H9v-2z"/>
</svg>
NPU
</a>
</div>
</div>
<div class="inline field">
<label class="label-fix-width">{{$.i18n.Tr "dataset.file_description"}}:</label>
<textarea style="width: 80%;" id="description" name="description" placeholder='{{.i18n.Tr "repo.modelarts.train_job.new_place"}}' rows="3" maxlength="255" placeholder="" onchange="this.value=this.value.substring(0, 255)" onkeydown="this.value=this.value.substring(0, 255)" onkeyup="this.value=this.value.substring(0, 255)"></textarea>
</div>
<div class="inline field">
<label class="label-fix-width"></label>
<div id="export-dataset-file">
<div class="ui items" id="model-file-export">
</div>
</div>
</div>
<div class="inline field">
<label class="label-fix-width">{{$.i18n.Tr "dataset.select_result_file"}}</label>
<span id="export-dataset-select">
<i class="plus square outline icon"></i>{{$.i18n.Tr "dataset.select_file"}}
<div id="model-file-wrap" style="display:none">
<div class="ui list" id="model-file-result" >
</div>
</div>
</span>
</div>
</form>
<div class="actions" style="margin-left: 140px;margin-top: 2rem;">
<button type="button" class="ui approve green button">{{$.i18n.Tr "dataset.export_file"}}</button>
<button type="button" class="ui button cancel">{{$.i18n.Tr "cancel"}}</button>
</div>
</div>
</div>
</div>
<div class="ui modal no_export_dataset">
<div class="item-empty">
<div class="item-empty-icon"></div>
<div class="item-empty-tips">{{.i18n.Tr "dataset.dataset_no_create"}} <a href="/{{$.RepoRelPath}}/datasets/create">{{$.i18n.Tr "dataset.go_new_dataset"}}</a></div>
</div>
</div>
</div>

+ 12
- 2
templates/repo/cloudbrain/trainjob/show.tmpl View File

@@ -56,13 +56,22 @@
<span class="accordion-panel-title-content">
<span>
<div style="float: right;">
{{if and ($.canDownload) (ne .Status "WAITING") ($.Permission.CanWrite $.UnitTypeModelManage) }}
{{if and ($.canDownload) (ne .Status "WAITING") }}
<a class="ti-action-menu-item" id="{{.VersionName}}-create-model"
onclick="showcreate({DisplayJobName:{{.DisplayJobName}},JobName:{{.JobName}},JobID:{{.JobID}},VersionName:{{.VersionName}}})">{{$.i18n.Tr "repo.modelarts.create_model"}}</a>
{{else}}
<a class="ti-action-menu-item disabled" id="{{.VersionName}}-create-model">{{$.i18n.Tr "repo.modelarts.create_model"}}</a>
{{end}}

{{if and ($.canDownload) (ne .Status "WAITING") }}
<a class="ti-action-menu-item export-dataset" style="position:relative" id="{{.VersionName}}-export-dataset" data-version="{{.VersionName}}" data-jobid="{{.JobID}}" data-repopath="/{{$.RepoRelPath}}/datasets/model">
{{$.i18n.Tr "repo.export_result_to_dataset"}}
<div class="export-popup" id="{{.VersionName}}-popup">
<div class="ui active centered inline loader" style="width: 100%;display: flex;align-items: center;">{{$.i18n.Tr "repo.loader_result_file"}}</div>
</div>
</a>
{{else}}
<a class="ti-action-menu-item disabled" id="{{.VersionName}}-export-dataset">{{$.i18n.Tr "repo.export_result_to_dataset"}}</a>
{{end}}
</div>
<div class="ac-display-inblock title_text acc-margin-bottom">
<span class="cti-mgRight-sm">{{TimeSinceUnix1 .CreatedUnix}}</span>
@@ -512,6 +521,7 @@
</div>
</div>
</div>
{{template "custom/export_dataset" .}}
</div>
{{template "base/footer" .}}
<script type="text/javascript" src="/self/ztree/js/jquery.ztree.core.js?v={{MD5 AppVer}}"></script>


+ 12
- 2
templates/repo/grampus/trainjob/show.tmpl View File

@@ -70,12 +70,22 @@
</span>
</div>
<div style="float: right;">
{{if and ($.canDownload) (ne .Status "WAITING") ($.Permission.CanWrite $.UnitTypeModelManage) (ne .ComputeResource "GCU")}}
{{if and ($.canDownload) (ne .Status "WAITING")}}
<a class="ti-action-menu-item" id="{{.VersionName}}-create-model"
onclick="showcreate({DisplayJobName:{{.DisplayJobName}},JobName:{{.JobName}},JobID:{{.JobID}},VersionName:{{.VersionName}},EngineName:{{.EngineName}},ComputeResource:{{.ComputeResource}},Type:{{.Type}}})">{{$.i18n.Tr "repo.modelarts.create_model"}}</a>
{{else}}
<a class="ti-action-menu-item disabled" id="{{.VersionName}}-create-model">{{$.i18n.Tr "repo.modelarts.create_model"}}</a>
{{end}}
{{if and ($.canDownload) (ne .Status "WAITING")}}
<a class="ti-action-menu-item export-dataset" style="position:relative" id="{{.VersionName}}-export-dataset" data-version="{{.VersionName}}" data-jobid="{{.JobID}}" data-repopath="/{{$.RepoRelPath}}/datasets/model">
{{$.i18n.Tr "repo.export_result_to_dataset"}}
<div class="export-popup" id="{{.VersionName}}-popup">
<div class="ui active centered inline loader" style="width: 100%;display: flex;align-items: center;">{{$.i18n.Tr "repo.loader_result_file"}}</div>
</div>
</a>
{{else}}
<a class="ti-action-menu-item disabled" id="{{.VersionName}}-export-dataset">{{$.i18n.Tr "repo.export_result_to_dataset"}}</a>
{{end}}
</div>
</span>
</span>
@@ -568,7 +578,7 @@

</div>
</div>
{{template "custom/export_dataset" .}}
</div>
{{template "base/footer" .}}
<script type="text/javascript" src="/self/ztree/js/jquery.ztree.core.js?v={{MD5 AppVer}}"></script>


+ 13
- 4
templates/repo/modelarts/trainjob/show.tmpl View File

@@ -39,7 +39,7 @@
<div class="active section">{{.displayJobName}}</div>
</div>
</h4>
{{range $k ,$v := .version_list_task}}
<div class="ui accordion border-according" id="accordion{{.VersionName}}"
data-repopath="{{$.RepoRelPath}}/modelarts/train-job" data-jobid="{{.JobID}}"
@@ -52,13 +52,22 @@
<span>
<div style="float: right;">
{{$.CsrfTokenHtml}}
{{if and (.CanModify) (ne .Status "WAITING") ($.Permission.CanWrite $.UnitTypeModelManage) }}
{{if and (.CanModify) (ne .Status "WAITING")}}
<a class="ti-action-menu-item" id="{{.VersionName}}-create-model"
onclick="showcreate({DisplayJobName:{{.DisplayJobName}},JobName:{{.JobName}},JobID:{{.JobID}},VersionName:{{.VersionName}},EngineID:{{.EngineID}},EngineName:{{.EngineName}}})">{{$.i18n.Tr "repo.modelarts.create_model"}}</a>
{{else}}
<a class="ti-action-menu-item disabled" id="{{.VersionName}}-create-model">{{$.i18n.Tr "repo.modelarts.create_model"}}</a>
{{end}}

{{if and (.CanModify) (ne .Status "WAITING")}}
<a class="ti-action-menu-item export-dataset" style="position:relative" id="{{.VersionName}}-export-dataset" data-version="{{.VersionName}}" data-jobid="{{.JobID}}" data-repopath="/{{$.RepoRelPath}}/datasets/model">
{{$.i18n.Tr "repo.export_result_to_dataset"}}
<div class="export-popup" id="{{.VersionName}}-popup">
<div class="ui active centered inline loader" style="width: 100%;display: flex;align-items: center;">{{$.i18n.Tr "repo.loader_result_file"}}</div>
</div>
</a>
{{else}}
<a class="ti-action-menu-item disabled" id="{{.VersionName}}-export-dataset">{{$.i18n.Tr "repo.export_result_to_dataset"}}</a>
{{end}}
{{if .CanModify}}
<a class="ti-action-menu-item"
href="{{$.RepoLink}}/modelarts/train-job/{{.JobID}}/create_version?version_name={{.VersionName}}&path=show">{{$.i18n.Tr "repo.modelarts.modify"}}</a>
@@ -66,7 +75,6 @@
<a class="ti-action-menu-item disabled"
href="{{$.RepoLink}}/modelarts/train-job/{{.JobID}}/create_version?version_name={{.VersionName}}">{{$.i18n.Tr "repo.modelarts.modify"}}</a>
{{end}}

{{if .CanDel}}
<a class="ti-action-menu-item stop-show-version {{if eq .Status "KILLED" "FAILED" "START_FAILED" "KILLING" "COMPLETED" "SUCCEEDED" "STOPPED"}}disabled {{end}}"
id="{{.VersionName}}-stop"
@@ -569,6 +577,7 @@

</div>
</div>
{{template "custom/export_dataset" .}}
</div>
{{template "base/footer" .}}



+ 311
- 1
web_src/js/features/cloudbrainShow.js View File

@@ -374,7 +374,6 @@ export default async function initCloudrainSow() {
let html = "";
if (jsonObj != null){
let podEventArray = jsonObj['JobEvents'];
console.log("podEventArray",podEventArray)
if(podEventArray != null){
for(var i=0; i < podEventArray.length;i++){
if (podEventArray[i]["reason"] != "") {
@@ -531,6 +530,317 @@ export default async function initCloudrainSow() {

e.stopPropagation();
});
// --- Shared state for the "export training result to dataset" modal ---
let initShowExportDataset = true // true until the current-dataset fetch has run once
let initShowNoDataset = true // false once the "no dataset yet" modal has been shown
let canExportDataset = true // false while a previous export's results are displayed; re-selecting files re-enables
let fileList = [] // data-index values (file names) of the files chosen for export
let timer = null // interval id of the active progress poller, if any
let last_version = '' // task version the modal was last built for (enables the fast path)
let datasetID = '' // id of the repo's current dataset, cached from /getcurrentdataset
// Click handler for every per-version "export result to dataset" link.
// First click for a version fetches the repo's current dataset, the task's
// result files and any previous export progress, then builds and opens the
// export modal; later clicks for the same version reuse the built modal.
$('.ui.accordion .export-dataset').on('click', function (e) {
// Link metadata comes from data-version / data-jobid / data-repopath.
const version_name = this.dataset.version;
const jobId = this.dataset.jobid;
const repoPath = this.dataset.repopath;
const Fileurl = `${repoPath}/getmodelfile`
const dataUrl = `${repoPath}/getcurrentdataset`
const exportUrl = `${repoPath}/export_exist_dataset`
const getProgressUrl = `${repoPath}/getprogress`
// Fast path: data already loaded for this same version, just re-open a modal.
if (!initShowExportDataset && last_version===version_name) {
if (initShowNoDataset) {
$(".ui.export_dataset.modal").modal({
onShow: function () {
$('.ui.dimmer').css({ "background-color": "rgb(136, 136, 136,0.7)" })
$(".ui.export_dataset.modal .cancel").on('click', function () {
$(".ui.export_dataset.modal").modal('hide')
})
},
onHide: function (params) {
// Detach modal-scoped handlers and clear any validation error.
$('.ui.modal.export_dataset #container').off("click")
$('#export-dataset-select').off("click")
$('#export-dataset-type').off("click")
$('.ui.export_dataset.modal .error.message').text('').hide()
},
onApprove: function () {
// Collect the chosen file rows and POST the export request.
const modelFileSelectEle = $(".ui.export_dataset.modal #export-dataset-file .items").find('.file_item')
if (modelFileSelectEle.length !== 0 && canExportDataset) {
modelFileSelectEle.each(function (index) {
fileList.push($(this).attr('data-index'))
})
const type = Number($('.ui.modal.export_dataset input[name="type"]').val())
const csrf = $('.ui.modal.export_dataset input[name="_csrf"]').val()
const desc = $('.ui.modal.export_dataset textarea[name="description"]').val() //,description:desc
let params = { _csrf: csrf, jobId: jobId, versionName: version_name, datasetId: datasetID, modelSelectedFile: fileList.join(';'), type: type,description:desc }
postExportDataset(exportUrl, params, getProgressUrl)
} else {
$('.ui.export_dataset.modal .error.message').text(`${i18n['exportDataset']['please_select_file']}`).show()
}
// Returning false keeps the modal open after "approve".
return false;
},
}).modal('show').modal('setting', 'closable', false)
} else {
// Repo has no dataset: show the "create a dataset first" modal again.
$(".ui.no_export_dataset.modal").modal({
onShow: function () {
$('.ui.dimmer').css({ "background-color": "rgb(136, 136, 136,0.7)" })
},
}).modal("show")
}
} else {
// Slow path: first click for this version. Show the loading popup,
// fetch the current dataset, then build the modal from the responses.
$(`.ui.accordion #${version_name}-export-dataset .export-popup`).show()
$('.ui.export_dataset.modal .error.message').text('').hide()
$.get(dataUrl, (data) => {
initShowExportDataset = false
last_version = version_name
if (data.code === 0) {
datasetID = data.dataset.ID
// Load selectable result files and any previous export progress.
getModelFileList(Fileurl, version_name, jobId)
getInitEXportDataset(getProgressUrl,data.dataset.ID,jobId,version_name)
$(".ui.export_dataset.modal").modal({
onApprove: function () {
const modelFileSelectEle = $(".ui.export_dataset.modal #export-dataset-file .items").find('.file_item')
if (modelFileSelectEle.length !== 0 && canExportDataset) {
modelFileSelectEle.each(function (index) {
fileList.push($(this).attr('data-index'))
})
const type = Number($('.ui.modal.export_dataset input[name="type"]').val())
const csrf = $('.ui.modal.export_dataset input[name="_csrf"]').val()
const desc = $('.ui.modal.export_dataset textarea[name="description"]').val() //,description:desc
let params = { _csrf: csrf, jobId: jobId, versionName: version_name, datasetId: data.dataset.ID, modelSelectedFile: fileList.join(';'), type: type, description: desc }
postExportDataset(exportUrl, params, getProgressUrl)
} else {
$('.ui.export_dataset.modal .error.message').text(`${i18n['exportDataset']['please_select_file']}`).show()
}
return false;
},
onShow: function () {
// Loading finished: hide the inline popup before the dimmer appears.
$(`.ui.accordion #${version_name}-export-dataset .export-popup`).hide()
$('.ui.dimmer').css({ "background-color": "rgb(136, 136, 136,0.7)" })
},
onHide: function () {
$('.ui.modal.export_dataset #container').off("click")
$('#export-dataset-select').off("click")
$('#export-dataset-type').off("click")
$('.ui.export_dataset.modal .error.message').text('').hide()
}
})
.modal("show")
.modal('setting', 'closable', false)
} else {
// No dataset in this repo: point the user at dataset creation.
$(".ui.no_export_dataset.modal").modal({
onShow: function () {
$(`.ui.accordion #${version_name}-export-dataset .export-popup`).hide()
$('.ui.dimmer').css({ "background-color": "rgb(136, 136, 136,0.7)" })
},
}).modal("show")
initShowNoDataset = false
}
})
}
// Open the result-file picker dropdown.
$('#export-dataset-select').on('click', function () {
$(this).find('#model-file-wrap').show()
})
// Removing a selected file row also unchecks its checkbox in the picker.
$('#model-file-export').on('click','.delete.icon', function () {
let fileEle = $(this).siblings('span').text()
$(this).parent().remove()
const $parentCheckbox = $('#model-file-result').find(`input[name="${fileEle}"]`).parent()
$parentCheckbox.checkbox('set unchecked')
})
// Clicking anywhere else inside the modal closes the file picker dropdown.
$('.ui.modal.export_dataset #container').on('click', function (e) {
if ($(e.target).closest('#model-file-wrap').length === 0 && $(e.target).closest('#export-dataset-select').length !== 1) {
$(this).find('#model-file-wrap').hide()
}
})
// Cluster-type selector. arg1 is set when the type is restored
// programmatically via trigger('click', type) in getInitEXportDataset.
$('#export-dataset-type').on('click', function (e,arg1) {
document.querySelectorAll('#export-dataset-type a').forEach((item) => {
item.classList.remove('active')
})
if (arg1) {
$('#export-dataset-type a')[arg1].classList.add('active')
document.querySelector('input[name="type"]').value=arg1
} else {
e.target.classList.add('active')
document.querySelector('input[name="type"]').value=e.target.dataset.type
}
})
e.stopPropagation();
})
// Fetch any previously started export for this task version and render the
// per-file status rows in the export modal. A non-empty result means an
// export already ran (or is still running), so further exporting is blocked
// (canExportDataset = false) until the user re-selects files.
// Progress values per file: 0 = in progress, 100 = done, -1 = failed,
// -2 = failed because a same-named file already exists in the dataset.
function getInitEXportDataset(getProgressUrl, datasetId, jobId, version_name) {
    let setIntervalFlag = false
    $('.ui.modal.export_dataset #model-file-export').empty()
    $.get(getProgressUrl, { progressId: `${datasetId}_${jobId}_${version_name}` }, (data) => {
        const result = data && JSON.parse(data)
        if (Object.keys(result).length > 0) {
            canExportDataset = false
            // '##type##' is a meta key carrying the cluster type, not a file.
            let fileInitList = Object.keys(result).filter((item) => item !== '##type##')
            // Restore the cluster-type selector to the previous export's type.
            $($('#export-dataset-type')).trigger('click', `${result['##type##']}`)
            fileInitList.forEach((item) => {
                const nameHtml = `<span class="nowrap" style="width:80%" title="${item}">${item}</span>`
                let statusHtml = ''
                if (result[item] === -1) {
                    statusHtml = `<div class="file_error"><i class="ri-close-circle-line failed"></i><span>${i18n['exportDataset']['export_failed']}</span></div>`
                } else if (result[item] === -2) {
                    statusHtml = `<div class="file_error">
                        <i class="ri-close-circle-line failed"></i>
                        <span>${i18n['exportDataset']['export_failed']}</span>
                        <span data-tooltip="${i18n['exportDataset']['export_has_same_file']}" data-inverted="" data-variation="tiny">
                            <i class="ri-question-fill question"></i>
                        </span>
                    </div>`
                } else if (result[item] === 100) {
                    statusHtml = `<div class="file_success">
                        <i class="ri-checkbox-circle-line success" style="vertical-align: middle;"></i>
                        <span>${i18n['exportDataset']['export_success']}</span>
                    </div>`
                } else if (result[item] === 0) {
                    statusHtml = `<div class="file_wait"><i class="ri-loader-2-line waiting spin"></i><span>${i18n['exportDataset']['exporting']}</span></div>`
                    // At least one file is still exporting: keep polling below.
                    setIntervalFlag = true
                }
                if (statusHtml) {
                    $('.ui.modal.export_dataset #model-file-export').append(`<div data-index="${item}" class="file_item">${nameHtml}${statusHtml}</div>`)
                }
            })
            if (setIntervalFlag) {
                timer && clearInterval(timer)
                // Fix: poll with the same three-part progressId used for the
                // initial query above; the old code dropped version_name here,
                // so the poller queried a non-existent progress record.
                timer = setIntervalImmediately(getProgress, 5000, getProgressUrl, `${datasetId}_${jobId}_${version_name}`)
            }
        }
    })
}
// Fetch the training task's result files and populate the file picker
// (#model-file-result) with one checkbox per file, sorted by name.
// Checking a box adds a removable row to the export list
// (#model-file-export); unchecking removes that row again.
function getModelFileList(Fileurl,version_name,jobId) {
$.get(Fileurl, { versionName: version_name, jobId: jobId }, (data) => {
$('.ui.modal.export_dataset #model-file-result').empty()
let html = ''
if (data.code === 0 && data.files.length !== 0) {
// Stable alphabetical order so the list matches between openings.
let dataFileList = data.files.sort((a, b) => {
return a.FileName.localeCompare(b.FileName)
})
dataFileList.forEach(element => {
html += `<div class="item">
<div class="ui child checkbox">
<input type="checkbox" name="${element.FileName}">
<label>${element.FileName}</label>
</div>
</div>`
});
$('.ui.modal.export_dataset #model-file-result').append(html)
$('#model-file-result.list .child.checkbox').checkbox({
onChecked: function () {
$('.ui.export_dataset.modal .error.message').text('').hide()
// A previous export's status rows are still shown: clear them and
// re-enable exporting now that the user picks a fresh selection.
if (!canExportDataset) {
$('.ui.modal.export_dataset #model-file-export').empty()
canExportDataset = true
}
let itemHtml = ''
let fileName = $(this).attr('name')
itemHtml += `<div data-index="${fileName}" class="file_item">
<span class="nowrap" style="width:80%" title="${fileName}">${fileName}</span>
<i class="delete icon" style="cursor:pointer"></i>
</div>`
$('.ui.modal.export_dataset #model-file-export').append(itemHtml)
},
onUnchecked: function() {
let fileName = $(this).attr('name')
$('.ui.modal.export_dataset #model-file-export').find(`div[data-index="${fileName}"]`).remove()

},
})
}
}).fail(function (err) {
console.log(err);
});
}
// Poll the export-progress endpoint once and update the status cell of each
// file row in #model-file-export. When every file has reached a terminal
// state (success or failure) the poll timer is cancelled and the modal is
// re-armed so the user can start a new export.
// Progress values per file: 0 = in progress, 100 = done, -1 = failed,
// -2 = failed because a same-named file already exists in the dataset.
function getProgress(getProgressUrl, progressId) {
    const $statusEle = $('#model-file-export .file_item').find('div')
    let fileLength = $statusEle.length
    let count = 0
    $.get(getProgressUrl, { progressId: progressId }, (data) => {
        const result = data && JSON.parse(data)
        // Keep server keys in the order the user selected the files, so the
        // index into $statusEle lines up with the rendered rows.
        let sortResult = Object.keys(result).sort((a, b) => {
            return fileList.indexOf(a) - fileList.indexOf(b)
        })
        // '##type##' is a meta key carrying the cluster type, not a file.
        let filterResult = sortResult.filter((item) => item !== '##type##')
        filterResult.forEach((item, index) => {
            if (result[item] === -1) {
                if (!$($statusEle[index]).hasClass('file_error')) {
                    $($statusEle[index]).replaceWith(`<div class="file_error"><i class="ri-close-circle-line failed"></i><span>${i18n['exportDataset']['export_failed']}</span></div>`)
                }
                count++
            }
            if (result[item] === -2) {
                if (!$($statusEle[index]).hasClass('file_error')) {
                    $($statusEle[index]).replaceWith(`<div class="file_error"><i class="ri-close-circle-line failed"></i><span>${i18n['exportDataset']['export_failed']}</span><span data-tooltip="${i18n['exportDataset']['export_has_same_file']}" data-inverted="" data-variation="tiny"><i class="ri-question-fill question"></i></span></div>`)
                }
                count++
            }
            if (result[item] === 100) {
                if (!$($statusEle[index]).hasClass('file_success')) {
                    $($statusEle[index]).replaceWith(`<div class="file_success"><i class="ri-checkbox-circle-line success" style="vertical-align: middle;"></i><span>${i18n['exportDataset']['export_success']}</span></div>`)
                }
                count++
            }
            if (count === fileLength) {
                // Every file is terminal: stop polling and re-arm the modal.
                $(".ui.export_dataset.modal").modal('refresh')
                timer && clearInterval(timer)
                $('#export-dataset-select').on('click', function () {
                    $(this).find('#model-file-wrap').show()
                })
                $('.ui.modal.export_dataset .ui.approve').removeClass('disabled')
                fileList = []
                // Block re-export until the user re-selects files.
                canExportDataset = false
                $('#model-file-result.list .child.checkbox.checked').each(function () {
                    $(this).checkbox('set unchecked')
                })
                return // exits only this forEach callback iteration
            }
        })
    })
}
// Kick off an export of the selected result files into the dataset, then
// poll per-file progress every 5 seconds via getProgress.
// params carries: _csrf, jobId, versionName, datasetId,
// modelSelectedFile (';'-joined file names), type and description.
function postExportDataset(url, params, getProgressUrl) {
    $.post(url, params, (data) => {
        // NOTE(review): this endpoint returns code as the string '0' while
        // the GET endpoints use the number 0 — confirm server-side.
        if (data.code === '0') {
            // Swap every selected row's delete icon for an "exporting" spinner.
            $('#model-file-export .delete.icon').each(function () {
                $(this).replaceWith(`<div class="file_wait"><i class="ri-loader-2-line waiting spin"></i><span>${i18n['exportDataset']['exporting']}</span></div>`)
            })
            $('.ui.modal.export_dataset .ui.approve').addClass('disabled')
            $('#export-dataset-select').off("click")
            timer && clearInterval(timer)
            // setIntervalImmediately fires getProgress once right away, so the
            // extra standalone getProgress call the old code made here was a
            // duplicate request and has been removed.
            timer = setIntervalImmediately(getProgress, 5000, getProgressUrl, data.progressId)
        }
    })
}
// Invoke func(...args) once right away, then keep invoking it every
// `interval` milliseconds. Returns the interval id so the caller can
// cancel it with clearInterval.
function setIntervalImmediately(func, interval, ...args) {
    func.apply(null, args)
    return setInterval(func, interval, ...args)
};
// $('.ui.pointing.secondary.menu .item:eq(0)').click(function(e) {
// const self = $(this);
// setTimeout(function() {


+ 14
- 0
web_src/js/features/i18nVue.js View File

@@ -155,6 +155,13 @@ export const i18nVue = {
model_should_same_model: '选择同一模型下的文件',
model_suport_file_tips: '模型文件支持的格式为 [ckpt, pb, h5, json, pkl, pth, t7, pdparams, onnx, pbtxt, keras, mlmodel, cfg, pt]',
},
exportDataset: {
export_failed: '导出失败',
export_has_same_file: '当前项目已存在相同的数据集文件',
export_success: '导出成功',
exporting: '正在导出',
please_select_file:'请先选择文件',
}
},
US: {
@@ -316,5 +323,12 @@ export const i18nVue = {
model_should_same_model: 'Select the files should in the same model.',
model_suport_file_tips: 'The supported format of the model file is [ckpt, pb, h5, json, pkl, pth, t7, pdparams, onnx, pbtxt, keras, mlmodel, cfg, pt]',
},
exportDataset: {
export_failed: 'Export failed',
export_has_same_file:'The same dataset file already exists in the current project',
export_success: 'Export success',
exporting: ' Exporting',
please_select_file: 'Please select a file first',
}
},
};

+ 1
- 1
web_src/js/index.js View File

@@ -5198,7 +5198,6 @@ function initcreateRepo() {
initcreateRepo();

function initChartsNpu() {
const repoPath = $('.metric_chart').data('path')
let options = {
legend: {
data: [],
@@ -5244,6 +5243,7 @@ function initChartsNpu() {
const sortBy = (arr, k) =>
arr.concat().sort((a, b) => (a[k] > b[k] ? 1 : a[k] < b[k] ? -1 : 0));
$(".metric_chart").click(function (e) {
const repoPath = $(this).data('path')
let versionName = $(this).data("version");
let myCharts = echarts.init(
document.getElementById(`metric-${versionName}`)


+ 126
- 0
web_src/less/_dataset.less View File

@@ -263,3 +263,129 @@
background: #fff !important;
}
}


.export_dataset{
.content{
.text-tip{
color: #888;
font-size: 12px;
}
.text-tip::before{
content: '*';
color: #f2711c;
}
#export-dataset-select{
cursor: pointer;
color: rgba(3, 102, 214, 100);
font-size: 14px;
line-height: 26px;
font-family: SourceHanSansSC-medium;
position: relative;
#model-file-wrap{
position: absolute;
left: 0px;
padding: 1rem;
background: white;
width: 500px;
max-height: 200px;
overflow: auto;
border: 1px solid rgb(84, 200, 255);
border-radius: 4px;
}
}
#export-dataset-file{
display: inline-block;
width: 80%;
.file_item{
margin:0.5rem 0;
justify-content: space-between;
display: flex;
.file_error,.file_success,.file_wait{
width: 100px;
display: flex;
align-items: center;
.failed{
color: red;
font-size: 16px;
margin-right: .4rem;
}
.success {
color: #21ba45;
font-size: 16px;
margin-right: .4rem;
}
.waiting{
color: #21ba45;
font-size: 16px;
margin-right: .4rem;
}
.question {
font-size: 16px;
margin-left: .4rem;
}
.spin{
animation: spining 3s linear infinite;
}
@-webkit-keyframes spining {
0% { -webkit-transform: rotate(0deg); }
100% { -webkit-transform: rotate(360deg); }
}
@keyframes spining {
0% { -webkit-transform: rotate(0deg); }
100% { -webkit-transform: rotate(360deg); }
}
}
}
}
}
}
.no_export_dataset{
.item-empty {
height: 391px;
width: 100%;
overflow: hidden;
padding: 15px;
background: transparent;
display: flex;
flex-direction: column;
justify-content: center;
background-color: rgba(245, 245, 246, 0.5);
.item-empty-icon {
height: 80px;
width: 100%;
background: url(/img/empty-box.svg) center center no-repeat;
}
.item-empty-tips {
text-align: center;
margin-top: 20px;
font-size: 18px;
color: rgb(63, 63, 64);
}
}
}

.accordion-panel-title-content{
.export-popup{
display: none;
position: absolute;
top: 0;
right: 0;
min-width: -webkit-min-content;
min-width: -moz-min-content;
min-width: min-content;
z-index: 1900;
border: 1px solid #d4d4d5;
line-height: 1.4285em;
max-width: 250px;
background: #fff;
padding: .833em 1em;
font-weight: 400;
font-style: normal;
color: rgba(0,0,0,.87);
border-radius: .28571429rem;
-webkit-box-shadow: 0 2px 4px 0 rgba(34,36,38,.12), 0 2px 10px 0 rgba(34,36,38,.15);
box-shadow: 0 2px 4px 0 rgba(34,36,38,.12), 0 2px 10px 0 rgba(34,36,38,.15);
}
}

+ 1
- 1
web_src/less/openi.less View File

@@ -1373,7 +1373,7 @@ i.SUCCEEDED {
margin-right: 10px;
padding-right: 11px;
text-decoration: none !important;
color: #526ecc;
// color: #526ecc;
cursor: pointer;
display: inline-block;
-moz-user-select: none;


+ 17
- 1
web_src/vuepages/apis/modules/dataset.js View File

@@ -17,6 +17,22 @@ export const putDatasetStar = (url) => {
});
};

export const getModelFile = (params) => {
return service({
url: '',
params: params,
method:"get"
})
}

export const exportExistDataset = (data) => {
return service({
url: '',
data: data,
method:"post"
})
}

/* 选择数据集组件相关 */
// 获取当前仓库的数据集
// params - username, reponame, q, page, type
@@ -121,4 +137,4 @@ export const setCompleteMultipart = (data) => {
params: {},
data: Qs.stringify(data),
});
};
};

+ 2
- 0
web_src/vuepages/langs/config/en-US.js View File

@@ -293,6 +293,7 @@ const en = {
codeBranch: 'Code branch',
bootFile: 'Boot file',
trainDataset: 'Train dataset',
datasetfile: 'Dataset files',
specInfo: 'Specifications',
workServerNumber: 'Amount of compute node',
runParameters: 'Run parameters',
@@ -418,6 +419,7 @@ const en = {
dataset_ok: "OK",
dataset_not_equal_file: "Cannot select a data file with the same name.",
dataset_most: "Up to {msg} files.",
dataset_file_was_deleted: "The file has been deleted",
},
specObj: {
resSelectTips: 'The "resource specification" is the hardware you use to run the task. In order for more people to use the resources of this platform, please select according to your actual needs',


+ 2
- 0
web_src/vuepages/langs/config/zh-CN.js View File

@@ -309,6 +309,7 @@ const zh = {
codeBranch: '代码分支',
bootFile: '启动文件',
trainDataset: '训练数据集',
datasetfile: '数据集文件',
specInfo: '规格',
workServerNumber: '计算节点',
runParameters: '运行参数',
@@ -434,6 +435,7 @@ const zh = {
dataset_ok: "确定",
dataset_not_equal_file: "不能选择相同名称的数据文件",
dataset_most: "最多不超过 {msg} 个文件",
dataset_file_was_deleted: "文件已经被删除",
},
specObj: {
resSelectTips: '「资源规格」是您运行该任务使用的硬件,为了更多人能够使用本平台的资源,请按照您的实际需求进行选择。',


+ 68
- 0
web_src/vuepages/pages/dataset/exportDataset/index.vue View File

@@ -0,0 +1,68 @@
<template>
<div class="content content-padding">
<form id="formId" method="POST" class="ui form">
<div class="ui error message">
</div>
<input type="hidden" name="trainTaskCreate" value="true">
<div class="inline required field">
<label class="label_color" for="">可用集群</label>
<div class="ui blue mini menu compact selectcloudbrain" id="adminCommitImage">
<a class="active item" data-type="0">
<svg class="svg" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" width="16" height="16">
<path fill="none" d="M0 0h24v24H0z"/>
<path d="M3 2.992C3 2.444 3.445 2 3.993 2h16.014a1 1 0 0 1 .993.992v18.016a.993.993 0 0 1-.993.992H3.993A1 1 0 0 1 3 21.008V2.992zM19 11V4H5v7h14zm0 2H5v7h14v-7zM9 6h6v2H9V6zm0 9h6v2H9v-2z"/>
</svg>
启智CPU/GPU
</a>
<a class="item" data-type="1">
<svg class="svg" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" width="16" height="16">
<path fill="none" d="M0 0h24v24H0z"/>
<path d="M3 2.992C3 2.444 3.445 2 3.993 2h16.014a1 1 0 0 1 .993.992v18.016a.993.993 0 0 1-.993.992H3.993A1 1 0 0 1 3 21.008V2.992zM19 11V4H5v7h14zm0 2H5v7h14v-7zM9 6h6v2H9V6zm0 9h6v2H9v-2z"/>
</svg>
智算CPU/GPU
</a>
</div>
</div>
<div class="inline required field">
<label class="label_color" for="">文件描述</label>
<textarea style="width: 80%;" required id="description" name="description" rows="3" maxlength="255" placeholder="" onchange="this.value=this.value.substring(0, 255)" onkeydown="this.value=this.value.substring(0, 255)" onkeyup="this.value=this.value.substring(0, 255)"></textarea>
</div>

<div class="inline field" style="margin-left: 75px;">
<button type="button" class="ui create_train_job green button"
style="position: absolute;">
sdfsdfsdf
</button>
</div>
</form>
<div class="actions" style="display: inline-block;margin-left: 180px;">
<button type="button" class="ui button cancel">ccc</button>
</div>
</div>


</template>
<script>
import { getModelFile,exportExistDataset } from "~/apis/modules/dataset";
export default {

data() {
return {

}
},
computed: {},
methods: {
getModelFileLiST(){
getModelFile().then((res)=>{})
}
},
mounted() {

},
}
</script>
<style scoped>

</style>

+ 17
- 0
web_src/vuepages/pages/dataset/exportDataset/vp-export-dataset.js View File

@@ -0,0 +1,17 @@
// Entry point for the standalone export-dataset Vue page: registers
// Element UI with the locale matching the site language, then mounts the
// page component onto the #__vue-root placeholder.
import Vue from 'vue';
import ElementUI from 'element-ui';
import 'element-ui/lib/theme-chalk/index.css';
import localeEn from 'element-ui/lib/locale/lang/en';
import localeZh from 'element-ui/lib/locale/lang/zh-CN';
import { i18n, lang } from '~/langs';
import App from './index.vue';

// Element UI locale follows the app language; small widget size site-wide.
Vue.use(ElementUI, {
locale: lang === 'zh-CN' ? localeZh : localeEn,
size: 'small',
});

new Vue({
i18n,
render: (h) => h(App),
}).$mount('#__vue-root');

+ 10
- 8
web_src/vuepages/pages/modelmanage/graph/model-graph.css View File

@@ -279,8 +279,7 @@

._model-info {
position: fixed;
width: 300px;
height: 258px;
width: 326px;
border-color: rgb(225, 227, 230);
border-width: 1px;
border-style: solid;
@@ -301,11 +300,10 @@
display: flex;
padding-left: 2px;
margin: 4px 0;
align-items: center;
}

._model-info .row>div:nth-child(1) {
width: 66px;
width: 88px;
color: rgb(136, 136, 136);
text-align: right;
}
@@ -313,12 +311,16 @@
._model-info .row>div:nth-child(2) {
flex: 1;
color: rgb(16, 16, 16);
width: 0;
}

._model-info .row .dataset-c {
width: 100%;
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
}

:lang(en-US) ._model-info .row>div:nth-child(1) {
width: 120px;
}

:lang(en-US) ._model-info {
width: 326px;
}

+ 17
- 1
web_src/vuepages/pages/modelmanage/graph/model-graph.js View File

@@ -275,6 +275,18 @@ ModelGraph.prototype.showModelNodeInfo = function (nodeEl, nodeData, showOrHide)
modelObj.trainJobDuration = trainTaskInfo.TrainJobDuration;
modelObj.sepcStr = typeof specObj == 'object' ? renderSpecStr(specObj, false) : specObj;
}
if (modelObj.datasetInfo) {
let datasetHtml = '';
for (let i = 0, iLen = modelObj.datasetInfo.length; i < iLen; i++) {
const dataset = modelObj.datasetInfo[i];
if (dataset.is_delete) {
datasetHtml += `<div class="dataset-c"><span title="${dataset.dataset_name}(${i18n.t('datasetObj.dataset_file_was_deleted')})">${dataset.dataset_name}(${i18n.t('datasetObj.dataset_file_was_deleted')})</span></div>`;
} else {
datasetHtml += `<div class="dataset-c"><a target="_blank" href="${dataset.repository_link}" title="${dataset.dataset_name}">${dataset.dataset_name}</a></div>`;
}
}
modelObj.datasetHtml = datasetHtml;
}
modelObj = {
...modelObj,
engineName: getListValueWithKey(MODEL_ENGINES, model.engine.toString()),
@@ -308,6 +320,10 @@ ModelGraph.prototype.showModelNodeInfo = function (nodeEl, nodeData, showOrHide)
<div>${i18n.t('trainTask')}:</div>
<div>${modelObj.displayJobNameHtml || '--'}</div>
</div>
<div class="row">
<div>${i18n.t('modelManage.datasetfile')}:</div>
<div>${modelObj.datasetHtml || '--'}</div>
</div>
<div class="row">
<div>${i18n.t('trainDuration')}:</div>
<div>${modelObj.trainJobDuration || '--'}</div>
@@ -318,7 +334,7 @@ ModelGraph.prototype.showModelNodeInfo = function (nodeEl, nodeData, showOrHide)
</div>`;
const posInfo = nodeEl.getBoundingClientRect();
const winW = window.innerWidth || document.documentElement.clientWidth || document.body.clientWidth;
showInfoEl.style.top = Math.max(posInfo.top - 258 + 6, 20) + 'px';
showInfoEl.style.top = Math.max(posInfo.top - 280 + 6, 20) + 'px';
showInfoEl.style.left = Math.min(posInfo.left - 6 + posInfo.width, winW - 250 - 20) + 'px';
document.querySelector('body').append(showInfoEl);
const mouseEnter = function () {


+ 8
- 8
webpack.config.js View File

@@ -267,14 +267,14 @@ module.exports = {
filename: 'css/[name].css',
chunkFilename: 'css/[name].css',
}),
...(isWatching ? [new SourceMapDevToolPlugin({
filename: 'js/[name].js.map',
include: [
'js/index.js',
/js\/vp-.*\.js/,
],
})] : [])
,
// ...(isWatching ? [new SourceMapDevToolPlugin({
// filename: 'js/[name].js.map',
// include: [
// 'js/index.js',
// /js\/vp-.*\.js/,
// ],
// })] : [])
// ,
new SpriteLoaderPlugin({
plainSprite: true,
}),


Loading…
Cancel
Save