#5422 合入相关Issue修改

Merged
ychao_1983 merged 14 commits from zouap_dev into V20240423 3 weeks ago
  1. +3
    -3
      models/user_analysis_for_activity.go
  2. +11
    -5
      models/user_business_analysis.go
  3. +1
    -0
      options/locale/locale_en-US.ini
  4. +1
    -0
      options/locale/locale_zh-CN.ini
  5. +4
    -4
      routers/ai_task/ai_task.go
  6. +6
    -2
      routers/api/v1/repo/attachments.go
  7. +55
    -17
      routers/repo/ai_model_convert.go
  8. +7
    -0
      routers/repo/user_data_analysis.go
  9. +5
    -5
      services/ai_task_service/task/grampus_online_infer_task.go

+ 3
- 3
models/user_analysis_for_activity.go View File

@@ -593,10 +593,10 @@ func QueryUserAnnualReport(userId int64) *UserSummaryCurrentYear {
func GetLastModifyTime() string {
statictisSess := xStatistic.NewSession()
defer statictisSess.Close()
userBusinessAnalysisLastMonth := &UserBusinessAnalysisLastMonth{}
err := statictisSess.Select("*").Table(new(UserBusinessAnalysisLastMonth)).Limit(1, 0).Find(userBusinessAnalysisLastMonth)
reList := make([]*UserBusinessAnalysisLastMonth, 0)
err := statictisSess.Select("*").Table(new(UserBusinessAnalysisLastMonth)).Limit(1, 0).Find(&reList)
if err == nil {
return userBusinessAnalysisLastMonth.DataDate
return reList[0].DataDate
}
return ""
}

+ 11
- 5
models/user_business_analysis.go View File

@@ -2222,15 +2222,21 @@ func queryLoginActionCount(start_unix int64, end_unix int64) map[int64]int {
var indexTotal int64
indexTotal = 0
for {
statictisSess.Select("id,u_id").Table("user_login_action_log").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
statictisSess.Select("id,u_id,created_unix").Table("user_login_action_log").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
userLoginActionLogList := make([]*UserLoginActionLog, 0)
statictisSess.Find(&userLoginActionLogList)
log.Info("query user login action size=" + fmt.Sprint(len(userLoginActionLogList)))
cachemap := make(map[string]int)

for _, loginRecord := range userLoginActionLogList {
if _, ok := resultMap[loginRecord.UId]; !ok {
resultMap[loginRecord.UId] = 1
} else {
resultMap[loginRecord.UId] += 1
strkey := loginRecord.CreatedUnix.FormatShort() + fmt.Sprintf("%d", loginRecord.UId)
if _, ok := cachemap[strkey]; !ok {
if _, ok := resultMap[loginRecord.UId]; !ok {
resultMap[loginRecord.UId] = 1
} else {
resultMap[loginRecord.UId] += 1
}
cachemap[strkey] = 1
}
}
indexTotal += PAGE_SIZE


+ 1
- 0
options/locale/locale_en-US.ini View File

@@ -584,6 +584,7 @@ static.commentcount=Comment Count
static.focusrepocount=Focus Repo Count
static.starrepocount=Repo Star Count
static.logincount=Login Count
static.loginactioncount=Activity Day
static.watchedcount=Watched Count
static.commitcodesize=Commit Code Line
static.solveissuecount=Solve Issue Count


+ 1
- 0
options/locale/locale_zh-CN.ini View File

@@ -588,6 +588,7 @@ static.commentcount=评论数
static.focusrepocount=关注项目数
static.starrepocount=点赞项目数
static.logincount=登录次数
static.loginactioncount=活跃天数
static.watchedcount=关注者数
static.commitcodesize=commit代码行数
static.solveissuecount=已解决任务数


+ 4
- 4
routers/ai_task/ai_task.go View File

@@ -433,10 +433,10 @@ func GetImageInfoBySelectedSpec(ctx *context.Context) {
func GetCreationRequiredInfo(ctx *context.Context) {
jobType := ctx.Query("job_type")
var isOnlineType bool
if models.JobType(jobType) == (models.JobTypeOnlineInference) {
isOnlineType = true
jobType = string(models.JobTypeDebug)
}
// if models.JobType(jobType) == (models.JobTypeOnlineInference) {
// isOnlineType = true
// jobType = string(models.JobTypeDebug)
// }
log.Info("required jobType=" + jobType)
computeSourceName := ctx.Query("compute_source")
clusterType := ctx.Query("cluster_type")


+ 6
- 2
routers/api/v1/repo/attachments.go View File

@@ -101,7 +101,9 @@ func NewMultipart(ctx *context.APIContext) {
"msg": err.Error(),
})
} else {
routeRepo.AddFileNameToCache(datasetId, fileName, ctx.User.ID)
if !ignore {
routeRepo.AddFileNameToCache(datasetId, fileName, ctx.User.ID)
}
re["result_code"] = "0"
ctx.JSON(200, re)
}
@@ -193,7 +195,9 @@ func NewModelMultipart(ctx *context.APIContext) {
"msg": err.Error(),
})
} else {
routeRepo.AddModelFileNameToCache(modeluuid, fileName, ctx.User.ID)
if !ignore {
routeRepo.AddModelFileNameToCache(modeluuid, fileName, ctx.User.ID)
}
re["result_code"] = "0"
ctx.JSON(200, re)
}


+ 55
- 17
routers/repo/ai_model_convert.go View File

@@ -385,33 +385,44 @@ func createGpuTrainJob(modelConvert *models.AiModelConvert, ctx *context.Context
dataActualPath = setting.Attachment.Minio.RealPath + setting.Attachment.Minio.Bucket + "/" + setting.CBCodePathPrefix + modelConvert.ID + "/dataset"
}
log.Info("dataActualPath=" + dataActualPath)

bootfile := ""
runParms := make(map[string]interface{}, 0)
if modelConvert.SrcEngine == PYTORCH_ENGINE {
if modelConvert.DestFormat == CONVERT_FORMAT_ONNX {
command = getGpuModelConvertCommand(modelConvert.ID, modelConvert.ModelPath, modelConvert, setting.ModelConvert.PytorchOnnxBootFile)
bootfile = setting.ModelConvert.PytorchOnnxBootFile
runParms = getGpuModelConvertRunParams(modelConvert.ID, modelConvert.ModelPath, modelConvert, setting.ModelConvert.PytorchOnnxBootFile)
//command = getGpuModelConvertCommand(modelConvert.ID, modelConvert.ModelPath, modelConvert, setting.ModelConvert.PytorchOnnxBootFile)
} else if modelConvert.DestFormat == CONVERT_FORMAT_TRT {
command = getGpuModelConvertCommand(modelConvert.ID, modelConvert.ModelPath, modelConvert, setting.ModelConvert.PytorchTrTBootFile)
bootfile = setting.ModelConvert.PytorchTrTBootFile
runParms = getGpuModelConvertRunParams(modelConvert.ID, modelConvert.ModelPath, modelConvert, setting.ModelConvert.PytorchTrTBootFile)
//command = getGpuModelConvertCommand(modelConvert.ID, modelConvert.ModelPath, modelConvert, setting.ModelConvert.PytorchTrTBootFile)
} else {
return errors.New("Not support the format.")
}
} else if modelConvert.SrcEngine == TENSORFLOW_ENGINE {
IMAGE_URL = setting.ModelConvert.GPU_TENSORFLOW_IMAGE
if modelConvert.DestFormat == CONVERT_FORMAT_ONNX {
command = getGpuModelConvertCommand(modelConvert.ID, modelConvert.ModelPath, modelConvert, setting.ModelConvert.TensorFlowGpuBootFile)
bootfile = setting.ModelConvert.TensorFlowGpuBootFile
runParms = getGpuModelConvertRunParams(modelConvert.ID, modelConvert.ModelPath, modelConvert, setting.ModelConvert.TensorFlowGpuBootFile)
//command = getGpuModelConvertCommand(modelConvert.ID, modelConvert.ModelPath, modelConvert, setting.ModelConvert.TensorFlowGpuBootFile)
} else {
return errors.New("Not support the format.")
}
} else if modelConvert.SrcEngine == PADDLE_ENGINE {
IMAGE_URL = setting.ModelConvert.GPU_PADDLE_IMAGE
if modelConvert.DestFormat == CONVERT_FORMAT_ONNX {
command = getGpuModelConvertCommand(modelConvert.ID, modelConvert.ModelPath, modelConvert, setting.ModelConvert.PaddleOnnxBootFile)
bootfile = setting.ModelConvert.PaddleOnnxBootFile
runParms = getGpuModelConvertRunParams(modelConvert.ID, modelConvert.ModelPath, modelConvert, setting.ModelConvert.PaddleOnnxBootFile)
//command = getGpuModelConvertCommand(modelConvert.ID, modelConvert.ModelPath, modelConvert, setting.ModelConvert.PaddleOnnxBootFile)
} else {
return errors.New("Not support the format.")
}
} else if modelConvert.SrcEngine == MXNET_ENGINE {
IMAGE_URL = setting.ModelConvert.GPU_MXNET_IMAGE
if modelConvert.DestFormat == CONVERT_FORMAT_ONNX {
command = getGpuModelConvertCommand(modelConvert.ID, modelConvert.ModelPath, modelConvert, setting.ModelConvert.MXnetOnnxBootFile)
bootfile = setting.ModelConvert.MXnetOnnxBootFile
runParms = getGpuModelConvertRunParams(modelConvert.ID, modelConvert.ModelPath, modelConvert, setting.ModelConvert.MXnetOnnxBootFile)
//command = getGpuModelConvertCommand(modelConvert.ID, modelConvert.ModelPath, modelConvert, setting.ModelConvert.MXnetOnnxBootFile)
} else {
return errors.New("Not support the format.")
}
@@ -505,7 +516,7 @@ func createGpuTrainJob(modelConvert *models.AiModelConvert, ctx *context.Context
},
},
PreTrainModel: nil,
BootFile: "",
BootFile: bootfile,
OutPut: []entity.ContainerData{{
ContainerPath: "/tmp/output",
ReadOnly: false,
@@ -534,7 +545,7 @@ func createGpuTrainJob(modelConvert *models.AiModelConvert, ctx *context.Context
reqJson, _ := json.Marshal(req)
log.Info("reqJson=" + string(reqJson))

jobResult, err := createGrampusTrainJob(req, command)
jobResult, err := createGrampusTrainJob(req, command, runParms)

if err != nil {
log.Error("CreateJob failed:", err.Error(), ctx.Data["MsgID"])
@@ -594,8 +605,8 @@ func getGrampusTrainTaskConfig() *entity.AITaskBaseConfig {
return config
}

func createGrampusTrainJob(req entity.CreateTrainTaskRequest, exeCommand string) (*models.CreateGrampusJobResponse, error) {
jobResult, err := grampus.CreateJob(convertTrainReq2Grampus(req, exeCommand))
func createGrampusTrainJob(req entity.CreateTrainTaskRequest, exeCommand string, runParam map[string]interface{}) (*models.CreateGrampusJobResponse, error) {
jobResult, err := grampus.CreateJob(convertTrainReq2Grampus(req, exeCommand, runParam))
if err != nil {
log.Error("CreateNoteBook failed: %v", err.Error())
return nil, err
@@ -603,19 +614,19 @@ func createGrampusTrainJob(req entity.CreateTrainTaskRequest, exeCommand string)
return jobResult, nil
}

func convertTrainReq2Grampus(req entity.CreateTrainTaskRequest, exeCommand string) models.CreateGrampusJobRequest {
command := generateGrampusTrainCommand(req, exeCommand)
func convertTrainReq2Grampus(req entity.CreateTrainTaskRequest, exeCommand string, runParam map[string]interface{}) models.CreateGrampusJobRequest {
//command := generateGrampusTrainCommand(req, exeCommand)
command := ""
tasks := make([]models.GrampusTasks, len(req.Tasks))
for i := 0; i < len(req.Tasks); i++ {
t := req.Tasks[i]
tasks[i] = convertTrainTask2Grampus(t, command)
tasks[i] = convertTrainTask2Grampus(t, command, runParam)
}

return models.CreateGrampusJobRequest{Name: req.Name, Tasks: tasks}
}

func convertTrainTask2Grampus(t entity.TrainTask, command string) models.GrampusTasks {
func convertTrainTask2Grampus(t entity.TrainTask, command string, runParam map[string]interface{}) models.GrampusTasks {
return models.GrampusTasks{
Name: t.Name,
ResourceSpecId: t.ResourceSpecId,
@@ -630,6 +641,7 @@ func convertTrainTask2Grampus(t entity.TrainTask, command string) models.Grampus
BootFile: t.BootFile,
OutPut: convertContainerArray2Grampus(t.OutPut),
WorkServerNumber: t.WorkServerNumber,
RunParams: runParam,
}
}

@@ -713,8 +725,7 @@ func buildUnzipCodeCommand(codeConfigPath, codeFilePath, computeSource string) *
Next(entity.NewCommand("cd", codeConfigPath)).
Next(entity.NewCommand("unzip", "-q", codeFilePath)).
Next(entity.NewCommand("echo", "'unzip code finished'")).
Next(entity.NewCommand("ls", "-l")).
Next(entity.NewCommand("ls", "-l", "mnist_pytorchexample_gpu"))
Next(entity.NewCommand("ls", "-l"))
return builder
}
func buildUnzipDatasetCommand(datasets []entity.ContainerData, datasetPath, computeSource string) *entity.CommandBuilder {
@@ -806,6 +817,33 @@ func getGpuModelConvertCommand(name string, modelFile string, modelConvert *mode
return command
}

func getGpuModelConvertRunParams(name string, modelFile string, modelConvert *models.AiModelConvert, bootfile string) map[string]interface{} {
re := make(map[string]interface{}, 0)
inputshape := strings.Split(modelConvert.InputShape, ",")
n := "256"
c := "1"
h := "28"
w := "28"
if len(inputshape) == 4 {
n = inputshape[0]
c = inputshape[1]
h = inputshape[2]
w = inputshape[3]
}
re["model"] = modelFile
re["n"] = n
re["c"] = c
re["h"] = h
re["w"] = w
if modelConvert.DestFormat == CONVERT_FORMAT_TRT {
if modelConvert.NetOutputFormat == NetOutputFormat_FP16 {
re["fp16"] = "True"

}
}
return re
}

func DeleteModelConvert(ctx *context.Context) {
log.Info("delete model convert start.")
id := ctx.Params(":id")


+ 7
- 0
routers/repo/user_data_analysis.go View File

@@ -123,6 +123,7 @@ func getExcelHeader(ctx *context.Context) map[string]string {
excelHeader = append(excelHeader, ctx.Tr("user.static.email"))
excelHeader = append(excelHeader, ctx.Tr("user.static.phone"))
excelHeader = append(excelHeader, ctx.Tr("user.static.location"))
excelHeader = append(excelHeader, ctx.Tr("user.static.loginactioncount"))

excelHeader = append(excelHeader, ctx.Tr("user.static.registdate"))
excelHeader = append(excelHeader, ctx.Tr("user.static.countdate"))
@@ -211,6 +212,9 @@ func writeExcel(row int, xlsx *excelize.File, sheetName string, userRecord *mode
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.UserLocation)
tmp = tmp + 1

xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.LoginActionCount)
tmp = tmp + 1

formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05")
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, formatTime[0:len(formatTime)-3])
tmp = tmp + 1
@@ -291,6 +295,9 @@ func writeExcelPage(row int, xlsx *excelize.File, sheetName string, userRecord *
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.UserLocation)
tmp = tmp + 1

xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.LoginActionCount)
tmp = tmp + 1

formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05")
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, formatTime[0:len(formatTime)-3])
tmp = tmp + 1


+ 5
- 5
services/ai_task_service/task/grampus_online_infer_task.go View File

@@ -85,7 +85,7 @@ func (t GrampusOnlineInferTaskTemplate) Create(ctx *context.CreationContext) (*e
AsyncNextWithErrFun(t.BuildContainerData, t.GetAvailableQueues, t.CallCreationAPI, t.AfterCallCreationAPI4Async, t.NotifyCreation, t.HandleErr4Async).
Operate(ctx)
if err != nil {
log.Error("create GrampusNoteBookTask err.%v", err)
log.Error("create CreateOnlineInfer err.%v", err)
return nil, err
}
return &entity.CreateTaskRes{ID: ctx.NewCloudbrain.ID}, nil
@@ -134,11 +134,11 @@ func (g GrampusOnlineInferTaskTemplate) CallCreationAPI(ctx *context.CreationCon
createTime := timeutil.TimeStampNow()
res, err := c.CreateOnlineInfer(req)
if err != nil {
log.Error("GrampusNoteBookTask CreateNoteBook err.req=%+v err=%v", req, err)
log.Error("GrampusNoteBookTask CreateOnlineInfer err.req=%+v err=%v", req, err)
return response.NewBizError(err)
}
if res.JobID == "" {
log.Error("GrampusNoteBookTask CreateNoteBook failed.Cloudbrain.JobID=%s", ctx.SourceCloudbrain.JobID)
log.Error("GrampusNoteBookTask CreateOnlineInfer failed.Cloudbrain.JobID=%s", ctx.SourceCloudbrain.JobID)
return response.CREATE_FAILED
}
ctx.Response = &entity.CreationResponse{
@@ -152,7 +152,7 @@ func (g GrampusOnlineInferTaskTemplate) CallCreationAPI(ctx *context.CreationCon
func (g GrampusOnlineInferTaskTemplate) LoadSpec(ctx *context.CreationContext) *response.BizError {
//check specification
spec, err := resource.GetAndCheckSpec(ctx.User.ID, ctx.Request.SpecId, models.FindSpecsOptions{
JobType: models.JobTypeDebug,
JobType: models.JobTypeOnlineInference,
ComputeResource: ctx.Request.ComputeSource.Name,
Cluster: ctx.Request.Cluster.GetParentCluster(),
HasInternet: ctx.Request.HasInternet,
@@ -167,7 +167,7 @@ func (g GrampusOnlineInferTaskTemplate) LoadSpec(ctx *context.CreationContext) *
func (GrampusOnlineInferTaskTemplate) GetAvailableQueues(ctx *context.CreationContext) *response.BizError {
ctx.Queues = ctx.Spec.GetAvailableQueues(models.GetAvailableCenterIdOpts{
UserId: ctx.User.ID,
JobType: models.JobTypeDebug,
JobType: models.JobTypeOnlineInference,
HasInternet: ctx.Request.HasInternet,
})
return nil


Loading…
Cancel
Save