diff --git a/entity/ai_task.go b/entity/ai_task.go index ea768ebe3d..5e4a2d8f0c 100644 --- a/entity/ai_task.go +++ b/entity/ai_task.go @@ -1,6 +1,7 @@ package entity import ( + "archive/zip" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/storage" "encoding/json" @@ -275,7 +276,14 @@ type GetAllOutputReq struct { Suffix []string } -type GetOutputDownloadInfoReq struct { +type DownloadAllFileReq struct { + CloudbrainId int64 + FileName string + ParentDir string + ZIPWriter *zip.Writer +} + +type GetSingleDownloadInfoReq struct { CloudbrainId int64 FileName string ParentDir string diff --git a/entity/cluster.go b/entity/cluster.go index 43f7e9f161..ce65cc669e 100644 --- a/entity/cluster.go +++ b/entity/cluster.go @@ -1,6 +1,7 @@ package entity import ( + "archive/zip" "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/cloudbrain" "code.gitea.io/gitea/modules/log" @@ -318,7 +319,14 @@ type ClusterLogDownloadInfoOpts struct { DisplayJobName string } -type ClusterOutputDownloadInfoOpts struct { +type DownloadOutputOpts struct { + JobId string + Path string + JobName string + StorageType StorageType + ZIPWriter *zip.Writer +} +type ClusterSingleOutputDownloadInfoOpts struct { JobId string Path string JobName string diff --git a/models/cloudbrain.go b/models/cloudbrain.go index f6eb5d14f2..a77c190995 100755 --- a/models/cloudbrain.go +++ b/models/cloudbrain.go @@ -2267,6 +2267,12 @@ func QueryModelTrainJobList(repoId int64) ([]*Cloudbrain, int, error) { return uniqueElements, int(len(uniqueElements)), nil } +func CountByRawSql(sql string) (int64, error) { + return x.SQL(sql).Count() +} +func QueryByRawSql(sql string) ([]map[string]string, error) { + return x.QueryString(sql) +} func CloudbrainsVersionList(opts *CloudbrainsOptions) ([]*CloudbrainInfo, int, error) { sess := x.NewSession() diff --git a/models/repo_activity_custom.go b/models/repo_activity_custom.go index 26b2ea14f7..e2ddddbddd 100644 --- a/models/repo_activity_custom.go +++ b/models/repo_activity_custom.go @@ -238,12 +238,8 @@ func GetAllUserPublicRepoKPIStats(startTime time.Time, endTime time.Time) (map[s CommitLines: 0, } } - if value.Email == "1250125907@qq.com" || value.Email == "peiyongyu-34@163.com" { - log.Info("repo path=" + repository.RepoPath()) - } authors[key].Commits += value.Commits authors[key].CommitLines += value.CommitLines - } } diff --git a/models/role.go b/models/role.go index c4bb4863b6..35136d182e 100644 --- a/models/role.go +++ b/models/role.go @@ -11,6 +11,7 @@ type RoleType string const ( TechProgramAdmin RoleType = "TechProgramAdmin" RewardPointAdmin RoleType = "RewardPointAdmin" + MonitorAdmin RoleType = "MonitorAdmin" ) type Role struct { diff --git a/models/user_business_analysis.go b/models/user_business_analysis.go index b01080cc6a..9f2e416a4d 100644 --- a/models/user_business_analysis.go +++ b/models/user_business_analysis.go @@ -10,6 +10,7 @@ import ( "strings" "time" + "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/timeutil" @@ -345,8 +346,8 @@ func QueryUserStaticDataForUserDefine(opts *UserBusinessAnalysisQueryOptions, wi log.Info("query commit code errr.") } else { log.Info("query commit code size, len=" + fmt.Sprint(len(CommitCodeSizeMap))) - CommitCodeSizeMapJson, _ := json.Marshal(CommitCodeSizeMap) - log.Info("CommitCodeSizeMapJson=" + string(CommitCodeSizeMapJson)) + //CommitCodeSizeMapJson, _ := json.Marshal(CommitCodeSizeMap) + //log.Info("CommitCodeSizeMapJson=" + 
string(CommitCodeSizeMapJson)) } CommitDatasetSizeMap, CommitDatasetNumMap, _ := queryDatasetSize(start_unix, end_unix) SolveIssueCountMap := querySolveIssue(start_unix, end_unix) @@ -366,7 +367,7 @@ func QueryUserStaticDataForUserDefine(opts *UserBusinessAnalysisQueryOptions, wi defer statictisSess.Close() cond := "type != 1 and is_active=true" - count, err := sess.Where(cond).Count(new(User)) + count, _ := sess.Where(cond).Count(new(User)) ParaWeight := getParaWeight() ResultList := make([]*UserBusinessAnalysis, 0) @@ -377,10 +378,10 @@ func QueryUserStaticDataForUserDefine(opts *UserBusinessAnalysisQueryOptions, wi userList := make([]*User, 0) sess.Find(&userList) - for i, userRecord := range userList { + for _, userRecord := range userList { var dateRecord UserBusinessAnalysis dateRecord.ID = userRecord.ID - log.Info("i=" + fmt.Sprint(i) + " userName=" + userRecord.Name) + //log.Info("i=" + fmt.Sprint(i) + " userName=" + userRecord.Name) dateRecord.CountDate = CountDate.Unix() dateRecord.DataDate = DataDate dateRecord.Email = userRecord.Email @@ -573,6 +574,37 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS statictisSess := xStatistic.NewSession() defer statictisSess.Close() + + var CommitCodeSizeMap map[string]*git.UserKPIStats + var err error + var existCommitCodeSize map[int64]int + if tableName == "user_business_analysis_all" || tableName == "user_business_analysis_current_year" { + + oneDayStartTime := pageEndTime.AddDate(0, 0, -1) + if oneDayStartTime.Format("2006-01-02") == pageStartTime.Format("2006-01-02") { + existCommitCodeSize = make(map[int64]int, 0) + } else { + existCommitCodeSize = queryCommitCodeSizeFromDb("public." + tableName) + } + log.Info("GetAllUserKPIStats oneDayStartTime=" + oneDayStartTime.Format("2006-01-02 15:04:05")) + log.Info("GetAllUserKPIStats pageEndTime=" + pageEndTime.Format("2006-01-02 15:04:05")) + log.Info("existCommitCodeSize len=" + fmt.Sprint(len(existCommitCodeSize))) + CommitCodeSizeMap, err = GetAllUserKPIStats(oneDayStartTime, pageEndTime) + if err != nil { + log.Info("query commit code errr.") + } else { + log.Info("query commit code size, len=" + fmt.Sprint(len(CommitCodeSizeMap))) + } + } else { + existCommitCodeSize = make(map[int64]int, 0) + CommitCodeSizeMap, err = GetAllUserKPIStats(pageStartTime, pageEndTime) + if err != nil { + log.Info("query commit code errr.") + } else { + log.Info("query commit code size, len=" + fmt.Sprint(len(CommitCodeSizeMap))) + } + } + log.Info("truncate all data from table: " + tableName) statictisSess.Exec("TRUNCATE TABLE " + tableName) @@ -593,15 +625,7 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS FocusRepoCountMap := queryWatch(start_unix, end_unix) StarRepoCountMap := queryStar(start_unix, end_unix) WatchedCountMap, WatchOtherMap := queryFollow(start_unix, end_unix) - CommitCodeSizeMap, err := GetAllUserKPIStats(pageStartTime, pageEndTime) - if err != nil { - log.Info("query commit code errr.") - } else { - log.Info("query commit code size, len=" + fmt.Sprint(len(CommitCodeSizeMap))) - //CommitCodeSizeMapJson, _ := json.Marshal(CommitCodeSizeMap) - //log.Info("CommitCodeSizeMapJson=" + string(CommitCodeSizeMapJson)) - } - //CommitCodeSizeMap := queryCommitCodeSize(StartTimeNextDay.Unix(), EndTimeNextDay.Unix()) + CommitDatasetSizeMap, CommitDatasetNumMap, _ := queryDatasetSize(start_unix, end_unix) SolveIssueCountMap := querySolveIssue(start_unix, end_unix) CreateRepoCountMap, _, _ := queryUserCreateRepo(start_unix, end_unix) 
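The refreshUserStaticTable changes above, together with the CommitCodeSize hunk that follows, stop recomputing commit lines over the whole period each time user_business_analysis_all or user_business_analysis_current_year is rebuilt: the totals already stored per user are read back through queryCommitCodeSizeFromDb and only the last day's GetAllUserKPIStats result is added on top. A minimal sketch of that accumulation rule, assuming a simplified stand-in for git.UserKPIStats and an illustrative idByEmail map in place of matching user records by email:

```go
package main

import "fmt"

// userKPIStats mirrors the fields of git.UserKPIStats that matter here.
type userKPIStats struct {
	Email       string
	CommitLines int64
}

// mergeCommitCodeSize reproduces the accumulation rule used when the
// cumulative statistics tables are refreshed: start from the totals already
// stored per user id, then add the commit lines reported for the most
// recent day, matched by email.
func mergeCommitCodeSize(existing map[int64]int, lastDay map[string]*userKPIStats, idByEmail map[string]int64) map[int64]int {
	merged := make(map[int64]int, len(existing))
	for id, size := range existing {
		merged[id] = size
	}
	for email, stats := range lastDay {
		id, ok := idByEmail[email]
		if !ok {
			continue // commits whose email maps to no known user are skipped
		}
		merged[id] += int(stats.CommitLines)
	}
	return merged
}

func main() {
	existing := map[int64]int{1: 1200, 2: 40}
	lastDay := map[string]*userKPIStats{"a@example.com": {Email: "a@example.com", CommitLines: 35}}
	idByEmail := map[string]int64{"a@example.com": 1}
	fmt.Println(mergeCommitCodeSize(existing, lastDay, idByEmail)) // map[1:1235 2:40]
}
```

When the refresh window itself is a single day, the patch skips the read-back and starts from an empty map, which in this sketch corresponds to existing being empty.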
@@ -659,11 +683,10 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS dateRecordAll.StarRepoCount = getMapValue(dateRecordAll.ID, StarRepoCountMap) dateRecordAll.WatchedCount = getMapValue(dateRecordAll.ID, WatchedCountMap) if _, ok := CommitCodeSizeMap[dateRecordAll.Email]; !ok { - dateRecordAll.CommitCodeSize = 0 + dateRecordAll.CommitCodeSize = getMapValue(dateRecordAll.ID, existCommitCodeSize) } else { - dateRecordAll.CommitCodeSize = int(CommitCodeSizeMap[dateRecordAll.Email].CommitLines) + dateRecordAll.CommitCodeSize = int(CommitCodeSizeMap[dateRecordAll.Email].CommitLines) + getMapValue(dateRecordAll.ID, existCommitCodeSize) } - //dateRecordAll.CommitCodeSize = getMapValue(dateRecordAll.ID, CommitCodeSizeMap) dateRecordAll.CommitDatasetSize = getMapValue(dateRecordAll.ID, CommitDatasetSizeMap) dateRecordAll.CommitDatasetNum = getMapValue(dateRecordAll.ID, CommitDatasetNumMap) dateRecordAll.SolveIssueCount = getMapValue(dateRecordAll.ID, SolveIssueCountMap) @@ -1787,6 +1810,41 @@ func queryMostActiveCommitAction(start_unix int64, end_unix int64) map[int64]map return mostActiveMap } +func queryCommitCodeSizeFromDb(tableName string) map[int64]int { + statictisSess := xStatistic.NewSession() + defer statictisSess.Close() + resultMap := make(map[int64]int) + count, err := statictisSess.Table(tableName).Count() + if err != nil { + log.Info("query " + tableName + " error. return." + err.Error()) + return resultMap + } + var indexTotal int64 + indexTotal = 0 + for { + commit_code_sizeList, err := statictisSess.QueryInterface("select id,commit_code_size from " + tableName + " order by id asc limit " + fmt.Sprint(PAGE_SIZE) + " offset " + fmt.Sprint(indexTotal)) + if err != nil { + log.Info("error:" + err.Error()) + continue + } + log.Info("query " + tableName + " size=" + fmt.Sprint(len(commit_code_sizeList))) + for _, record := range commit_code_sizeList { + userId := convertInterfaceToInt64(record["id"]) + commit_code_size := convertInterfaceToInt64(record["commit_code_size"]) + if _, ok := resultMap[userId]; !ok { + resultMap[userId] = int(commit_code_size) + } else { + resultMap[userId] += int(commit_code_size) + } + } + indexTotal += PAGE_SIZE + if indexTotal >= count { + break + } + } + return resultMap +} + func queryCommitAction(start_unix int64, end_unix int64, actionType int64) map[int64]int { sess := x.NewSession() defer sess.Close() @@ -2130,39 +2188,6 @@ func queryRecommedImage(start_unix int64, end_unix int64) map[int64]int { return userIdImageMap } -func queryAllImage() (map[int64]int64, map[int64]int64) { - sess := x.NewSession() - defer sess.Close() - imageUserIdMap := make(map[int64]int64) - userIdDImageMap := make(map[int64]int64) - count, err := sess.Count(new(Image)) - if err != nil { - log.Info("query image error. 
return.") - return imageUserIdMap, userIdDImageMap - } - var indexTotal int64 - indexTotal = 0 - for { - sess.Select("id,uid").Table(new(Image)).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) - imageList := make([]*Image, 0) - sess.Find(&imageList) - log.Info("query imageList size=" + fmt.Sprint(len(imageList))) - for _, imageRecord := range imageList { - imageUserIdMap[imageRecord.ID] = imageRecord.UID - if _, ok := userIdDImageMap[imageRecord.UID]; !ok { - userIdDImageMap[imageRecord.UID] = 1 - } else { - userIdDImageMap[imageRecord.UID] += 1 - } - } - indexTotal += PAGE_SIZE - if indexTotal >= count { - break - } - } - return imageUserIdMap, userIdDImageMap -} - func queryDatasetStars(start_unix int64, end_unix int64) (map[int64]int, map[int64]int) { sess := x.NewSession() defer sess.Close() @@ -2425,40 +2450,6 @@ func queryLoginCount(start_unix int64, end_unix int64) map[int64]int { return resultMap } -func queryCommitCodeSize(start_unix int64, end_unix int64) map[int64]int { - statictisSess := xStatistic.NewSession() - defer statictisSess.Close() - - resultMap := make(map[int64]int) - cond := "count_date>=" + fmt.Sprint(start_unix) + " and count_date<=" + fmt.Sprint(end_unix) - count, err := statictisSess.Where(cond).Count(new(UserBusinessAnalysis)) - if err != nil { - log.Info("query commit code size error. return.") - return resultMap - } - var indexTotal int64 - indexTotal = 0 - for { - statictisSess.Select("id,commit_code_size").Table("user_business_analysis").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) - userBusinessAnalysisList := make([]*UserBusinessAnalysis, 0) - statictisSess.Find(&userBusinessAnalysisList) - log.Info("query user login size=" + fmt.Sprint(len(userBusinessAnalysisList))) - for _, analysisRecord := range userBusinessAnalysisList { - if _, ok := resultMap[analysisRecord.ID]; !ok { - resultMap[analysisRecord.ID] = analysisRecord.CommitCodeSize - } else { - resultMap[analysisRecord.ID] += analysisRecord.CommitCodeSize - } - } - indexTotal += PAGE_SIZE - if indexTotal >= count { - break - } - } - log.Info("user commit code size=" + fmt.Sprint(len(resultMap))) - return resultMap -} - func queryUserModel(start_unix int64, end_unix int64) map[int64]int { sess := x.NewSession() defer sess.Close() diff --git a/modules/auth/wechat/cloudbrain.go b/modules/auth/wechat/cloudbrain.go index 71dae27a4e..2e1b9a8840 100644 --- a/modules/auth/wechat/cloudbrain.go +++ b/modules/auth/wechat/cloudbrain.go @@ -142,11 +142,11 @@ func getCloudbrainTemplateUrl(cloudbrain models.Cloudbrain, repo *models.Reposit url += "/cloudbrain/benchmark/" + fmt.Sprint(cloudbrain.ID) case string(models.JobTypeTrain): if cloudbrain.Type == models.TypeCloudBrainOne { - url += "/cloudbrain/train-job/" + fmt.Sprint(cloudbrain.JobID) + url += "/cloudbrain/train-job/" + fmt.Sprint(cloudbrain.ID) } else if cloudbrain.Type == models.TypeCloudBrainTwo { - url += "/modelarts/train-job/" + fmt.Sprint(cloudbrain.JobID) + url += "/modelarts/train-job/" + fmt.Sprint(cloudbrain.ID) } else if cloudbrain.Type == models.TypeC2Net { - url += "/grampus/train-job/" + fmt.Sprint(cloudbrain.JobID) + url += "/grampus/train-job/" + fmt.Sprint(cloudbrain.ID) } case string(models.JobTypeInference): url += "/modelarts/inference-job/" + fmt.Sprint(cloudbrain.JobID) diff --git a/modules/git/blame.go b/modules/git/blame.go index 5a9ae9a74f..3a365e6dea 100644 --- a/modules/git/blame.go +++ b/modules/git/blame.go @@ -66,7 +66,9 @@ func (r *BlameReader) NextPart() (*BlamePart, error) { } } else if line[0] == 
'\t' { code := line[1:] - + if blamePart == nil { + continue + } blamePart.Lines = append(blamePart.Lines, code) } } diff --git a/modules/setting/setting.go b/modules/setting/setting.go index 4e158cc730..e25ed4ba46 100755 --- a/modules/setting/setting.go +++ b/modules/setting/setting.go @@ -498,7 +498,6 @@ var ( RefNameOfNoticeRepo string TreePathOfNoticeRepo string CacheTimeOutSecond int - CacheOn bool //labelsystem config LabelTaskName string @@ -1545,7 +1544,6 @@ func NewContext() { RefNameOfNoticeRepo = sec.Key("REF_NAME").MustString("master") TreePathOfNoticeRepo = sec.Key("TREE_PATH").MustString("notice.json") CacheTimeOutSecond = sec.Key("CACHE_TIME_OUT_SECOND").MustInt(60) - CacheOn = sec.Key("CACHE_ON").MustBool(true) sec = Cfg.Section("cloudbrain") CBAuthUser = sec.Key("USER").MustString("") diff --git a/modules/util/lock.go b/modules/util/lock.go new file mode 100644 index 0000000000..44c1799964 --- /dev/null +++ b/modules/util/lock.go @@ -0,0 +1,17 @@ +package util + +import ( + "sync/atomic" +) + +type NonBlockingLock struct { + state int32 +} + +func (m *NonBlockingLock) TryLock() bool { + return atomic.CompareAndSwapInt32(&m.state, 0, 1) +} + +func (m *NonBlockingLock) Unlock() { + atomic.StoreInt32(&m.state, 0) +} diff --git a/modules/util/util.go b/modules/util/util.go index 87dd5b7006..aa9b6c9f3f 100755 --- a/modules/util/util.go +++ b/modules/util/util.go @@ -125,3 +125,13 @@ func cutNameString(str string, lens int) string { } return str[:lens] } + +func GetTotalPage(total int64, pageSize int) int { + + another := 0 + if int(total)%pageSize != 0 { + another = 1 + } + return int(total)/pageSize + another + +} diff --git a/options/locale/locale_en-US.ini b/options/locale/locale_en-US.ini index 576309e045..976e119ab3 100755 --- a/options/locale/locale_en-US.ini +++ b/options/locale/locale_en-US.ini @@ -3276,6 +3276,8 @@ head.project = Repositories head.openi = OpenI head.openi.repo = OpenI Projects head.dataset = Datasets +head.forum=Forum +head.course=Courses foot.council = Council foot.technical_committee = Technical Committee foot.join = Join OpenI diff --git a/options/locale/locale_zh-CN.ini b/options/locale/locale_zh-CN.ini index dfcaf050eb..4b96661aeb 100755 --- a/options/locale/locale_zh-CN.ini +++ b/options/locale/locale_zh-CN.ini @@ -3294,6 +3294,8 @@ head.project=项目 head.openi=启智项目 head.openi.repo = 启智项目 head.dataset=数据集 +head.forum=论坛 +head.course=实训课程 foot.council=理事会 foot.technical_committee=技术委员会 foot.join=加入启智 diff --git a/routers/ai_task/ai_task.go b/routers/ai_task/ai_task.go index 07c34aca3d..d065b6321d 100644 --- a/routers/ai_task/ai_task.go +++ b/routers/ai_task/ai_task.go @@ -1,6 +1,7 @@ package ai_task import ( + "archive/zip" "code.gitea.io/gitea/entity" "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/cloudbrain" @@ -13,6 +14,7 @@ import ( "code.gitea.io/gitea/services/ai_task_service/schedule" "code.gitea.io/gitea/services/ai_task_service/task" "net/http" + "net/url" "strings" ) @@ -164,7 +166,7 @@ func DownloadOutputFile(ctx *context.Context) { ctx.JSON(http.StatusOK, response.OuterTrBizError(err, ctx)) return } - res, err := t.GetSingleOutputDownloadInfo(entity.GetOutputDownloadInfoReq{ + res, err := t.GetSingleOutputDownloadInfo(entity.GetSingleDownloadInfoReq{ CloudbrainId: id, FileName: fileName, ParentDir: parentDir, @@ -204,28 +206,20 @@ func DownloadAllOutputFile(ctx *context.Context) { ctx.JSON(http.StatusOK, response.OuterTrBizError(err, ctx)) return } - res, err := t.GetAllOutputDownloadInfo(entity.GetOutputDownloadInfoReq{ + 
resultFileName := cloudbrain.JobName + ".zip" + ctx.Resp.Header().Set("Content-Disposition", "attachment; filename="+url.QueryEscape(resultFileName)) + ctx.Resp.Header().Set("Content-Type", "application/octet-stream") + zipWriter := zip.NewWriter(ctx.Resp) + defer zipWriter.Close() + err = t.DownloadAllOutput(entity.DownloadAllFileReq{ CloudbrainId: id, + ZIPWriter: zipWriter, }) if err != nil { - log.Error("GetAllOutputDownloadInfo error.%v", err) + log.Error("DownloadAllOutput error.%v", err) ctx.JSON(http.StatusOK, response.OuterTrBizError(err, ctx)) return } - - if res == nil || res.IsEmpty() { - log.Error("DownloadAllOutputFile error.%v", err) - ctx.JSON(http.StatusNotFound, "") - return - } - - tmpErr := common.WriteDownloadContent2Resp(ctx, res) - if tmpErr != nil { - log.Error("DownloadAITaskLog error.%v", tmpErr) - ctx.JSON(http.StatusOK, response.OuterResponseError(tmpErr)) - return - } - } func GetAITaskInfo(ctx *context.Context) { @@ -492,6 +486,19 @@ func GetAITaskResourceUsage(ctx *context.Context) { } ctx.JSON(http.StatusOK, response.OuterSuccessWithData(r)) } +func GetAllMonitorAITask(ctx *context.Context) { + + fileName := "cloudbrain.csv" + status := ctx.QueryTrim("status") + + ctx.Resp.Header().Set("Content-Disposition", "attachment; filename="+url.QueryEscape(fileName)) + ctx.Resp.Header().Set("Content-Type", "application/octet-stream") + err := task.MonitorTaskFile(status, ctx) + if err != nil { + log.Error("get monitor tasks err", err) + } + +} func RetryModelSchedule(ctx *context.APIContext) { id := ctx.QueryInt64("id") diff --git a/routers/api/v1/api.go b/routers/api/v1/api.go index b0c0e75ece..8815aa951d 100755 --- a/routers/api/v1/api.go +++ b/routers/api/v1/api.go @@ -680,6 +680,18 @@ func RegisterRoutes(m *macaron.Macaron) { m.Post("/markdown", bind(api.MarkdownOption{}), misc.Markdown) m.Post("/markdown/raw", misc.MarkdownRaw) + m.Group("/monitor", func() { + m.Group("/:username/:reponame", func() { + m.Group("/ai_task", func() { + m.Get("", HasRole(models.MonitorAdmin), ai_task.GetAITaskInfo) + m.Get("/log/download", HasRole(models.MonitorAdmin), reqAITaskInRepo(), ai_task.DownloadAITaskLog) + m.Get("/resource_usage", HasRole(models.MonitorAdmin), reqAITaskInRepo(), ai_task.GetAITaskResourceUsage) + }, repoAssignment()) + + }) + m.Get("/ai_tasks", HasRole(models.MonitorAdmin), ai_task.GetAllMonitorAITask) + }) + m.Group("/images", func() { m.Get("/public", repo.GetPublicImages) @@ -1252,6 +1264,7 @@ func RegisterRoutes(m *macaron.Macaron) { m.Get("/show_model_api", repo.ShowModelManageApi) m.Delete("/delete_model", repo.DeleteModel) m.Get("/downloadall", repo.DownloadModel) + m.Get("/downloadsingle/:ID", repo.DownloadModelSingle) m.Get("/query_model_byId", repo.QueryModelById) m.Get("/query_model_byName", repo.QueryModelByName) m.Get("/query_model_for_predict", repo.QueryModelListForPredict) diff --git a/routers/api/v1/repo/attachments.go b/routers/api/v1/repo/attachments.go index f6733707bf..74d40d822d 100644 --- a/routers/api/v1/repo/attachments.go +++ b/routers/api/v1/repo/attachments.go @@ -102,14 +102,71 @@ func GetAttachment(ctx *context.APIContext) { func GetModelChunks(ctx *context.APIContext) { log.Info("GetModelChunks by api.") + modeluuid := ctx.Query("modeluuid") + model, err := models.QueryModelById(modeluuid) + if err == nil { + if errStr := checkModelPermission(ctx, model); errStr != "" { + ctx.JSON(200, map[string]string{ + "result_code": "-1", + "msg": errStr, + }) + return + } + } else { + ctx.JSON(200, map[string]string{ + "result_code": 
"-1", + "msg": "model not exist.", + }) + return + } routeRepo.GetModelChunks(ctx.Context) } func NewModelMultipart(ctx *context.APIContext) { log.Info("NewModelMultipart by api.") + modeluuid := ctx.Query("modeluuid") + model, err := models.QueryModelById(modeluuid) + if err == nil { + if errStr := checkModelPermission(ctx, model); errStr != "" { + ctx.JSON(200, map[string]string{ + "result_code": "-1", + "msg": errStr, + }) + return + } + } else { + ctx.JSON(200, map[string]string{ + "result_code": "-1", + "msg": "model not exist.", + }) + return + } + routeRepo.NewModelMultipart(ctx.Context) } +func checkModelPermission(ctx *context.APIContext, model *models.AiModelManage) string { + if ctx.User == nil { + return "User not login." + } + if ctx.Repo.Repository == nil { + repo, err := models.GetRepositoryByID(model.RepoId) + if err == nil { + ctx.Repo.Repository = repo + owner, err := models.GetUserByID(repo.OwnerID) + if err == nil { + ctx.Repo.Owner = owner + } + } else { + return "Repo is not exist." + } + } + if !routeRepo.IsOperModifyOrDelete(ctx.Context, model.UserId) { + return "User has not right to operate." + } + return "" +} + func GetModelMultipartUploadUrl(ctx *context.APIContext) { log.Info("GetModelMultipartUploadUrl by api.") routeRepo.GetModelMultipartUploadUrl(ctx.Context) diff --git a/routers/api/v1/repo/cloudbrain_dashboard.go b/routers/api/v1/repo/cloudbrain_dashboard.go index 4c7b70b281..b0d1432228 100755 --- a/routers/api/v1/repo/cloudbrain_dashboard.go +++ b/routers/api/v1/repo/cloudbrain_dashboard.go @@ -9,6 +9,8 @@ import ( "strings" "time" + "code.gitea.io/gitea/modules/util" + "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/services/cloudbrain/resource" @@ -965,7 +967,7 @@ func GetCloudbrainsDetailData(ctx *context.Context) { taskDetail.Spec = ciTasks[i].Spec tasks = append(tasks, taskDetail) } - pager := context.NewPagination(int(count), pageSize, page, getTotalPage(count, pageSize)) + pager := context.NewPagination(int(count), pageSize, page, util.GetTotalPage(count, pageSize)) pager.SetDefaultParams(ctx) pager.AddParam(ctx, "listType", "ListType") @@ -1461,7 +1463,7 @@ func DownloadCloudBrainBoard(ctx *context.Context) { ctx.Error(http.StatusBadRequest, ctx.Tr("repo.cloudbrain_query_fail")) return } - totalPage := getTotalPage(total, pageSize) + totalPage := util.GetTotalPage(total, pageSize) f := excelize.NewFile() index := f.NewSheet(cloudBrain) diff --git a/routers/api/v1/repo/modelmanage.go b/routers/api/v1/repo/modelmanage.go index d4187348db..a653b66260 100644 --- a/routers/api/v1/repo/modelmanage.go +++ b/routers/api/v1/repo/modelmanage.go @@ -40,14 +40,21 @@ func DownloadModel(ctx *context.APIContext) { routerRepo.DownloadMultiModelFile(ctx.Context) } +func DownloadModelSingle(ctx *context.APIContext) { + log.Info("DownloadModel by api.") + routerRepo.DownloadSingleModelFile(ctx.Context) +} + func QueryModelById(ctx *context.APIContext) { log.Info("QueryModelById by api.") - routerRepo.QueryModelById(ctx.Context) + model := routerRepo.QueryModelObjById(ctx.Context) + ctx.JSON(200, model) } func QueryModelByName(ctx *context.APIContext) { log.Info("QueryModelByName by api.") - routerRepo.ShowSingleModel(ctx.Context) + models := routerRepo.QueryModelObjByName(ctx.Context) + ctx.JSON(200, models) } func QueryModelListForPredict(ctx *context.APIContext) { diff --git a/routers/api/v1/repo/repo_dashbord.go b/routers/api/v1/repo/repo_dashbord.go index e3a54b4fab..730acc913c 100644 --- a/routers/api/v1/repo/repo_dashbord.go +++ 
b/routers/api/v1/repo/repo_dashbord.go @@ -8,6 +8,8 @@ import ( "strings" "time" + "code.gitea.io/gitea/modules/util" + "github.com/360EntSecGroup-Skylar/excelize/v2" "code.gitea.io/gitea/models" @@ -290,7 +292,7 @@ func GetAllProjectsPeriodStatistics(ctx *context.Context) { projectsPeriodData := ProjectsPeriodData{ RecordBeginTime: recordBeginTime.Format(DATE_FORMAT), PageSize: pageSize, - TotalPage: getTotalPage(total, pageSize), + TotalPage: util.GetTotalPage(total, pageSize), TotalCount: total, LastUpdatedTime: latestUpdatedTime, PageRecords: models.GetRepoStatisticByRawSql(sql), @@ -349,7 +351,7 @@ func ServeAllProjectsPeriodStatisticsFile(ctx *context.Context) { var projectAnalysis = ctx.Tr("repo.repo_stat_inspect") fileName := getFileName(ctx, beginTime, endTime, projectAnalysis) - totalPage := getTotalPage(total, pageSize) + totalPage := util.GetTotalPage(total, pageSize) f := excelize.NewFile() @@ -431,7 +433,7 @@ func GetProjectsSummaryDataFile(ctx *context.Context) { if queryType == "all" || queryType == "current_year" { dates := getEndOfMonthDates(beginTime, endTime) total, _ = models.GetSummaryStatisticByDateCount(dates) - totalPage := getTotalPage(total, pageSize) + totalPage := util.GetTotalPage(total, pageSize) for i := 0; i < totalPage; i++ { @@ -458,7 +460,7 @@ func GetProjectsSummaryDataFile(ctx *context.Context) { } else { total, _ = models.GetSummaryStatisticByTimeCount(beginTime, endTime) - totalPage := getTotalPage(total, pageSize) + totalPage := util.GetTotalPage(total, pageSize) for i := 0; i < totalPage; i++ { @@ -530,7 +532,7 @@ func ServeAllProjectsOpenIStatisticsFile(ctx *context.Context) { var projectAnalysis = ctx.Tr("repo.repo_stat_inspect") fileName := "项目分析_OPENI_" + date + ".xlsx" - totalPage := getTotalPage(total, pageSize) + totalPage := util.GetTotalPage(total, pageSize) f := excelize.NewFile() @@ -960,16 +962,6 @@ func getGrowthRecordBeginTime() (time.Time, error) { return time.ParseInLocation(DATE_FORMAT, setting.RadarMap.GrowthBeginTime, time.Local) } -func getTotalPage(total int64, pageSize int) int { - - another := 0 - if int(total)%pageSize != 0 { - another = 1 - } - return int(total)/pageSize + another - -} - func ProjectNumVisit(ctx *context.APIContext) { var ( err error diff --git a/routers/notice/notice.go b/routers/notice/notice.go index 5a39ee684f..875107b2de 100644 --- a/routers/notice/notice.go +++ b/routers/notice/notice.go @@ -4,6 +4,7 @@ import ( "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/util" "encoding/json" "github.com/patrickmn/go-cache" "time" @@ -35,14 +36,7 @@ func GetNewestNotice() (*NoticeResponse, error) { } }() - var notice *NoticeResponse - var err error - if setting.CacheOn { - notice, err = getNewestNoticesFromCacheAndDisk() - } else { - notice, err = getNewestNoticesFromDisk() - } - + notice, err := getNewestNoticesFromCacheAndDisk() if err != nil { return nil, err } @@ -74,6 +68,9 @@ func getNewestNoticesFromDisk() (*NoticeResponse, error) { return res, nil } +var updateNoticeLock util.NonBlockingLock +var defaultNotice *NoticeResponse + func getNewestNoticesFromCacheAndDisk() (*NoticeResponse, error) { v, success := noticeCache.Get(NOTICE_CACHE_KEY) if success { @@ -84,13 +81,28 @@ func getNewestNoticesFromCacheAndDisk() (*NoticeResponse, error) { n := v.(*NoticeResponse) return n, nil } - - notice, err := getNewestNoticesFromDisk() - if err != nil { - log.Error("GetNewestNotice failed, error=%v", err) - 
noticeCache.Set(NOTICE_CACHE_KEY, nil, 30*time.Second) - return nil, err + if updateNoticeLock.TryLock() { + defer updateNoticeLock.Unlock() + v, success = noticeCache.Get(NOTICE_CACHE_KEY) + if success { + log.Debug("Get notice from cache,value = %v", v) + if v == nil { + return nil, nil + } + n := v.(*NoticeResponse) + return n, nil + } + log.Info("try to update notice") + notice, err := getNewestNoticesFromDisk() + if err != nil { + log.Error("GetNewestNotice failed, error=%v", err) + noticeCache.Set(NOTICE_CACHE_KEY, nil, 30*time.Second) + return nil, err + } + noticeCache.Set(NOTICE_CACHE_KEY, notice, getNoticeTimeout()) + defaultNotice = notice + return notice, nil } - noticeCache.Set(NOTICE_CACHE_KEY, notice, getNoticeTimeout()) - return notice, nil + //未拿到锁的线程返回旧值 + return defaultNotice, nil } diff --git a/routers/repo/ai_model_manage.go b/routers/repo/ai_model_manage.go index 4473a93b47..e68cef9ad4 100644 --- a/routers/repo/ai_model_manage.go +++ b/routers/repo/ai_model_manage.go @@ -5,6 +5,7 @@ import ( "encoding/json" "errors" "fmt" + "io" "net/http" "net/url" "path" @@ -671,26 +672,31 @@ func MinioDownloadManyFile(path string, ctx *context.Context, returnFileName str ctx.ServerError("download file failed:", err) return } else { - defer body.Close() - p := make([]byte, 1024) - var readErr error - var readCount int // 读取对象内容 - for { - readCount, readErr = body.Read(p) - if readCount > 0 { - fDest.Write(p[:readCount]) - } - if readErr != nil { - break - } - } + bodyReader(body, fDest) } } } } +func bodyReader(body io.ReadCloser, fDest io.Writer) { + defer body.Close() + p := make([]byte, 1024) + var readErr error + var readCount int + + for { + readCount, readErr = body.Read(p) + if readCount > 0 { + fDest.Write(p[:readCount]) + } + if readErr != nil { + break + } + } +} + func downloadFromCloudBrainOne(path string, task *models.AiModelManage, ctx *context.Context, id string) { allFile, err := storage.GetAllObjectByBucketAndPrefixMinio(setting.Attachment.Minio.Bucket, path) if err == nil { @@ -726,25 +732,30 @@ func ObsDownloadManyFile(path string, ctx *context.Context, returnFileName strin ctx.ServerError("download file failed:", err) return } else { - defer body.Close() - p := make([]byte, 1024) - var readErr error - var readCount int // 读取对象内容 - for { - readCount, readErr = body.Read(p) - if readCount > 0 { - fDest.Write(p[:readCount]) - } - if readErr != nil { - break - } - } + obsBodyReader(body, fDest) } } } } +func obsBodyReader(body io.ReadCloser, fDest io.Writer) { + defer body.Close() + p := make([]byte, 1024) + var readErr error + var readCount int + + for { + readCount, readErr = body.Read(p) + if readCount > 0 { + fDest.Write(p[:readCount]) + } + if readErr != nil { + break + } + } +} + func downloadFromCloudBrainTwo(path string, task *models.AiModelManage, ctx *context.Context, id string) { allFile, err := storage.GetAllObjectByBucketAndPrefix(setting.Bucket, path) if err == nil { @@ -938,7 +949,7 @@ func ShowModelInfo(ctx *context.Context) { ctx.HTML(200, tplModelInfo) } -func QueryModelById(ctx *context.Context) { +func QueryModelObjById(ctx *context.Context) *models.AiModelManage { id := ctx.Query("id") model, err := models.QueryModelById(id) if err == nil { @@ -946,13 +957,22 @@ func QueryModelById(ctx *context.Context) { model.IsCanDelete = isCanDelete(ctx, model.UserId) model.IsCanDownload = isCanDownload(ctx, model) removeIpInfo(model) + return model + } else { + return nil + } +} + +func QueryModelById(ctx *context.Context) { + model := QueryModelObjById(ctx) 
+ if model != nil { ctx.JSON(http.StatusOK, model) } else { ctx.JSON(http.StatusNotFound, nil) } } -func ShowSingleModel(ctx *context.Context) { +func QueryModelObjByName(ctx *context.Context) []*models.AiModelManage { name := ctx.Query("name") log.Info("Show single ModelInfo start.name=" + name) modelArrays := models.QueryModelByName(name, ctx.Repo.Repository.ID) @@ -993,8 +1013,11 @@ func ShowSingleModel(ctx *context.Context) { model.UserRelAvatarLink = value.RelAvatarLink() } } + return modelResult +} - ctx.JSON(http.StatusOK, modelResult) +func ShowSingleModel(ctx *context.Context) { + ctx.JSON(http.StatusOK, QueryModelObjByName(ctx)) } func removeIpInfo(model *models.AiModelManage) { @@ -1161,6 +1184,8 @@ func isAdminRight(ctx *context.Context) bool { if err != nil { log.Error("GetUserRepoPermission failed:%v", err.Error()) return false + } else { + log.Info("permission.AccessMode=" + string(permission.AccessMode)) } if permission.AccessMode >= models.AccessModeAdmin { return true @@ -1170,6 +1195,7 @@ func isAdminRight(ctx *context.Context) bool { func isOperModifyOrDelete(ctx *context.Context, modelUserId int64) bool { if ctx.User == nil { + log.Info("user is nil") return false } if ctx.User.IsAdmin || ctx.User.ID == modelUserId { @@ -1178,6 +1204,10 @@ func isOperModifyOrDelete(ctx *context.Context, modelUserId int64) bool { return isAdminRight(ctx) } +func IsOperModifyOrDelete(ctx *context.Context, modelUserId int64) bool { + return isOperModifyOrDelete(ctx, modelUserId) +} + func ShowModelPageInfo(ctx *context.Context) { log.Info("ShowModelInfo start.") if !isQueryRight(ctx) { diff --git a/routers/repo/commit.go b/routers/repo/commit.go index a7a8d30d09..c30ec8460a 100644 --- a/routers/repo/commit.go +++ b/routers/repo/commit.go @@ -179,7 +179,7 @@ func FileHistory(ctx *context.Context) { page = 1 } - commits, err := ctx.Repo.GitRepo.CommitsByFileAndRange(branchName, fileName, page) + commits, err := ctx.Repo.GitRepo.CommitsByFileAndRangeNoFollow(branchName, fileName, page) if err != nil { ctx.ServerError("CommitsByFileAndRange", err) return diff --git a/services/ai_task_service/cluster/c2net.go b/services/ai_task_service/cluster/c2net.go index 3c8ab310c9..e4db4f5712 100644 --- a/services/ai_task_service/cluster/c2net.go +++ b/services/ai_task_service/cluster/c2net.go @@ -500,6 +500,14 @@ func buildExecCodeCommand(codeDirPath, modelFilePath, bootFile, computeResource, } if computeResource == models.NPU { builder.Next(entity.NewCommand("source", "/home/ma-user/.bashrc")). + Next(entity.NewCommand("export", "GLOG_v=3")). + Next(entity.NewCommand("export", "ASCEND_GLOBAL_LOG_LEVEL=3")). + Next(entity.NewCommand("export", "ASCEND_SLOG_PRINT_TO_STDOUT=0 ")). + Next(entity.NewCommand("export", "HCCL_CONNECT_TIMEOUT=3600")). + Next(entity.NewCommand("export", "HCCL_EXEC_TIMEOUT=1800")). + Next(entity.NewCommand("export", "PIPELINE_SLICE_SKIP_REDISTRIBUTION=1")). + Next(entity.NewCommand("export", "MS_DEV_REDUNDANCY_TASK_NUM=4")). + Next(entity.NewCommand("export", "MS_DEV_CELL_REUSE=2")). 
Next(entity.NewCommand("python", "/home/ma-user/davinci/train/davincirun.py", "python", "/home/ma-user/grampus.py", paramCode)) } else if computeResource == models.GCU { builder.Next(entity.NewCommand("cd", codeDirPath)) @@ -699,7 +707,7 @@ func (c C2NetClusterAdapter) GetLogDownloadInfo(opts entity.ClusterLogDownloadIn }, nil } -func (c C2NetClusterAdapter) GetSingleOutputDownloadInfo(opts entity.ClusterOutputDownloadInfoOpts) (*entity.FileDownloadInfo, error) { +func (c C2NetClusterAdapter) GetSingleOutputDownloadInfo(opts entity.ClusterSingleOutputDownloadInfoOpts) (*entity.FileDownloadInfo, error) { helper := storage_helper.SelectUploaderFromStorageType(opts.StorageType) url, err := helper.GetSignedDownloadUrl(opts.Path) if err != nil { @@ -711,8 +719,8 @@ func (c C2NetClusterAdapter) GetSingleOutputDownloadInfo(opts entity.ClusterOutp }, nil } -func (c C2NetClusterAdapter) GetAllOutputDownloadInfo(opts entity.ClusterOutputDownloadInfoOpts) (*entity.FileDownloadInfo, error) { - return GetAllOutputDownloadInfo(opts) +func (c C2NetClusterAdapter) DownloadAllOutput(opts entity.DownloadOutputOpts) error { + return DownloadAllOutput(opts) } func (c C2NetClusterAdapter) GetNodeInfo(opts entity.ClusterNodeInfoOpts) ([]entity.AITaskNodeInfo, error) { diff --git a/services/ai_task_service/cluster/cloudbrain_one.go b/services/ai_task_service/cluster/cloudbrain_one.go index 66e6763fbf..f43b9902b8 100644 --- a/services/ai_task_service/cluster/cloudbrain_one.go +++ b/services/ai_task_service/cluster/cloudbrain_one.go @@ -427,7 +427,7 @@ func (c CloudbrainOneClusterAdapter) GetLogDownloadInfo(opts entity.ClusterLogDo }, nil } -func (c CloudbrainOneClusterAdapter) GetSingleOutputDownloadInfo(opts entity.ClusterOutputDownloadInfoOpts) (*entity.FileDownloadInfo, error) { +func (c CloudbrainOneClusterAdapter) GetSingleOutputDownloadInfo(opts entity.ClusterSingleOutputDownloadInfoOpts) (*entity.FileDownloadInfo, error) { helper := storage_helper.SelectUploaderFromStorageType(opts.StorageType) url, err := helper.GetSignedDownloadUrl(opts.Path) if err != nil { @@ -439,8 +439,8 @@ func (c CloudbrainOneClusterAdapter) GetSingleOutputDownloadInfo(opts entity.Clu }, nil } -func (c CloudbrainOneClusterAdapter) GetAllOutputDownloadInfo(opts entity.ClusterOutputDownloadInfoOpts) (*entity.FileDownloadInfo, error) { - return GetAllOutputDownloadInfo(opts) +func (c CloudbrainOneClusterAdapter) DownloadAllOutput(opts entity.DownloadOutputOpts) error { + return DownloadAllOutput(opts) } func (c CloudbrainOneClusterAdapter) GetNodeInfo(opts entity.ClusterNodeInfoOpts) ([]entity.AITaskNodeInfo, error) { diff --git a/services/ai_task_service/cluster/cloudbrain_two.go b/services/ai_task_service/cluster/cloudbrain_two.go index 58ca694293..216c71c8db 100644 --- a/services/ai_task_service/cluster/cloudbrain_two.go +++ b/services/ai_task_service/cluster/cloudbrain_two.go @@ -425,6 +425,7 @@ func convertCloudbrainTwoTrainJobUserImageReq(req entity.CreateTrainTaskRequest, trainUrl := JointCloudbrainTwoReqUrl(t.OutPut) logUrl := JointCloudbrainTwoReqUrl(t.LogPath) params := handleCloudbrainTwoParameter(req) + return models.CreateUserImageTrainJobParams{ JobName: req.Name, Description: req.Description, @@ -447,7 +448,6 @@ func convertCloudbrainTwoTrainJobUserImageReq(req entity.CreateTrainTaskRequest, NasType: setting.ModelArtsNasType, }, } - } func getCloudbrainTwoDataUrl(data []entity.ContainerData) string { @@ -766,7 +766,7 @@ func (c CloudbrainTwoClusterAdapter) GetLogDownloadInfo(opts entity.ClusterLogDo }, nil } -func (c 
CloudbrainTwoClusterAdapter) GetSingleOutputDownloadInfo(opts entity.ClusterOutputDownloadInfoOpts) (*entity.FileDownloadInfo, error) { +func (c CloudbrainTwoClusterAdapter) GetSingleOutputDownloadInfo(opts entity.ClusterSingleOutputDownloadInfoOpts) (*entity.FileDownloadInfo, error) { helper := storage_helper.SelectUploaderFromStorageType(opts.StorageType) url, err := helper.GetSignedDownloadUrl(opts.Path) if err != nil { @@ -778,8 +778,8 @@ func (c CloudbrainTwoClusterAdapter) GetSingleOutputDownloadInfo(opts entity.Clu }, nil } -func (c CloudbrainTwoClusterAdapter) GetAllOutputDownloadInfo(opts entity.ClusterOutputDownloadInfoOpts) (*entity.FileDownloadInfo, error) { - return GetAllOutputDownloadInfo(opts) +func (c CloudbrainTwoClusterAdapter) DownloadAllOutput(opts entity.DownloadOutputOpts) error { + return DownloadAllOutput(opts) } func (c CloudbrainTwoClusterAdapter) GetTrainJobOperationProfile(jobId string) (*entity.OperationProfile, error) { diff --git a/services/ai_task_service/cluster/cluster_base.go b/services/ai_task_service/cluster/cluster_base.go index e3fbb5ea1f..ce66e16629 100644 --- a/services/ai_task_service/cluster/cluster_base.go +++ b/services/ai_task_service/cluster/cluster_base.go @@ -42,8 +42,8 @@ type ClusterAdapter interface { GetTrainJobOperationProfile(jobId string) (*entity.OperationProfile, error) GetOutput(opts entity.ClusterOutputOpts) (*entity.ClusterAITaskOutput, error) GetAllOutput(opts entity.ClusterOutputOpts) (*entity.AllAITaskOutput, error) - GetSingleOutputDownloadInfo(opts entity.ClusterOutputDownloadInfoOpts) (*entity.FileDownloadInfo, error) - GetAllOutputDownloadInfo(opts entity.ClusterOutputDownloadInfoOpts) (*entity.FileDownloadInfo, error) + GetSingleOutputDownloadInfo(opts entity.ClusterSingleOutputDownloadInfoOpts) (*entity.FileDownloadInfo, error) + DownloadAllOutput(opts entity.DownloadOutputOpts) error GetNodeInfo(opts entity.ClusterNodeInfoOpts) ([]entity.AITaskNodeInfo, error) GetResourceUsage(opts entity.ClusterResourceUsageOpts) (*entity.ResourceUsage, error) //GetImages return available list of clusters diff --git a/services/ai_task_service/cluster/common.go b/services/ai_task_service/cluster/common.go index 29c512b8ca..33f02a764b 100644 --- a/services/ai_task_service/cluster/common.go +++ b/services/ai_task_service/cluster/common.go @@ -1,6 +1,7 @@ package cluster import ( + "archive/zip" "bufio" "code.gitea.io/gitea/entity" "code.gitea.io/gitea/modules/log" @@ -95,46 +96,59 @@ func getLogFilesInStorage(helper storage_helper.StorageHelper, objectKeyPrefix s return logFiles } -func GetAllOutputDownloadInfo(opts entity.ClusterOutputDownloadInfoOpts) (*entity.FileDownloadInfo, error) { +func DownloadAllOutput(opts entity.DownloadOutputOpts) error { helper := storage_helper.SelectUploaderFromStorageType(opts.StorageType) var err error fileList, err := helper.GetAllObjectsUnderDir(opts.Path) if err != nil { log.Error("GetAllObjectsUnderDir err.objectKeyPrefix=%s,err=%v", opts.Path, err) - return nil, err + return err } if len(fileList) == 0 { - return nil, nil - } - - res := &entity.FileDownloadInfo{ - Readers: make([]entity.FileReader, 0), - ResultType: entity.FileTypeZIP, - ResultFileName: opts.JobName + ".zip", + return nil } - defer func() { - if err != nil { - res.Close() - } - }() - for i := 0; i < len(fileList); i++ { file := fileList[i] if file.IsDir { continue } - var reader io.ReadCloser - reader, err = helper.OpenFile(file.RelativePath) + err = openAndWrite2ZIP(helper, file, opts.ZIPWriter) if err != nil { - 
log.Error("GetAllOutputDownloadInfo OpenFile err.opts=%+v,err =%v", opts, err) - return nil, err + log.Error("openAndWrite2ZIP err.%v", err) + return err } - res.Readers = append(res.Readers, entity.FileReader{ - Reader: reader, - Name: file.FileName, - }) } - return res, nil + return nil +} + +func openAndWrite2ZIP(helper storage_helper.StorageHelper, file storage.FileInfo, zipWriter *zip.Writer) error { + var reader io.ReadCloser + reader, err := helper.OpenFile(file.RelativePath) + if err != nil { + log.Error("openAndWrite2ZIP OpenFile err.filePath=%+v,err =%v", file.RelativePath, err) + return err + } + defer reader.Close() + + fDest, err := zipWriter.Create(file.FileName) + if err != nil { + log.Error("zipWriter.Create error.%v", err) + return err + } + p := make([]byte, 1024) + var readErr error + var readCount int + // 读取对象内容 + for { + readCount, readErr = reader.Read(p) + if readCount > 0 { + fDest.Write(p[:readCount]) + } + if readErr != nil { + break + } + } + return nil } diff --git a/services/ai_task_service/storage_helper/obs.go b/services/ai_task_service/storage_helper/obs.go index 1620166d75..836801d87d 100644 --- a/services/ai_task_service/storage_helper/obs.go +++ b/services/ai_task_service/storage_helper/obs.go @@ -54,7 +54,8 @@ func (m *OBSHelper) GetOneLevelObjectsUnderDir(dirPath string, maxKeyArray ...in input.Bucket = m.GetBucket() input.Prefix = dirPath input.Delimiter = "/" - maxKey := setting.OUTPUT_SHOW_MAX_KEY + //结果包含文件夹本身的key,所以maxKey需要+1 + maxKey := setting.OUTPUT_SHOW_MAX_KEY + 1 if len(maxKeyArray) > 0 { maxKey = maxKeyArray[0] } diff --git a/services/ai_task_service/task/cloudbrain_two_train_task.go b/services/ai_task_service/task/cloudbrain_two_train_task.go index ba46c828cf..071c925658 100644 --- a/services/ai_task_service/task/cloudbrain_two_train_task.go +++ b/services/ai_task_service/task/cloudbrain_two_train_task.go @@ -1,6 +1,9 @@ package task import ( + "encoding/json" + "strings" + "code.gitea.io/gitea/entity" "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/log" @@ -9,8 +12,6 @@ import ( "code.gitea.io/gitea/modules/timeutil" "code.gitea.io/gitea/routers/response" "code.gitea.io/gitea/services/ai_task_service/context" - "encoding/json" - "strings" ) type CloudbrainTwoTrainTaskTemplate struct { @@ -107,6 +108,12 @@ func (g CloudbrainTwoTrainTaskTemplate) CallCreationAPI(ctx *context.CreationCon log.Error("UresourcePools.Info is empty. 
%v", err) return response.SYSTEM_ERROR } + modelarts_poolid := resourcePools.Info[0].ID + for _, t := range resourcePools.Info { + if t.Value == ctx.Spec.QueueCode { + modelarts_poolid = t.ID + } + } form := ctx.Request req := entity.CreateTrainTaskRequest{ Name: form.JobName, @@ -131,7 +138,7 @@ func (g CloudbrainTwoTrainTaskTemplate) CallCreationAPI(ctx *context.CreationCon OutPut: ctx.GetContainerDataArray(entity.ContainerOutPutPath), Params: form.ParamArray, Spec: ctx.Spec, - PoolId: resourcePools.Info[0].ID, + PoolId: modelarts_poolid, WorkServerNumber: form.WorkServerNumber, }, }, diff --git a/services/ai_task_service/task/monitor.go b/services/ai_task_service/task/monitor.go new file mode 100644 index 0000000000..564a4ca5b6 --- /dev/null +++ b/services/ai_task_service/task/monitor.go @@ -0,0 +1,89 @@ +package task + +import ( + "encoding/csv" + "strconv" + + "code.gitea.io/gitea/modules/context" + + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/util" +) + +func MonitorTaskFile(status string, ctx *context.Context) error { + + total := monitorTasksCount(status) + pageSize := 300 + totalPage := util.GetTotalPage(total, pageSize) + + f := csv.NewWriter(ctx.Resp) + + err := f.Write(allTaskHeader()) + if err != nil { + return err + } + + for i := 0; i <= totalPage; i++ { + + pageRecords, err := monitorTasks(status, i+1, pageSize) + if err != nil { + log.Warn("Get monitor page task err", err) + continue + + } + for _, record := range pageRecords { + + err = f.Write(allTaskValues(record)) + if err != nil { + return err + } + + } + + } + f.Flush() + return nil +} + +func allTaskValues(record map[string]string) []string { + + return []string{record["id"], record["job_id"], record["job_type"], record["job_name"], record["compute_resource"], + record["status"], record["type"], record["work_server_number"], record["owner_name"], record["repo_name"]} +} + +func allTaskHeader() []string { + return []string{"id", "job_id", "job_type", "job_name", + "compute_resource", "status", "type", + "work_server_number", + "owner_name", "repo_name"} +} + +func monitorTasksCount(status string) int64 { + + countSql := "select count(*) from cloudbrain a, repository b where a.repo_id=b.id and job_type='TRAIN' and type!=0 and type!=3 " + if status != "" { + countSql += " and a.status= '" + status + "' " + } + + count, err := models.CountByRawSql(countSql) + + if err != nil { + log.Error("Get monitor task count error", err) + } + return count +} + +func monitorTasks(status string, page, pageSize int) ([]map[string]string, error) { + + sql := "select a.id, job_id,job_type,job_name,compute_resource, a.status,type,work_server_number,owner_name, b.name as repo_name from cloudbrain a, repository b where a.repo_id=b.id and job_type='TRAIN' and type!=0 and type!=3 " + if status != "" { + sql += " and a.status= '" + status + "' " + } + sql += " order by a.id desc" + + sql += " limit " + strconv.Itoa(pageSize) + " offset " + strconv.Itoa((page-1)*pageSize) + + return models.QueryByRawSql(sql) + +} diff --git a/services/ai_task_service/task/task_base.go b/services/ai_task_service/task/task_base.go index 604f58d2a0..9f62eba2cf 100644 --- a/services/ai_task_service/task/task_base.go +++ b/services/ai_task_service/task/task_base.go @@ -53,8 +53,8 @@ type AITaskTemplate interface { Update(cloudbrainId int64) *response.BizError GetLog(opts entity.QueryLogOpts) (*entity.ClusterLog, *response.BizError) GetLogDownloadInfo(opts entity.GetLogDownloadInfoReq) (*entity.FileDownloadInfo, 
*response.BizError) - GetSingleOutputDownloadInfo(opts entity.GetOutputDownloadInfoReq) (*entity.FileDownloadInfo, *response.BizError) - GetAllOutputDownloadInfo(opts entity.GetOutputDownloadInfoReq) (*entity.FileDownloadInfo, *response.BizError) + GetSingleOutputDownloadInfo(opts entity.GetSingleDownloadInfoReq) (*entity.FileDownloadInfo, *response.BizError) + DownloadAllOutput(opts entity.DownloadAllFileReq) *response.BizError GetOutput(cloudbrainId int64, parentDir string) (*entity.AITaskOutput, *response.BizError) GetAllOutput(opts entity.GetAllOutputReq) (*entity.AllAITaskOutput, *response.BizError) GetDebugUrl(cloudbrainId int64, fileName ...string) (string, *response.BizError) @@ -346,7 +346,7 @@ func (g DefaultAITaskTemplate) GetLogDownloadInfo(opts entity.GetLogDownloadInfo return s, nil } -func (g DefaultAITaskTemplate) GetSingleOutputDownloadInfo(opts entity.GetOutputDownloadInfoReq) (*entity.FileDownloadInfo, *response.BizError) { +func (g DefaultAITaskTemplate) GetSingleOutputDownloadInfo(opts entity.GetSingleDownloadInfoReq) (*entity.FileDownloadInfo, *response.BizError) { c := g.GetMyCluster() if c == nil { log.Error("Get cluster failed,cloudbrainId=%d", opts) @@ -361,19 +361,19 @@ func (g DefaultAITaskTemplate) GetSingleOutputDownloadInfo(opts entity.GetOutput return s, nil } -func (g DefaultAITaskTemplate) GetAllOutputDownloadInfo(opts entity.GetOutputDownloadInfoReq) (*entity.FileDownloadInfo, *response.BizError) { +func (g DefaultAITaskTemplate) DownloadAllOutput(opts entity.DownloadAllFileReq) *response.BizError { c := g.GetMyCluster() if c == nil { log.Error("Get cluster failed,cloudbrainId=%d", opts) - return nil, response.SYSTEM_ERROR + return response.SYSTEM_ERROR } - s, err := GetAllOutputDownloadInfo(opts, c.GetAllOutputDownloadInfo) + err := DownloadAllOutput(opts, c.DownloadAllOutput) if err != nil { log.Error("GetOutputDownloadInfo err.cloudbrainId=%d ", opts) - return nil, nil + return nil } - return s, nil + return nil } func (g DefaultAITaskTemplate) GetOutput(cloudbrainId int64, parentDir string) (*entity.AITaskOutput, *response.BizError) { diff --git a/services/ai_task_service/task/task_service.go b/services/ai_task_service/task/task_service.go index 84a3a44763..403a70ead9 100644 --- a/services/ai_task_service/task/task_service.go +++ b/services/ai_task_service/task/task_service.go @@ -36,8 +36,8 @@ type GetNotebookUrlFunc func(string) (string, error) type GetNodeInfoFunc func(entity.ClusterNodeInfoOpts) ([]entity.AITaskNodeInfo, error) type GetOutputFunc func(entity.ClusterOutputOpts) (*entity.ClusterAITaskOutput, error) type GetAllOutputFunc func(entity.ClusterOutputOpts) (*entity.AllAITaskOutput, error) -type GetSingleOutputDownloadInfoFunc func(req entity.ClusterOutputDownloadInfoOpts) (*entity.FileDownloadInfo, error) -type GetAllOutputDownloadInfoFunc func(req entity.ClusterOutputDownloadInfoOpts) (*entity.FileDownloadInfo, error) +type GetSingleOutputDownloadInfoFunc func(req entity.ClusterSingleOutputDownloadInfoOpts) (*entity.FileDownloadInfo, error) +type DownloadAllOutputFunc func(req entity.DownloadOutputOpts) error type GetOperationProfileFunc func(string) (*entity.OperationProfile, error) type GetResourceUsageFunc func(entity.ClusterResourceUsageOpts) (*entity.ResourceUsage, error) @@ -430,7 +430,7 @@ func GetLogDownloadInfo(opts entity.GetLogDownloadInfoReq, getLogDownloadInfo Ge }) } -func GetSingleOutputDownloadInfo(opts entity.GetOutputDownloadInfoReq, f GetSingleOutputDownloadInfoFunc) (*entity.FileDownloadInfo, error) { +func 
GetSingleOutputDownloadInfo(opts entity.GetSingleDownloadInfoReq, f GetSingleOutputDownloadInfoFunc) (*entity.FileDownloadInfo, error) { cloudbrain, err := models.GetCloudbrainByCloudbrainID(opts.CloudbrainId) if err != nil { return nil, err @@ -440,27 +440,28 @@ func GetSingleOutputDownloadInfo(opts entity.GetOutputDownloadInfoReq, f GetSing } aiConfig := GetDetailConfigInfoByCloudbrain(cloudbrain) fileRelativePath := path.Join(aiConfig.OutputObjectPrefix, opts.ParentDir, opts.FileName) - return f(entity.ClusterOutputDownloadInfoOpts{ + return f(entity.ClusterSingleOutputDownloadInfoOpts{ JobId: cloudbrain.JobID, Path: fileRelativePath, StorageType: aiConfig.OutputStorageType, }) } -func GetAllOutputDownloadInfo(opts entity.GetOutputDownloadInfoReq, f GetAllOutputDownloadInfoFunc) (*entity.FileDownloadInfo, error) { +func DownloadAllOutput(opts entity.DownloadAllFileReq, downloadFunc DownloadAllOutputFunc) error { cloudbrain, err := models.GetCloudbrainByCloudbrainID(opts.CloudbrainId) if err != nil { - return nil, err + return err } if cloudbrain.JobID == "" { - return nil, nil + return nil } aiConfig := GetDetailConfigInfoByCloudbrain(cloudbrain) - return f(entity.ClusterOutputDownloadInfoOpts{ + return downloadFunc(entity.DownloadOutputOpts{ JobId: cloudbrain.JobID, Path: aiConfig.OutputObjectPrefix, StorageType: aiConfig.OutputStorageType, JobName: cloudbrain.JobName, + ZIPWriter: opts.ZIPWriter, }) } diff --git a/services/role/role.go b/services/role/role.go index a543519c01..f43a298210 100644 --- a/services/role/role.go +++ b/services/role/role.go @@ -13,6 +13,11 @@ var roleMap = map[models.RoleType]*models.Role{ Name: "奖励积分管理员", Description: "拥有奖励积分管理相关功能的管理员权限", }, + models.MonitorAdmin: { + Type: models.MonitorAdmin, + Name: "监测管理员", + Description: "拥有监测的管理员权限", + }, } func GetRole(roleType models.RoleType) *models.Role { diff --git a/templates/base/head_navbar.tmpl b/templates/base/head_navbar.tmpl index 12465b5a8e..5f42d0bf25 100755 --- a/templates/base/head_navbar.tmpl +++ b/templates/base/head_navbar.tmpl @@ -7,7 +7,7 @@ -
+
@@ -48,7 +48,7 @@
+
+ {{.i18n.Tr "custom.head.forum"}} {{else if .IsLandingPageHome}}
+ {{.i18n.Tr "custom.head.forum"}} {{else if .IsLandingPageExplore}} {{.i18n.Tr "home"}} {{else if .IsLandingPageOrganizations}} @@ -231,7 +235,9 @@
-
+
@@ -277,7 +283,9 @@
-
+
diff --git a/templates/base/head_navbar_fluid.tmpl b/templates/base/head_navbar_fluid.tmpl index e8c7aee9ad..af9a6a323f 100644 --- a/templates/base/head_navbar_fluid.tmpl +++ b/templates/base/head_navbar_fluid.tmpl @@ -44,8 +44,8 @@ {{.i18n.Tr "repo.model_experience"}}
-
+ {{.i18n.Tr "custom.head.forum"}} {{else if .IsLandingPageHome}}
+ {{.i18n.Tr "custom.head.forum"}} {{else if .IsLandingPageExplore}} {{.i18n.Tr "home"}} {{else if .IsLandingPageOrganizations}} diff --git a/templates/base/head_navbar_pro.tmpl b/templates/base/head_navbar_pro.tmpl index f0acbe088f..8bfc0af72b 100644 --- a/templates/base/head_navbar_pro.tmpl +++ b/templates/base/head_navbar_pro.tmpl @@ -47,7 +47,7 @@
+ {{.i18n.Tr "custom.head.forum"}} {{else if .IsLandingPageHome}}
+ {{.i18n.Tr "custom.head.forum"}} {{else if .IsLandingPageExplore}} {{.i18n.Tr "home"}} {{else if .IsLandingPageOrganizations}} diff --git a/templates/repo/datasets/index.tmpl b/templates/repo/datasets/index.tmpl index 21eb84f430..85053f3dff 100755 --- a/templates/repo/datasets/index.tmpl +++ b/templates/repo/datasets/index.tmpl @@ -180,7 +180,7 @@ {{$.i18n.Tr "dataset.unfavorite"}} - diff --git a/templates/repo/diff/comments.tmpl b/templates/repo/diff/comments.tmpl index 288b2316a6..ef00dcc81e 100644 --- a/templates/repo/diff/comments.tmpl +++ b/templates/repo/diff/comments.tmpl @@ -35,7 +35,7 @@
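Stepping back to the output download rework (DownloadAllOutputFile in routers/ai_task/ai_task.go, DownloadAllOutput and openAndWrite2ZIP in services/ai_task_service/cluster/common.go): instead of collecting readers into a FileDownloadInfo, the handler now sets the attachment headers, wraps the HTTP response writer in an archive/zip writer, and copies each output object into the archive as it is read. A condensed sketch of that streaming pattern against plain net/http, assuming a hypothetical openObject callback in place of the storage helper and io.Copy in place of the patch's manual 1 KiB read loop:

```go
package main

import (
	"archive/zip"
	"io"
	"log"
	"net/http"
	"net/url"
	"strings"
)

// streamAllAsZip writes every listed object into a zip archive that is
// streamed straight to the client, so nothing is staged on local disk.
func streamAllAsZip(w http.ResponseWriter, jobName string, files []string,
	openObject func(path string) (io.ReadCloser, error)) error {

	w.Header().Set("Content-Disposition", "attachment; filename="+url.QueryEscape(jobName+".zip"))
	w.Header().Set("Content-Type", "application/octet-stream")

	zw := zip.NewWriter(w)
	defer zw.Close() // writes the zip central directory at the end of the response

	for _, name := range files {
		reader, err := openObject(name)
		if err != nil {
			return err
		}
		dest, err := zw.Create(name) // one archive entry per output file
		if err != nil {
			reader.Close()
			return err
		}
		_, copyErr := io.Copy(dest, reader)
		reader.Close()
		if copyErr != nil {
			return copyErr
		}
	}
	return nil
}

func main() {
	// Hypothetical handler wiring; the real route is registered in routers/api/v1/api.go.
	http.HandleFunc("/download", func(w http.ResponseWriter, r *http.Request) {
		files := []string{"output/model.ckpt", "output/train.log"}
		open := func(p string) (io.ReadCloser, error) {
			return io.NopCloser(strings.NewReader("demo content of " + p)), nil
		}
		if err := streamAllAsZip(w, "job-demo", files, open); err != nil {
			log.Printf("stream zip failed: %v", err)
		}
	})
	log.Fatal(http.ListenAndServe(":8080", nil))
}
```

Deferring the zip writer's Close matters: closing it is what flushes the central directory at the end of the response, so the download is only a valid archive if the handler returns without writing anything else afterwards.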
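The other concurrency change worth a sketch is the notice cache: modules/util/lock.go adds NonBlockingLock, a compare-and-swap around an int32, and routers/notice/notice.go uses it so that on a cache miss only one goroutine re-reads the notice file while concurrent callers return the previously loaded defaultNotice instead of blocking. A reduced sketch of that single-flight refresh, assuming a toy loadFromDisk callback and an atomic.Value in place of the go-cache instance; the patch's re-check of the cache after winning TryLock is omitted here:

```go
package main

import (
	"fmt"
	"sync/atomic"
	"time"
)

// NonBlockingLock matches the helper added in modules/util/lock.go:
// TryLock succeeds for exactly one caller until Unlock resets the state.
type NonBlockingLock struct {
	state int32
}

func (m *NonBlockingLock) TryLock() bool {
	return atomic.CompareAndSwapInt32(&m.state, 0, 1)
}

func (m *NonBlockingLock) Unlock() {
	atomic.StoreInt32(&m.state, 0)
}

var (
	refreshLock NonBlockingLock
	lastLoaded  atomic.Value // most recent value that was successfully loaded
)

// getNotice is a toy stand-in for getNewestNoticesFromCacheAndDisk: the
// winner of TryLock performs the expensive reload, everyone else returns
// whatever was loaded last instead of blocking behind it.
func getNotice(loadFromDisk func() string) string {
	if refreshLock.TryLock() {
		defer refreshLock.Unlock()
		v := loadFromDisk()
		lastLoaded.Store(v)
		return v
	}
	if v, ok := lastLoaded.Load().(string); ok {
		return v // stale but immediately available
	}
	return "" // nothing loaded yet, mirrors defaultNotice still being nil
}

func main() {
	slowLoad := func() string {
		time.Sleep(50 * time.Millisecond) // simulate a slow disk read
		return "notice v1"
	}
	go func() { fmt.Println("refresher:", getNotice(slowLoad)) }() // wins TryLock and reloads
	time.Sleep(10 * time.Millisecond)
	fmt.Println("concurrent caller:", getNotice(slowLoad)) // lock is held: returns the stale value
	time.Sleep(100 * time.Millisecond)
}
```

The trade-off is freshness for latency: callers that lose the race may see a value up to one cache period old, which is the behaviour the patch accepts for the notice banner.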