#2047 Merge branch V20220428 into develop

Merged
lewis merged 187 commits from V20220428 into develop 2 years ago
  1. models/attachment.go (+17, -0)
  2. models/cloudbrain.go (+2, -0)
  3. models/cloudbrain_image.go (+2, -2)
  4. models/dataset.go (+15, -3)
  5. models/helper_environment.go (+13, -9)
  6. models/issue.go (+35, -0)
  7. models/issue_comment.go (+2, -0)
  8. models/repo_list.go (+1, -0)
  9. models/user_business_analysis.go (+353, -44)
  10. models/user_business_struct.go (+122, -66)
  11. modules/auth/cloudbrain.go (+10, -0)
  12. modules/cloudbrain/cloudbrain.go (+1, -0)
  13. modules/cloudbrain/resty.go (+40, -1)
  14. modules/repository/hooks.go (+54, -3)
  15. modules/setting/repository.go (+3, -0)
  16. modules/ssh/ssh.go (+26, -0)
  17. modules/storage/minio_ext.go (+7, -1)
  18. modules/storage/obs.go (+52, -38)
  19. modules/templates/helper.go (+29, -2)
  20. options/locale/locale_en-US.ini (+26, -2)
  21. options/locale/locale_zh-CN.ini (+26, -2)
  22. public/home/home.js (+6, -1)
  23. public/home/search.js (+29, -22)
  24. routers/admin/cloudbrains.go (+7, -0)
  25. routers/admin/dataset.go (+23, -1)
  26. routers/home.go (+6, -2)
  27. routers/repo/attachment.go (+4, -48)
  28. routers/repo/cloudbrain.go (+59, -10)
  29. routers/repo/dataset.go (+3, -0)
  30. routers/repo/http.go (+4, -0)
  31. routers/repo/issue.go (+30, -1)
  32. routers/repo/modelarts.go (+16, -17)
  33. routers/repo/user_data_analysis.go (+130, -99)
  34. routers/routes/routes.go (+5, -2)
  35. routers/search.go (+17, -14)
  36. routers/user/profile.go (+5, -3)
  37. services/issue/content.go (+9, -0)
  38. services/socketwrap/clientManager.go (+1, -1)
  39. templates/admin/cloudbrain/imagecommit.tmpl (+128, -0)
  40. templates/admin/cloudbrain/images.tmpl (+0, -0)
  41. templates/admin/dataset/list.tmpl (+13, -14)
  42. templates/admin/dataset/search.tmpl (+13, -13)
  43. templates/custom/select_dataset.tmpl (+4, -4)
  44. templates/custom/select_dataset_train.tmpl (+4, -4)
  45. templates/explore/datasets.tmpl (+13, -6)
  46. templates/mail/auth/activate.tmpl (+1, -0)
  47. templates/mail/auth/activate_email.tmpl (+1, -0)
  48. templates/mail/auth/register_notify.tmpl (+1, -0)
  49. templates/mail/auth/reset_passwd.tmpl (+1, -0)
  50. templates/mail/issue/assigned.tmpl (+2, -0)
  51. templates/mail/issue/default.tmpl (+2, -0)
  52. templates/mail/notify/collaborator.tmpl (+2, -0)
  53. templates/repo/attachment/upload.tmpl (+4, -6)
  54. templates/repo/cloudbrain/new.tmpl (+20, -1)
  55. templates/repo/cloudbrain/show.tmpl (+12, -2)
  56. templates/repo/datasets/index.tmpl (+1, -1)
  57. templates/repo/issue/branch_selector_field.tmpl (+2, -2)
  58. templates/repo/issue/view_content/comments.tmpl (+35, -0)
  59. templates/repo/issue/view_content/sidebar.tmpl (+48, -1)
  60. templates/repo/modelarts/notebook/show.tmpl (+1, -1)
  61. templates/repo/modelarts/trainjob/new.tmpl (+26, -16)
  62. templates/repo/modelarts/trainjob/show.tmpl (+99, -6)
  63. templates/user/dashboard/feeds.tmpl (+1, -1)
  64. web_src/js/components/MinioUploader.vue (+177, -96)
  65. web_src/js/components/ObsUploader.vue (+0, -484)
  66. web_src/js/components/UserAnalysis.vue (+57, -6)
  67. web_src/js/components/images/adminImages.vue (+16, -18)
  68. web_src/js/features/images.js (+23, -3)
  69. web_src/js/index.js (+122, -41)
  70. web_src/less/openi.less (+16, -1)

models/attachment.go (+17, -0)

@@ -10,6 +10,7 @@ import (
"io"
"path"
"strings"
"time"

"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/obs"
@@ -64,6 +65,7 @@ type AttachmentInfo struct {
Repo *Repository `xorm:"extends"`
RelAvatarLink string `xorm:"extends"`
UserName string `xorm:"extends"`
Recommend bool `xorm:"-"`
}

type AttachmentsOptions struct {
@@ -78,6 +80,7 @@ type AttachmentsOptions struct {
JustNeedZipFile bool
NeedRepoInfo bool
Keyword string
RecommendOnly bool
}

func (a *Attachment) AfterUpdate() {
@@ -104,6 +107,14 @@ func (a *Attachment) IncreaseDownloadCount() error {
return nil
}

func (a *Attachment) UpdateDatasetUpdateUnix() error {
// Update the parent dataset's updated_unix to the current time.
if _, err := x.Exec("UPDATE `dataset` SET updated_unix="+fmt.Sprint(time.Now().Unix())+" WHERE id=?", a.DatasetID); err != nil {
return fmt.Errorf("UpdateDatasetUpdateUnix: %v", err)
}
return nil
}

// APIFormat converts models.Attachment to api.Attachment
func (a *Attachment) APIFormat() *api.Attachment {
return &api.Attachment{
@@ -570,6 +581,11 @@ func Attachments(opts *AttachmentsOptions) ([]*AttachmentInfo, int64, error) {
builder.Eq{"attachment.is_private": opts.IsPrivate},
)
}
if opts.RecommendOnly {
cond = cond.And(builder.In("attachment.id", builder.Select("attachment.id").
From("attachment").
Join("INNER", "dataset", "attachment.dataset_id = dataset.id and dataset.recommend=true")))
}

if opts.JustNeedZipFile {
var DecompressState []int32
@@ -618,6 +634,7 @@ func Attachments(opts *AttachmentsOptions) ([]*AttachmentInfo, int64, error) {
if err != nil {
return nil, 0, fmt.Errorf("GetDatasetByID failed error: %v", err)
}
attachment.Recommend = dataset.Recommend
repo, err := GetRepositoryByID(dataset.RepoID)
if err == nil {
attachment.Repo = repo
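
The new RecommendOnly option and the per-attachment Recommend flag can be exercised roughly as below. This is a minimal sketch, not part of the PR, and assumes it is built inside this repository so the models package resolves:

```go
package sketch

import (
	"fmt"

	"code.gitea.io/gitea/models"
)

// ListRecommendedAttachments uses the filter added above: the extra INNER JOIN in
// Attachments() keeps only attachments whose parent dataset has recommend=true, and
// NeedRepoInfo also fills Repo and the new Recommend flag on each AttachmentInfo.
func ListRecommendedAttachments() error {
	infos, total, err := models.Attachments(&models.AttachmentsOptions{
		RecommendOnly: true, // field added by this PR
		NeedRepoInfo:  true,
	})
	if err != nil {
		return err
	}
	fmt.Printf("showing %d recommended attachments (%d matched)\n", len(infos), total)
	for _, info := range infos {
		fmt.Println(info.UserName, info.Recommend)
	}
	return nil
}
```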


models/cloudbrain.go (+2, -0)

@@ -580,6 +580,8 @@ type CommitImageParams struct {
Topics []string
CloudBrainType int
UID int64
Place string
Type int
}

type CommitImageResult struct {


models/cloudbrain_image.go (+2, -2)

@@ -567,12 +567,12 @@ func isImageStaring(e Engine, userID, imageID int64) bool {
}
func RecommendImage(imageId int64, recommond bool) error {

image := Image{Type: getRecommondType(recommond)}
image := Image{Type: GetRecommondType(recommond)}
_, err := x.ID(imageId).Cols("type").Update(image)
return err
}

func getRecommondType(recommond bool) int {
func GetRecommondType(recommond bool) int {
if recommond {

return RECOMMOND_TYPE


models/dataset.go (+15, -3)

@@ -23,7 +23,8 @@ type Dataset struct {
Category string
Description string `xorm:"TEXT"`
DownloadTimes int64
NumStars int `xorm:"INDEX NOT NULL DEFAULT 0"`
NumStars int `xorm:"INDEX NOT NULL DEFAULT 0"`
Recommend bool `xorm:"INDEX NOT NULL DEFAULT false"`
License string
Task string
ReleaseID int64 `xorm:"INDEX"`
@@ -99,6 +100,7 @@ type SearchDatasetOptions struct {
OwnerID int64
RepoID int64
IncludePublic bool
RecommendOnly bool
Category string
Task string
License string
@@ -132,6 +134,13 @@ func CreateDataset(dataset *Dataset) (err error) {

}

func RecommendDataset(dataSetId int64, recommend bool) error {

dataset := Dataset{Recommend: recommend}
_, err := x.ID(dataSetId).Cols("recommend").Update(dataset)
return err
}

func SearchDataset(opts *SearchDatasetOptions) (DatasetList, int64, error) {
cond := SearchDatasetCondition(opts)
return SearchDatasetByCondition(opts, cond)
@@ -146,7 +155,6 @@ func SearchDatasetCondition(opts *SearchDatasetOptions) builder.Cond {
if opts.RepoID > 0 {
cond = cond.And(builder.Eq{"dataset.repo_id": opts.RepoID})
}

if opts.IncludePublic {
cond = cond.And(builder.Eq{"dataset.status": DatasetStatusPublic})
cond = cond.And(builder.Eq{"attachment.is_private": false})
@@ -185,6 +193,10 @@ func generateFilterCond(opts *SearchDatasetOptions, cond builder.Cond) builder.C
cond = cond.And(builder.Eq{"dataset.license": opts.License})
}

if opts.RecommendOnly {
cond = cond.And(builder.Eq{"dataset.recommend": opts.RecommendOnly})
}

return cond
}

@@ -198,7 +210,7 @@ func SearchDatasetByCondition(opts *SearchDatasetOptions, cond builder.Cond) (Da
defer sess.Close()

datasets := make(DatasetList, 0, opts.PageSize)
selectColumnsSql := "distinct dataset.id,dataset.title, dataset.status, dataset.category, dataset.description, dataset.download_times, dataset.license, dataset.task, dataset.release_id, dataset.user_id, dataset.repo_id, dataset.created_unix,dataset.updated_unix,dataset.num_stars"
selectColumnsSql := "distinct dataset.id,dataset.title, dataset.status, dataset.category, dataset.description, dataset.download_times, dataset.license, dataset.task, dataset.release_id, dataset.user_id, dataset.repo_id, dataset.created_unix,dataset.updated_unix,dataset.num_stars,dataset.recommend"

count, err := sess.Distinct("dataset.id").Join("INNER", "repository", "repository.id = dataset.repo_id").
Join("INNER", "attachment", "attachment.dataset_id=dataset.id").
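
A hedged sketch of how the recommend flag could be driven end to end: an admin marks a dataset (presumably via the routers/admin/dataset.go changes in this PR), and listings then show only recommended, public datasets through the new RecommendOnly filter. Not part of the PR; assumes compilation inside this repository:

```go
package sketch

import (
	"fmt"

	"code.gitea.io/gitea/models"
)

// RecommendAndList marks one dataset as recommended, then lists only recommended,
// public datasets via the new dataset.recommend condition in generateFilterCond.
func RecommendAndList(datasetID int64) error {
	if err := models.RecommendDataset(datasetID, true); err != nil {
		return err
	}
	datasets, count, err := models.SearchDataset(&models.SearchDatasetOptions{
		IncludePublic: true,
		RecommendOnly: true, // filter added by this PR
	})
	if err != nil {
		return err
	}
	fmt.Printf("listing %d of %d recommended datasets\n", len(datasets), count)
	return nil
}
```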


models/helper_environment.go (+13, -9)

@@ -12,15 +12,19 @@ import (

// env keys for git hooks need
const (
EnvRepoName = "GITEA_REPO_NAME"
EnvRepoUsername = "GITEA_REPO_USER_NAME"
EnvRepoIsWiki = "GITEA_REPO_IS_WIKI"
EnvPusherName = "GITEA_PUSHER_NAME"
EnvPusherEmail = "GITEA_PUSHER_EMAIL"
EnvPusherID = "GITEA_PUSHER_ID"
EnvKeyID = "GITEA_KEY_ID"
EnvIsDeployKey = "GITEA_IS_DEPLOY_KEY"
EnvIsInternal = "GITEA_INTERNAL_PUSH"
EnvRepoName = "GITEA_REPO_NAME"
EnvRepoUsername = "GITEA_REPO_USER_NAME"
EnvRepoIsWiki = "GITEA_REPO_IS_WIKI"
EnvPusherName = "GITEA_PUSHER_NAME"
EnvPusherEmail = "GITEA_PUSHER_EMAIL"
EnvPusherID = "GITEA_PUSHER_ID"
EnvKeyID = "GITEA_KEY_ID"
EnvIsDeployKey = "GITEA_IS_DEPLOY_KEY"
EnvIsInternal = "GITEA_INTERNAL_PUSH"
EnvRepoSize = "REPO_CURRENT_SIZE"
EnvRepoMaxFileSize = "REPO_MAX_FILE_SIZE"
EnvRepoMaxSize = "REPO_MAX_SIZE"
EnvPushSizeCheckFlag = "PUSH_SIZE_CHECK_FLAG"
)

// InternalPushingEnvironment returns an os environment to switch off hooks on push


models/issue.go (+35, -0)

@@ -775,6 +775,41 @@ func (issue *Issue) ChangeContent(doer *User, content string) (err error) {
return sess.Commit()
}

// ChangeRef changes issue ref, as the given user.
func (issue *Issue) ChangeRef(doer *User, newRef string) (err error) {
oldRef := issue.Ref
issue.Ref = newRef
if oldRef == newRef {
return nil
}

sess := x.NewSession()
defer sess.Close()
if err = sess.Begin(); err != nil {
return err
}

if err = updateIssueCols(sess, issue, "ref"); err != nil {
sess.Rollback()
return fmt.Errorf("UpdateIssueCols: %v", err)
}

var opts = &CreateCommentOptions{
Type: CommentTypeRef,
Doer: doer,
Repo: issue.Repo,
Issue: issue,
OldRef: oldRef,
NewRef: newRef,
}
if _, err = createComment(sess, opts); err != nil {
sess.Rollback()
return err
}

return sess.Commit()
}

// GetTasks returns the amount of tasks in the issues content
func (issue *Issue) GetTasks() int {
return len(issueTasksPat.FindAllStringIndex(issue.Content, -1))
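
ChangeRef updates the ref column and records a CommentTypeRef comment in one session, so the issue timeline reflects the change. A hedged sketch of a caller (the actual handler wiring lives elsewhere in this PR, e.g. routers/repo/issue.go):

```go
package sketch

import (
	"fmt"

	"code.gitea.io/gitea/models"
)

// ChangeIssueRef wraps the new Issue.ChangeRef: the column update and the timeline
// comment are committed together, so either both happen or neither does.
func ChangeIssueRef(doer *models.User, issue *models.Issue, newRef string) error {
	if issue.Ref == newRef {
		return nil // ChangeRef would also return early, but skip the call entirely
	}
	if err := issue.ChangeRef(doer, newRef); err != nil {
		return fmt.Errorf("ChangeRef: %v", err)
	}
	return nil
}
```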


models/issue_comment.go (+2, -0)

@@ -90,6 +90,8 @@ const (
CommentTypeReviewRequest
// merge pull request
CommentTypeMergePull
// Ref changed
CommentTypeRef
)

// CommentTag defines comment tag type


models/repo_list.go (+1, -0)

@@ -221,6 +221,7 @@ const (
SearchOrderByHot SearchOrderBy = "(num_watches + num_stars + num_forks + clone_cnt) DESC"
SearchOrderByActive SearchOrderBy = "(num_issues + num_pulls + num_commit) DESC"
SearchOrderByWatches SearchOrderBy = "num_watches DESC"
SearchOrderByDefault SearchOrderBy = "recommend desc,num_stars DESC,updated_unix DESC"
)

// SearchRepositoryCondition creates a query condition according search repository options


models/user_business_analysis.go (+353, -44)

@@ -82,17 +82,27 @@ type UserBusinessAnalysisAll struct {
DataDate string `xorm:"NULL"`

//cloudbraintask
CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserLocation string `xorm:"NULL"`
CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"`

UserLocation string `xorm:"NULL"`

FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"`
CollectDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectedDataset int `xorm:"NOT NULL DEFAULT 0"`
RecommendDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectImage int `xorm:"NOT NULL DEFAULT 0"`
CollectedImage int `xorm:"NOT NULL DEFAULT 0"`
RecommendImage int `xorm:"NOT NULL DEFAULT 0"`
}

type UserBusinessAnalysis struct {
@@ -159,17 +169,27 @@ type UserBusinessAnalysis struct {

DataDate string `xorm:"NULL"`

CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserLocation string `xorm:"NULL"`
CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"`

UserLocation string `xorm:"NULL"`

FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"`
CollectDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectedDataset int `xorm:"NOT NULL DEFAULT 0"`
RecommendDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectImage int `xorm:"NOT NULL DEFAULT 0"`
CollectedImage int `xorm:"NOT NULL DEFAULT 0"`
RecommendImage int `xorm:"NOT NULL DEFAULT 0"`
}

type UserBusinessAnalysisQueryOptions struct {
@@ -410,8 +430,10 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
log.Info("truncate all data from table: " + tableName)
statictisSess.Exec("TRUNCATE TABLE " + tableName)

log.Info("pageStartTime:" + pageStartTime.Format("2006-01-02 15:04:05"))
log.Info("pageEndTime time:" + pageEndTime.Format("2006-01-02 15:04:05"))
StartTimeNextDay := pageStartTime.AddDate(0, 0, 1)
EndTimeNextDay := pageEndTime.AddDate(0, 0, 1)
log.Info("pageStartTime:" + pageStartTime.Format("2006-01-02 15:04:05") + " nextDay:" + StartTimeNextDay.Format("2006-01-02 15:04:05"))
log.Info("pageEndTime time:" + pageEndTime.Format("2006-01-02 15:04:05") + " nextDay:" + EndTimeNextDay.Format("2006-01-02 15:04:05"))

start_unix := pageStartTime.Unix()
end_unix := pageEndTime.Unix()
@@ -426,8 +448,8 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
CommentCountMap := queryComment(start_unix, end_unix)
FocusRepoCountMap := queryWatch(start_unix, end_unix)
StarRepoCountMap := queryStar(start_unix, end_unix)
WatchedCountMap := queryFollow(start_unix, end_unix)
CommitCodeSizeMap := queryCommitCodeSize(start_unix, end_unix)
WatchedCountMap, WatchOtherMap := queryFollow(start_unix, end_unix)
CommitCodeSizeMap := queryCommitCodeSize(StartTimeNextDay.Unix(), EndTimeNextDay.Unix())
CommitDatasetSizeMap, CommitDatasetNumMap := queryDatasetSize(start_unix, end_unix)
SolveIssueCountMap := querySolveIssue(start_unix, end_unix)
CreateRepoCountMap := queryUserCreateRepo(start_unix, end_unix)
@@ -436,6 +458,12 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
OpenIIndexMap := queryUserRepoOpenIIndex(startTime.Unix(), end_unix)
CloudBrainTaskMap, CloudBrainTaskItemMap := queryCloudBrainTask(start_unix, end_unix)
AiModelManageMap := queryUserModel(start_unix, end_unix)

CollectDataset, CollectedDataset := queryDatasetStars(start_unix, end_unix)
RecommendDataset := queryRecommedDataSet(start_unix, end_unix)
CollectImage, CollectedImage := queryImageStars(start_unix, end_unix)
RecommendImage := queryRecommedImage(start_unix, end_unix)

DataDate := currentTimeNow.Format("2006-01-02") + " 00:01"

cond := "type != 1 and is_active=true"
@@ -472,6 +500,7 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
dateRecordAll.IssueCount = getMapValue(dateRecordAll.ID, IssueCountMap)
dateRecordAll.CommentCount = getMapValue(dateRecordAll.ID, CommentCountMap)
dateRecordAll.FocusRepoCount = getMapValue(dateRecordAll.ID, FocusRepoCountMap)
dateRecordAll.FocusOtherUser = getMapValue(dateRecordAll.ID, WatchOtherMap)
dateRecordAll.StarRepoCount = getMapValue(dateRecordAll.ID, StarRepoCountMap)
dateRecordAll.WatchedCount = getMapValue(dateRecordAll.ID, WatchedCountMap)
dateRecordAll.CommitCodeSize = getMapValue(dateRecordAll.ID, CommitCodeSizeMap)
@@ -496,13 +525,20 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS
dateRecordAll.NpuInferenceJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_NpuInferenceJob", CloudBrainTaskItemMap)
dateRecordAll.GpuBenchMarkJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_GpuBenchMarkJob", CloudBrainTaskItemMap)
dateRecordAll.CommitModelCount = getMapValue(dateRecordAll.ID, AiModelManageMap)
dateRecordAll.UserIndex = getUserIndexFromAnalysisAll(dateRecordAll, ParaWeight)
userIndexMap[dateRecordAll.ID] = dateRecordAll.UserIndex
if maxUserIndex < dateRecordAll.UserIndex {
maxUserIndex = dateRecordAll.UserIndex
dateRecordAll.CollectDataset = getMapValue(dateRecordAll.ID, CollectDataset)
dateRecordAll.CollectedDataset = getMapValue(dateRecordAll.ID, CollectedDataset)
dateRecordAll.RecommendDataset = getMapValue(dateRecordAll.ID, RecommendDataset)
dateRecordAll.CollectImage = getMapValue(dateRecordAll.ID, CollectImage)
dateRecordAll.CollectedImage = getMapValue(dateRecordAll.ID, CollectedImage)
dateRecordAll.RecommendImage = getMapValue(dateRecordAll.ID, RecommendImage)

dateRecordAll.UserIndexPrimitive = getUserIndexFromAnalysisAll(dateRecordAll, ParaWeight)
userIndexMap[dateRecordAll.ID] = dateRecordAll.UserIndexPrimitive
if maxUserIndex < dateRecordAll.UserIndexPrimitive {
maxUserIndex = dateRecordAll.UserIndexPrimitive
}
if minUserIndex > dateRecordAll.UserIndex {
minUserIndex = dateRecordAll.UserIndex
if minUserIndex > dateRecordAll.UserIndexPrimitive {
minUserIndex = dateRecordAll.UserIndexPrimitive
}
dateRecordBatch = append(dateRecordBatch, dateRecordAll)
if len(dateRecordBatch) >= BATCH_INSERT_SIZE {
@@ -552,7 +588,7 @@ func insertTable(dateRecords []UserBusinessAnalysisAll, tableName string, static

insertBatchSql := "INSERT INTO public." + tableName +
"(id, count_date, code_merge_count, commit_count, issue_count, comment_count, focus_repo_count, star_repo_count, watched_count, gitea_age_month, commit_code_size, commit_dataset_size, " +
"commit_model_count, solve_issue_count, encyclopedias_count, regist_date, create_repo_count, login_count, open_i_index, email, name, data_date,cloud_brain_task_num,gpu_debug_job,npu_debug_job,gpu_train_job,npu_train_job,npu_inference_job,gpu_bench_mark_job,cloud_brain_run_time,commit_dataset_num,user_index,user_location) " +
"commit_model_count, solve_issue_count, encyclopedias_count, regist_date, create_repo_count, login_count, open_i_index, email, name, data_date,cloud_brain_task_num,gpu_debug_job,npu_debug_job,gpu_train_job,npu_train_job,npu_inference_job,gpu_bench_mark_job,cloud_brain_run_time,commit_dataset_num,user_index,user_location,focus_other_user,collect_dataset,collected_dataset,recommend_dataset,collect_image,collected_image,recommend_image,user_index_primitive) " +
"VALUES"

for i, record := range dateRecords {
@@ -560,7 +596,8 @@ func insertTable(dateRecords []UserBusinessAnalysisAll, tableName string, static
", " + fmt.Sprint(record.IssueCount) + ", " + fmt.Sprint(record.CommentCount) + ", " + fmt.Sprint(record.FocusRepoCount) + ", " + fmt.Sprint(record.StarRepoCount) +
", " + fmt.Sprint(record.WatchedCount) + ", " + fmt.Sprint(record.GiteaAgeMonth) + ", " + fmt.Sprint(record.CommitCodeSize) + ", " + fmt.Sprint(record.CommitDatasetSize) +
", " + fmt.Sprint(record.CommitModelCount) + ", " + fmt.Sprint(record.SolveIssueCount) + ", " + fmt.Sprint(record.EncyclopediasCount) + ", " + fmt.Sprint(record.RegistDate) +
", " + fmt.Sprint(record.CreateRepoCount) + ", " + fmt.Sprint(record.LoginCount) + ", " + fmt.Sprint(record.OpenIIndex) + ", '" + record.Email + "', '" + record.Name + "', '" + record.DataDate + "'," + fmt.Sprint(record.CloudBrainTaskNum) + "," + fmt.Sprint(record.GpuDebugJob) + "," + fmt.Sprint(record.NpuDebugJob) + "," + fmt.Sprint(record.GpuTrainJob) + "," + fmt.Sprint(record.NpuTrainJob) + "," + fmt.Sprint(record.NpuInferenceJob) + "," + fmt.Sprint(record.GpuBenchMarkJob) + "," + fmt.Sprint(record.CloudBrainRunTime) + "," + fmt.Sprint(record.CommitDatasetNum) + "," + fmt.Sprint(record.UserIndex) + ",'" + record.UserLocation + "')"
", " + fmt.Sprint(record.CreateRepoCount) + ", " + fmt.Sprint(record.LoginCount) + ", " + fmt.Sprint(record.OpenIIndex) + ", '" + record.Email + "', '" + record.Name + "', '" + record.DataDate + "'," + fmt.Sprint(record.CloudBrainTaskNum) + "," + fmt.Sprint(record.GpuDebugJob) + "," + fmt.Sprint(record.NpuDebugJob) + "," + fmt.Sprint(record.GpuTrainJob) + "," + fmt.Sprint(record.NpuTrainJob) + "," + fmt.Sprint(record.NpuInferenceJob) + "," + fmt.Sprint(record.GpuBenchMarkJob) + "," + fmt.Sprint(record.CloudBrainRunTime) + "," + fmt.Sprint(record.CommitDatasetNum) + "," + fmt.Sprint(record.UserIndex) + ",'" + record.UserLocation + "'," +
fmt.Sprint(record.FocusOtherUser) + "," + fmt.Sprint(record.CollectDataset) + "," + fmt.Sprint(record.CollectedDataset) + "," + fmt.Sprint(record.RecommendDataset) + "," + fmt.Sprint(record.CollectImage) + "," + fmt.Sprint(record.CollectedImage) + "," + fmt.Sprint(record.RecommendImage) + "," + fmt.Sprint(record.UserIndexPrimitive) + ")"
if i < (len(dateRecords) - 1) {
insertBatchSql += ","
}
@@ -628,7 +665,7 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time,
CommentCountMap := queryComment(start_unix, end_unix)
FocusRepoCountMap := queryWatch(start_unix, end_unix)
StarRepoCountMap := queryStar(start_unix, end_unix)
WatchedCountMap := queryFollow(start_unix, end_unix)
WatchedCountMap, WatchOtherMap := queryFollow(start_unix, end_unix)

CommitCodeSizeMap, err := GetAllUserKPIStats()
if err != nil {
@@ -643,6 +680,12 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time,
OpenIIndexMap := queryUserRepoOpenIIndex(start_unix, end_unix)
CloudBrainTaskMap, CloudBrainTaskItemMap := queryCloudBrainTask(start_unix, end_unix)
AiModelManageMap := queryUserModel(start_unix, end_unix)

CollectDataset, CollectedDataset := queryDatasetStars(start_unix, end_unix)
RecommendDataset := queryRecommedDataSet(start_unix, end_unix)
CollectImage, CollectedImage := queryImageStars(start_unix, end_unix)
RecommendImage := queryRecommedImage(start_unix, end_unix)

statictisSess := xStatistic.NewSession()
defer statictisSess.Close()

@@ -683,13 +726,12 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time,
dateRecord.FocusRepoCount = getMapValue(dateRecord.ID, FocusRepoCountMap)
dateRecord.StarRepoCount = getMapValue(dateRecord.ID, StarRepoCountMap)
dateRecord.WatchedCount = getMapValue(dateRecord.ID, WatchedCountMap)
dateRecord.FocusOtherUser = getMapValue(dateRecord.ID, WatchOtherMap)
if _, ok := CommitCodeSizeMap[dateRecord.Email]; !ok {
dateRecord.CommitCodeSize = 0
} else {
dateRecord.CommitCodeSize = int(CommitCodeSizeMap[dateRecord.Email].CommitLines)
}

dateRecord.CommitDatasetSize = getMapValue(dateRecord.ID, CommitDatasetSizeMap)
dateRecord.CommitDatasetNum = getMapValue(dateRecord.ID, CommitDatasetNumMap)
dateRecord.SolveIssueCount = getMapValue(dateRecord.ID, SolveIssueCountMap)
@@ -715,7 +757,15 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time,
dateRecord.GpuBenchMarkJob = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_GpuBenchMarkJob", CloudBrainTaskItemMap)
dateRecord.CloudBrainRunTime = getMapKeyStringValue(fmt.Sprint(dateRecord.ID)+"_CloudBrainRunTime", CloudBrainTaskItemMap)
dateRecord.CommitModelCount = getMapValue(dateRecord.ID, AiModelManageMap)
dateRecord.UserIndex = getUserIndex(dateRecord, ParaWeight)

dateRecord.CollectDataset = getMapValue(dateRecord.ID, CollectDataset)
dateRecord.CollectedDataset = getMapValue(dateRecord.ID, CollectedDataset)
dateRecord.RecommendDataset = getMapValue(dateRecord.ID, RecommendDataset)
dateRecord.CollectImage = getMapValue(dateRecord.ID, CollectImage)
dateRecord.CollectedImage = getMapValue(dateRecord.ID, CollectedImage)
dateRecord.RecommendImage = getMapValue(dateRecord.ID, RecommendImage)

dateRecord.UserIndexPrimitive = getUserIndex(dateRecord, ParaWeight)
setUserMetrics(userMetrics, userRecord, start_unix, end_unix, dateRecord)
_, err = statictisSess.Insert(&dateRecord)
if err != nil {
@@ -765,7 +815,7 @@ func setUserMetrics(userMetrics map[string]int, user *User, start_time int64, en
userMetrics["TotalActivateRegistUser"] = getMapKeyStringValue("TotalActivateRegistUser", userMetrics) + 1
}

if dateRecord.UserIndex > 0 || dateRecord.LoginCount > 0 {
if getUserActivate(dateRecord) > 0 {
userMetrics["HasActivityUser"] = getMapKeyStringValue("HasActivityUser", userMetrics) + 1
}

@@ -802,7 +852,12 @@ func getUserIndexFromAnalysisAll(dateRecord UserBusinessAnalysisAll, ParaWeight
result += float64(dateRecord.StarRepoCount) * getParaWeightValue("StarRepoCount", ParaWeight, 0.1)
result += float64(dateRecord.LoginCount) * getParaWeightValue("LoginCount", ParaWeight, 0.1)
result += float64(dateRecord.WatchedCount) * getParaWeightValue("WatchedCount", ParaWeight, 0.3)
result += float64(dateRecord.CommitCodeSize) * getParaWeightValue("CommitCodeSize", ParaWeight, 0.1)
codeLine := float64(dateRecord.CommitCodeSize)
limitCodeLine := getParaWeightValue("LimitCommitCodeSize", ParaWeight, 1000)
if codeLine >= limitCodeLine {
codeLine = limitCodeLine
}
result += codeLine * getParaWeightValue("CommitCodeSize", ParaWeight, 0.01)
result += float64(dateRecord.SolveIssueCount) * getParaWeightValue("SolveIssueCount", ParaWeight, 0.2)
result += float64(dateRecord.EncyclopediasCount) * getParaWeightValue("EncyclopediasCount", ParaWeight, 0.1)
result += float64(dateRecord.CreateRepoCount) * getParaWeightValue("CreateRepoCount", ParaWeight, 0.05)
@@ -810,6 +865,34 @@ func getUserIndexFromAnalysisAll(dateRecord UserBusinessAnalysisAll, ParaWeight
result += float64(dateRecord.CommitModelCount) * getParaWeightValue("CommitModelCount", ParaWeight, 0.2)
result += dateRecord.OpenIIndex * getParaWeightValue("OpenIIndex", ParaWeight, 0.1)

result += float64(dateRecord.CollectDataset) * getParaWeightValue("CollectDataset", ParaWeight, 0.1)
result += float64(dateRecord.CollectedDataset) * getParaWeightValue("CollectedDataset", ParaWeight, 0.1)
result += float64(dateRecord.RecommendDataset) * getParaWeightValue("RecommendDataset", ParaWeight, 0.2)
result += float64(dateRecord.CollectImage) * getParaWeightValue("CollectImage", ParaWeight, 0.1)
result += float64(dateRecord.CollectedImage) * getParaWeightValue("CollectedImage", ParaWeight, 0.1)
result += float64(dateRecord.RecommendImage) * getParaWeightValue("RecommendImage", ParaWeight, 0.2)

return result
}

func getUserActivate(dateRecord UserBusinessAnalysis) int {
var result int
result += dateRecord.CodeMergeCount
result += dateRecord.CommitCount
result += dateRecord.IssueCount
result += dateRecord.CommentCount
result += dateRecord.FocusRepoCount
result += dateRecord.StarRepoCount
result += dateRecord.SolveIssueCount
result += dateRecord.EncyclopediasCount
result += dateRecord.CreateRepoCount
result += dateRecord.CloudBrainTaskNum
result += dateRecord.CommitModelCount
result += dateRecord.CommitDatasetNum
result += dateRecord.FocusOtherUser
result += dateRecord.CollectDataset
result += dateRecord.CollectImage
result += dateRecord.CommitCodeSize
return result
}

@@ -831,7 +914,12 @@ func getUserIndex(dateRecord UserBusinessAnalysis, ParaWeight map[string]float64
result += float64(dateRecord.StarRepoCount) * getParaWeightValue("StarRepoCount", ParaWeight, 0.1)
result += float64(dateRecord.LoginCount) * getParaWeightValue("LoginCount", ParaWeight, 0.1)
result += float64(dateRecord.WatchedCount) * getParaWeightValue("WatchedCount", ParaWeight, 0.3)
result += float64(dateRecord.CommitCodeSize) * getParaWeightValue("CommitCodeSize", ParaWeight, 0.1)
codeLine := float64(dateRecord.CommitCodeSize)
limitCodeLine := getParaWeightValue("LimitCommitCodeSize", ParaWeight, 1000)
if codeLine >= limitCodeLine {
codeLine = limitCodeLine
}
result += codeLine * getParaWeightValue("CommitCodeSize", ParaWeight, 0.01)
result += float64(dateRecord.SolveIssueCount) * getParaWeightValue("SolveIssueCount", ParaWeight, 0.2)
result += float64(dateRecord.EncyclopediasCount) * getParaWeightValue("EncyclopediasCount", ParaWeight, 0.1)
result += float64(dateRecord.CreateRepoCount) * getParaWeightValue("CreateRepoCount", ParaWeight, 0.05)
@@ -839,6 +927,13 @@ func getUserIndex(dateRecord UserBusinessAnalysis, ParaWeight map[string]float64
result += float64(dateRecord.CommitModelCount) * getParaWeightValue("CommitModelCount", ParaWeight, 0.2)
result += dateRecord.OpenIIndex * getParaWeightValue("OpenIIndex", ParaWeight, 0.1)

result += float64(dateRecord.CollectDataset) * getParaWeightValue("CollectDataset", ParaWeight, 0.1)
result += float64(dateRecord.CollectedDataset) * getParaWeightValue("CollectedDataset", ParaWeight, 0.1)
result += float64(dateRecord.RecommendDataset) * getParaWeightValue("RecommendDataset", ParaWeight, 0.2)
result += float64(dateRecord.CollectImage) * getParaWeightValue("CollectImage", ParaWeight, 0.1)
result += float64(dateRecord.CollectedImage) * getParaWeightValue("CollectedImage", ParaWeight, 0.1)
result += float64(dateRecord.RecommendImage) * getParaWeightValue("RecommendImage", ParaWeight, 0.2)

return result
}

@@ -1129,17 +1224,18 @@ func queryStar(start_unix int64, end_unix int64) map[int64]int {
return resultMap
}

func queryFollow(start_unix int64, end_unix int64) map[int64]int {
func queryFollow(start_unix int64, end_unix int64) (map[int64]int, map[int64]int) {

sess := x.NewSession()
defer sess.Close()
resultMap := make(map[int64]int)
resultFocusedByOtherMap := make(map[int64]int)
cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)

count, err := sess.Where(cond).Count(new(Follow))
if err != nil {
log.Info("query follow error. return.")
return resultMap
return resultMap, resultFocusedByOtherMap
}
var indexTotal int64
indexTotal = 0
@@ -1155,6 +1251,11 @@ func queryFollow(start_unix int64, end_unix int64) map[int64]int {
} else {
resultMap[followRecord.FollowID] += 1
}
if _, ok := resultFocusedByOtherMap[followRecord.UserID]; !ok {
resultFocusedByOtherMap[followRecord.UserID] = 1
} else {
resultFocusedByOtherMap[followRecord.UserID] += 1
}
}

indexTotal += PAGE_SIZE
@@ -1163,7 +1264,215 @@ func queryFollow(start_unix int64, end_unix int64) map[int64]int {
}
}

return resultMap
return resultMap, resultFocusedByOtherMap
}

func queryRecommedDataSet(start_unix int64, end_unix int64) map[int64]int {
sess := x.NewSession()
defer sess.Close()
userIdDdatasetMap := make(map[int64]int)
cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix) + " and recommend=true"
count, err := sess.Where(cond).Count(new(Dataset))
if err != nil {
log.Info("query recommend dataset error. return.")
return userIdDdatasetMap
}
var indexTotal int64
indexTotal = 0
for {
sess.Select("id,user_id,recommend").Where(cond).Table(new(Dataset)).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
datasetList := make([]*Dataset, 0)
sess.Find(&datasetList)
log.Info("query datasetList size=" + fmt.Sprint(len(datasetList)))
for _, datasetRecord := range datasetList {
if _, ok := userIdDdatasetMap[datasetRecord.UserID]; !ok {
userIdDdatasetMap[datasetRecord.UserID] = 1
} else {
userIdDdatasetMap[datasetRecord.UserID] += 1
}
}
indexTotal += PAGE_SIZE
if indexTotal >= count {
break
}
}
return userIdDdatasetMap
}

func queryAllDataSet() (map[int64]int64, map[int64]int64) {
sess := x.NewSession()
defer sess.Close()
datasetUserIdMap := make(map[int64]int64)
userIdDdatasetMap := make(map[int64]int64)
count, err := sess.Count(new(Dataset))
if err != nil {
log.Info("query dataset error. return.")
return datasetUserIdMap, userIdDdatasetMap
}
var indexTotal int64
indexTotal = 0
for {
sess.Select("id,user_id").Table(new(Dataset)).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
datasetList := make([]*Dataset, 0)
sess.Find(&datasetList)
log.Info("query datasetList size=" + fmt.Sprint(len(datasetList)))
for _, datasetRecord := range datasetList {
datasetUserIdMap[datasetRecord.ID] = datasetRecord.UserID
if _, ok := userIdDdatasetMap[datasetRecord.UserID]; !ok {
userIdDdatasetMap[datasetRecord.UserID] = 1
} else {
userIdDdatasetMap[datasetRecord.UserID] += 1
}
}
indexTotal += PAGE_SIZE
if indexTotal >= count {
break
}
}
return datasetUserIdMap, userIdDdatasetMap
}

func queryRecommedImage(start_unix int64, end_unix int64) map[int64]int {
sess := x.NewSession()
defer sess.Close()
userIdImageMap := make(map[int64]int)
cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix) + " and type=5"
count, err := sess.Where(cond).Count(new(Image))
if err != nil {
log.Info("query recommend image error. return.")
return userIdImageMap
}
var indexTotal int64
indexTotal = 0
for {
sess.Select("id,uid,type").Where(cond).Table(new(Image)).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
imageList := make([]*Image, 0)
sess.Find(&imageList)
log.Info("query imageList size=" + fmt.Sprint(len(imageList)))
for _, imageRecord := range imageList {
if _, ok := userIdImageMap[imageRecord.UID]; !ok {
userIdImageMap[imageRecord.UID] = 1
} else {
userIdImageMap[imageRecord.UID] += 1
}
}
indexTotal += PAGE_SIZE
if indexTotal >= count {
break
}
}
return userIdImageMap
}

func queryAllImage() (map[int64]int64, map[int64]int64) {
sess := x.NewSession()
defer sess.Close()
imageUserIdMap := make(map[int64]int64)
userIdDImageMap := make(map[int64]int64)
count, err := sess.Count(new(Image))
if err != nil {
log.Info("query image error. return.")
return imageUserIdMap, userIdDImageMap
}
var indexTotal int64
indexTotal = 0
for {
sess.Select("id,uid").Table(new(Image)).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
imageList := make([]*Image, 0)
sess.Find(&imageList)
log.Info("query imageList size=" + fmt.Sprint(len(imageList)))
for _, imageRecord := range imageList {
imageUserIdMap[imageRecord.ID] = imageRecord.UID
if _, ok := userIdDImageMap[imageRecord.UID]; !ok {
userIdDImageMap[imageRecord.UID] = 1
} else {
userIdDImageMap[imageRecord.UID] += 1
}
}
indexTotal += PAGE_SIZE
if indexTotal >= count {
break
}
}
return imageUserIdMap, userIdDImageMap
}

func queryDatasetStars(start_unix int64, end_unix int64) (map[int64]int, map[int64]int) {
sess := x.NewSession()
defer sess.Close()
datasetCollect := make(map[int64]int)
datasetCollected := make(map[int64]int)
datasetUserIdMap, _ := queryAllDataSet()
cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)
count, err := sess.Where(cond).Count(new(DatasetStar))
if err != nil {
log.Info("query follow error. return.")
return datasetCollect, datasetCollected
}
var indexTotal int64
indexTotal = 0
for {
sess.Select("id,uid,dataset_id").Table(new(DatasetStar)).Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
datasetStarList := make([]*DatasetStar, 0)
sess.Find(&datasetStarList)
log.Info("query datasetStarList size=" + fmt.Sprint(len(datasetStarList)))
for _, datasetStarRecord := range datasetStarList {
if _, ok := datasetCollect[datasetStarRecord.UID]; !ok {
datasetCollect[datasetStarRecord.UID] = 1
} else {
datasetCollect[datasetStarRecord.UID] += 1
}
if _, ok := datasetCollected[datasetUserIdMap[datasetStarRecord.DatasetID]]; !ok {
datasetCollected[datasetUserIdMap[datasetStarRecord.DatasetID]] = 1
} else {
datasetCollected[datasetUserIdMap[datasetStarRecord.DatasetID]] += 1
}
}
indexTotal += PAGE_SIZE
if indexTotal >= count {
break
}
}
return datasetCollect, datasetCollected
}

func queryImageStars(start_unix int64, end_unix int64) (map[int64]int, map[int64]int) {
sess := x.NewSession()
defer sess.Close()
imageCollect := make(map[int64]int)
imageCollected := make(map[int64]int)
imageUserIdMap, _ := queryAllImage()
cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix)
count, err := sess.Where(cond).Count(new(ImageStar))
if err != nil {
log.Info("query follow error. return.")
return imageCollect, imageCollected
}
var indexTotal int64
indexTotal = 0
for {
sess.Select("id,uid,image_id").Table(new(ImageStar)).Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
imageStarList := make([]*ImageStar, 0)
sess.Find(&imageStarList)
log.Info("query imageStarList size=" + fmt.Sprint(len(imageStarList)))
for _, imageStarRecord := range imageStarList {
if _, ok := imageCollect[imageStarRecord.UID]; !ok {
imageCollect[imageStarRecord.UID] = 1
} else {
imageCollect[imageStarRecord.UID] += 1
}
if _, ok := imageCollected[imageUserIdMap[imageStarRecord.ImageID]]; !ok {
imageCollected[imageUserIdMap[imageStarRecord.ImageID]] = 1
} else {
imageCollected[imageUserIdMap[imageStarRecord.ImageID]] += 1
}
}
indexTotal += PAGE_SIZE
if indexTotal >= count {
break
}
}
return imageCollect, imageCollected
}

func queryDatasetSize(start_unix int64, end_unix int64) (map[int64]int, map[int64]int) {
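
The heart of this file's change is the scoring pipeline: queryFollow now also reports who was followed (FocusOtherUser), new collect/recommend counters for datasets and images feed the score, the raw weighted sum is kept as UserIndexPrimitive (UserIndex is then apparently derived from it using the tracked min/max), and committed code lines are capped before weighting so one massive push cannot dominate. A self-contained sketch of that capping step, with a stand-in for getParaWeightValue (assumed to return the configured weight or the given default):

```go
package main

import "fmt"

// weightedCodeLines mirrors the capping added to getUserIndex and
// getUserIndexFromAnalysisAll: code lines are clamped to LimitCommitCodeSize
// (default 1000) before being multiplied by the CommitCodeSize weight (default 0.01).
func weightedCodeLines(commitCodeSize int, para map[string]float64) float64 {
	get := func(key string, def float64) float64 { // stand-in for getParaWeightValue
		if v, ok := para[key]; ok {
			return v
		}
		return def
	}
	codeLine := float64(commitCodeSize)
	if limit := get("LimitCommitCodeSize", 1000); codeLine >= limit {
		codeLine = limit
	}
	return codeLine * get("CommitCodeSize", 0.01)
}

func main() {
	fmt.Println(weightedCodeLines(500, nil))    // 5  -- under the cap
	fmt.Println(weightedCodeLines(250000, nil)) // 10 -- clamped to 1000 lines
}
```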


models/user_business_struct.go (+122, -66)

@@ -45,17 +45,26 @@ type UserBusinessAnalysisCurrentYear struct {
Name string `xorm:"NOT NULL"`
DataDate string `xorm:"NULL"`

CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserLocation string `xorm:"NULL"`
CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"`
UserLocation string `xorm:"NULL"`

FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"`
CollectDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectedDataset int `xorm:"NOT NULL DEFAULT 0"`
RecommendDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectImage int `xorm:"NOT NULL DEFAULT 0"`
CollectedImage int `xorm:"NOT NULL DEFAULT 0"`
RecommendImage int `xorm:"NOT NULL DEFAULT 0"`
}

type UserBusinessAnalysisLast30Day struct {
@@ -101,17 +110,26 @@ type UserBusinessAnalysisLast30Day struct {
Name string `xorm:"NOT NULL"`
DataDate string `xorm:"NULL"`

CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserLocation string `xorm:"NULL"`
CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"`
UserLocation string `xorm:"NULL"`

FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"`
CollectDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectedDataset int `xorm:"NOT NULL DEFAULT 0"`
RecommendDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectImage int `xorm:"NOT NULL DEFAULT 0"`
CollectedImage int `xorm:"NOT NULL DEFAULT 0"`
RecommendImage int `xorm:"NOT NULL DEFAULT 0"`
}

type UserBusinessAnalysisLastMonth struct {
@@ -157,17 +175,26 @@ type UserBusinessAnalysisLastMonth struct {
Name string `xorm:"NOT NULL"`
DataDate string `xorm:"NULL"`

CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserLocation string `xorm:"NULL"`
CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"`
UserLocation string `xorm:"NULL"`

FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"`
CollectDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectedDataset int `xorm:"NOT NULL DEFAULT 0"`
RecommendDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectImage int `xorm:"NOT NULL DEFAULT 0"`
CollectedImage int `xorm:"NOT NULL DEFAULT 0"`
RecommendImage int `xorm:"NOT NULL DEFAULT 0"`
}

type UserBusinessAnalysisCurrentMonth struct {
@@ -213,17 +240,26 @@ type UserBusinessAnalysisCurrentMonth struct {
Name string `xorm:"NOT NULL"`
DataDate string `xorm:"NULL"`

CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserLocation string `xorm:"NULL"`
CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"`
UserLocation string `xorm:"NULL"`

FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"`
CollectDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectedDataset int `xorm:"NOT NULL DEFAULT 0"`
RecommendDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectImage int `xorm:"NOT NULL DEFAULT 0"`
CollectedImage int `xorm:"NOT NULL DEFAULT 0"`
RecommendImage int `xorm:"NOT NULL DEFAULT 0"`
}

type UserBusinessAnalysisCurrentWeek struct {
@@ -269,17 +305,27 @@ type UserBusinessAnalysisCurrentWeek struct {
Name string `xorm:"NOT NULL"`
DataDate string `xorm:"NULL"`

CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserLocation string `xorm:"NULL"`
CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"`

UserLocation string `xorm:"NULL"`

FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"`
CollectDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectedDataset int `xorm:"NOT NULL DEFAULT 0"`
RecommendDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectImage int `xorm:"NOT NULL DEFAULT 0"`
CollectedImage int `xorm:"NOT NULL DEFAULT 0"`
RecommendImage int `xorm:"NOT NULL DEFAULT 0"`
}

type UserBusinessAnalysisYesterday struct {
@@ -325,17 +371,27 @@ type UserBusinessAnalysisYesterday struct {
Name string `xorm:"NOT NULL"`
DataDate string `xorm:"NULL"`

CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserLocation string `xorm:"NULL"`
CloudBrainTaskNum int `xorm:"NOT NULL DEFAULT 0"`
GpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
NpuDebugJob int `xorm:"NOT NULL DEFAULT 0"`
GpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuTrainJob int `xorm:"NOT NULL DEFAULT 0"`
NpuInferenceJob int `xorm:"NOT NULL DEFAULT 0"`
GpuBenchMarkJob int `xorm:"NOT NULL DEFAULT 0"`
CloudBrainRunTime int `xorm:"NOT NULL DEFAULT 0"`
CommitDatasetNum int `xorm:"NOT NULL DEFAULT 0"`
UserIndex float64 `xorm:"NOT NULL DEFAULT 0"`
UserIndexPrimitive float64 `xorm:"NOT NULL DEFAULT 0"`

UserLocation string `xorm:"NULL"`

FocusOtherUser int `xorm:"NOT NULL DEFAULT 0"`
CollectDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectedDataset int `xorm:"NOT NULL DEFAULT 0"`
RecommendDataset int `xorm:"NOT NULL DEFAULT 0"`
CollectImage int `xorm:"NOT NULL DEFAULT 0"`
CollectedImage int `xorm:"NOT NULL DEFAULT 0"`
RecommendImage int `xorm:"NOT NULL DEFAULT 0"`
}

type UserAnalysisPara struct {


modules/auth/cloudbrain.go (+10, -0)

@@ -33,6 +33,16 @@ type CommitImageCloudBrainForm struct {
Topics string `form:"topics"`
}

type CommitAdminImageCloudBrainForm struct {
Description string `form:"description" binding:"Required"`
Type int `form:"type" binding:"Required"`
Tag string `form:"tag" binding:"Required;MaxSize(100)" `
IsPrivate bool `form:"isPrivate" binding:"Required"`
Topics string `form:"topics"`
Place string `form:"place" binding:"Required"`
IsRecommend bool `form:"isRecommend" binding:"Required"`
}

type EditImageCloudBrainForm struct {
ID int64 `form:"id" binding:"Required"`
Description string `form:"description" binding:"Required"`


modules/cloudbrain/cloudbrain.go (+1, -0)

@@ -475,6 +475,7 @@ func RestartTask(ctx *context.Context, task *models.Cloudbrain, newID *string) e
ComputeResource: task.ComputeResource,
CreatedUnix: createTime,
UpdatedUnix: createTime,
BranchName: task.BranchName,
}

err = models.RestartCloudbrain(task, newTask)


modules/cloudbrain/resty.go (+40, -1)

@@ -312,12 +312,51 @@ sendjob:
return nil
})
if err == nil {

go updateImageStatus(image, isSetCreatedUnix, createTime)
}
return err
}

func CommitAdminImage(params models.CommitImageParams) error {

exist, err := models.IsImageExist(params.ImageTag)

if err != nil {
return fmt.Errorf("resty CommitImage: %v", err)
}
if exist {
return models.ErrorImageTagExist{
Tag: params.ImageTag,
}
}

image := models.Image{
CloudbrainType: params.CloudBrainType,
UID: params.UID,
IsPrivate: params.IsPrivate,
Tag: params.ImageTag,
Description: params.ImageDescription,
Place: params.Place,
Status: models.IMAGE_STATUS_SUCCESS,
Type: params.Type,
}

err = models.WithTx(func(ctx models.DBContext) error {

if err := models.CreateLocalImage(&image); err != nil {
log.Error("Failed to insert image record.", err)
return fmt.Errorf("resty CommitImage: %v", err)
}

if err := models.SaveImageTopics(image.ID, params.Topics...); err != nil {
log.Error("Failed to insert image record.", err)
return fmt.Errorf("resty CommitImage: %v", err)
}
return nil
})
return err
}

func updateImageStatus(image models.Image, isSetCreatedUnix bool, createTime time.Time) {
attemps := 5
commitSuccess := false
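
CommitAdminImage differs from the task-based CommitImage path: it inserts the image row directly with IMAGE_STATUS_SUCCESS, so it fits platform images that already exist in a registry (the new Place field) rather than images committed from a running debug container. A hedged sketch of a caller; the field selectors are taken from the function body above, and whether they are direct or embedded fields of CommitImageParams is not shown in this diff:

```go
package sketch

import (
	"code.gitea.io/gitea/models"
	"code.gitea.io/gitea/modules/cloudbrain"
)

// CommitPlatformImage registers an already-built registry image on behalf of an admin.
func CommitPlatformImage(adminUID int64, tag, description, place string, recommend bool) error {
	var params models.CommitImageParams
	params.UID = adminUID
	params.ImageTag = tag
	params.ImageDescription = description
	params.IsPrivate = false
	params.Place = place                             // new field: where the image lives
	params.Type = models.GetRecommondType(recommend) // exported by this PR for this reuse
	params.CloudBrainType = 0                        // assumed: the cloudbrain-one (GPU) type
	return cloudbrain.CommitAdminImage(params)
}
```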


modules/repository/hooks.go (+54, -3)

@@ -19,7 +19,11 @@ import (
"xorm.io/builder"
)

func getHookTemplates() (hookNames, hookTpls, giteaHookTpls []string) {
const (
SIZE_LIMIT_SCRIPT_NAME = "size_limit"
)

func getHookTemplates() (hookNames, hookTpls, giteaHookTpls, sizeLimitTpls []string) {
hookNames = []string{"pre-receive", "update", "post-receive"}
hookTpls = []string{
fmt.Sprintf("#!/usr/bin/env %s\ndata=$(cat)\nexitcodes=\"\"\nhookname=$(basename $0)\nGIT_DIR=${GIT_DIR:-$(dirname $0)}\n\nfor hook in ${GIT_DIR}/hooks/${hookname}.d/*; do\ntest -x \"${hook}\" && test -f \"${hook}\" || continue\necho \"${data}\" | \"${hook}\"\nexitcodes=\"${exitcodes} $?\"\ndone\n\nfor i in ${exitcodes}; do\n[ ${i} -eq 0 ] || exit ${i}\ndone\n", setting.ScriptType),
@@ -31,6 +35,11 @@ func getHookTemplates() (hookNames, hookTpls, giteaHookTpls []string) {
fmt.Sprintf("#!/usr/bin/env %s\n\"%s\" hook --config='%s' update $1 $2 $3\n", setting.ScriptType, setting.AppPath, setting.CustomConf),
fmt.Sprintf("#!/usr/bin/env %s\n\"%s\" hook --config='%s' post-receive\n", setting.ScriptType, setting.AppPath, setting.CustomConf),
}
sizeLimitTpls = []string{
fmt.Sprintf("#!/usr/bin/env %s\n\n\nset -o pipefail\n\nreadonly DEFAULT_FILE_MAXSIZE_MB=\"30\" \nreadonly CONFIG_NAME=\"hooks.maxfilesize\"\nreadonly NULLSHA=\"0000000000000000000000000000000000000000\"\nreadonly EXIT_SUCCESS=0\nreadonly EXIT_FAILURE=1\nreadonly DEFAULT_REPO_MAXSIZE_MB=\"1024\" \nreadonly CHECK_FLAG_ON=1\n\n\nstatus=\"$EXIT_SUCCESS\"\n\n# skip this hook entirely if shell check is not open\ncheck_flag=${PUSH_SIZE_CHECK_FLAG}\nif [[ $check_flag != $CHECK_FLAG_ON ]]; then\nexit $EXIT_SUCCESS\nfi\n\n\n#######################################\n# check the file max size limit\n#######################################\n\n# get maximum filesize (from repository-specific config)\nmaxsize_mb=\"${REPO_MAX_FILE_SIZE}\"\n\nif [[ \"$?\" != $EXIT_SUCCESS ]]; then\necho \"failed to get ${CONFIG_NAME} from config\"\nexit \"$EXIT_FAILURE\"\nfi\n\npush_size=\"0\"\n# read lines from stdin (format: \"<oldref> <newref> <refname>\\n\")\nwhile read oldref newref refname; do\n# skip branch deletions\nif [[ \"$newref\" == \"$NULLSHA\" ]]; then\n continue\nfi\n\n# find large objects\n# check all objects from $oldref (possible $NULLSHA) to $newref, but\n# skip all objects that have already been accepted (i.e. are referenced by\n# another branch or tag).\n\nif [[ \"$oldref\" == \"$NULLSHA\" ]]; then\n target=\"$newref\"\nelse\n target=\"${oldref}..${newref}\"\nfi\nmaxsize=`expr $maxsize_mb \\* 1048576` \n\n# find objects in this push_size\n# print like:\n# 08da8e2ab9ae4095bf94dd71ac913132b880b463 commit 214\n# 43e993b768ede5740e8c65de2ed6edec25053ea1 tree 185\n# 4476971d76569039df7569af1b8d03c288f6b193 blob 20167318 b0417e6593a1.zip\nfiles=\"$(git rev-list --objects \"$target\" --tags=\\* | \\\n git cat-file $'--batch-check=%%(objectname) %%(objecttype) %%(objectsize) %%(rest)')\"\n \nif [[ \"$?\" != $EXIT_SUCCESS ]]; then\n echo \"failed to check for large files in ref ${refname}\"\n continue\nfi\n\n# rewrite IFS to seperate line in $files\nIFS=$'\\n'\nfor file in $files; do\n # if don't unset IFS,temp_array=(${file}) will get error answer\n unset IFS\n temp_array=(${file})\n # add all commit files size\n push_size=`expr $push_size + ${temp_array[2]}`\n if [[ ${temp_array[2]} -gt $maxsize ]]; then\n\t if [[ \"$status\" == $EXIT_SUCCESS ]]; then\n\t\techo -e \"Error: Your push was rejected because it contains files larger than $(numfmt --to=iec \"$maxsize_mb\") Mb\"\n\t\techo \"oversize files:\"\n\t\tstatus=\"$EXIT_FAILURE\"\n\t fi\n\t echo -e \"\\033[31m- ${temp_array[3]} \\033[0m (ref: ${refname}) \"\n fi\ndone\n\nif [[ \"$status\" != $EXIT_SUCCESS ]]; then\n\texit \"$status\"\nfi\n\ndone\n\n#######################################\n# check the repo max size limit\n#######################################\nif [[ $push_size -eq \"0\" ]]; then\n\texit $EXIT_SUCCESS\nfi\n\n\nsizelimit_mb=\"${REPO_MAX_SIZE}\"\nlet sizelimit_b=$sizelimit_mb*1024*1024\n\n# repo size at here means the size of repo directory in server \nreposize_b=${REPO_CURRENT_SIZE}\n\ntotal=`expr $push_size + $reposize_b`\n\nif [ $total -gt $sizelimit_b ]; then\n echo \"Error: Your push was rejected because the repository size is large than $sizelimit_mb Mb\"\n exit $EXIT_FAILURE\nfi\n\n\nexit $EXIT_SUCCESS\n", setting.ScriptType, setting.CustomConf),
fmt.Sprintf(""),
fmt.Sprintf(""),
}
return
}

@@ -41,7 +50,7 @@ func CreateDelegateHooks(repoPath string) error {

// createDelegateHooks creates all the hooks scripts for the repo
func createDelegateHooks(repoPath string) (err error) {
hookNames, hookTpls, giteaHookTpls := getHookTemplates()
hookNames, hookTpls, giteaHookTpls, sizeLimitTpls := getHookTemplates()
hookDir := filepath.Join(repoPath, "hooks")

for i, hookName := range hookNames {
@@ -74,8 +83,26 @@ func createDelegateHooks(repoPath string) (err error) {
if err = ensureExecutable(newHookPath); err != nil {
return fmt.Errorf("Unable to set %s executable. Error %v", oldHookPath, err)
}

if err = writeHookTpl(generateHookScriptPath(hookDir, hookName, SIZE_LIMIT_SCRIPT_NAME), sizeLimitTpls[i]); err != nil {
return err
}
}

return nil
}

func writeHookTpl(hookPath, content string) error {
if content == "" {
return nil
}
if err := ioutil.WriteFile(hookPath, []byte(content), 0777); err != nil {
return fmt.Errorf("write new hook file '%s': %v", hookPath, err)
}

if err := ensureExecutable(hookPath); err != nil {
return fmt.Errorf("Unable to set %s executable. Error %v", hookPath, err)
}
return nil
}

@@ -101,7 +128,7 @@ func ensureExecutable(filename string) error {

// CheckDelegateHooks checks the hooks scripts for the repo
func CheckDelegateHooks(repoPath string) ([]string, error) {
hookNames, hookTpls, giteaHookTpls := getHookTemplates()
hookNames, hookTpls, giteaHookTpls, sizeLimitTpls := getHookTemplates()

hookDir := filepath.Join(repoPath, "hooks")
results := make([]string, 0, 10)
@@ -146,10 +173,34 @@ func CheckDelegateHooks(repoPath string) ([]string, error) {
if !checkExecutable(newHookPath) {
results = append(results, fmt.Sprintf("new hook file %s is not executable", newHookPath))
}
if err = checkHookFile(generateHookScriptPath(hookDir, hookName, SIZE_LIMIT_SCRIPT_NAME), sizeLimitTpls[i], results); err != nil {
return results, err
}
}
return results, nil
}

func generateHookScriptPath(hookDir, hookName, fileName string) string {
return filepath.Join(hookDir, hookName+".d", fileName)
}

func checkHookFile(filePath, tpl string, results []string) ([]string, error) {
if tpl == "" {
return results, nil
}
contents, err := ioutil.ReadFile(filePath)
if err != nil {
return results, err
}
if string(contents) != tpl {
results = append(results, fmt.Sprintf("hook file %s is out of date", filePath))
}
if !checkExecutable(filePath) {
results = append(results, fmt.Sprintf("hook file %s is not executable", filePath))
}
return results, nil
}

// SyncRepositoryHooks rewrites all repositories' pre-receive, update and post-receive hooks
// to make sure the binary and custom conf path are up-to-date.
func SyncRepositoryHooks(ctx context.Context) error {


+ 3
- 0
modules/setting/repository.go

@@ -56,6 +56,7 @@ var (
FileMaxSize int64
MaxFiles int
TotalMaxSize int64
ShellFlag int
} `ini:"-"`

// Repository local settings
@@ -125,6 +126,7 @@ var (
FileMaxSize int64
MaxFiles int
TotalMaxSize int64
ShellFlag int
}{
Enabled: true,
TempPath: "data/tmp/uploads",
@@ -132,6 +134,7 @@ var (
FileMaxSize: 30,
MaxFiles: 10,
TotalMaxSize: 1024,
ShellFlag: 0,
},

// Repository local settings


+ 26
- 0
modules/ssh/ssh.go

@@ -69,8 +69,17 @@ func sessionHandler(session ssh.Session) {
os.Environ(),
"SSH_ORIGINAL_COMMAND="+command,
"SKIP_MINWINSVC=1",
models.EnvRepoMaxFileSize+"="+fmt.Sprint(setting.Repository.Upload.FileMaxSize),
models.EnvRepoMaxSize+"="+fmt.Sprint(setting.Repository.RepoMaxSize),
models.EnvPushSizeCheckFlag+"="+fmt.Sprint(setting.Repository.Upload.ShellFlag),
)

if strings.HasPrefix(command, "git-receive-pack") {
repo := getRepoFromCommandStr(command)
if repo != nil {
cmd.Env = append(cmd.Env, models.EnvRepoSize+"="+fmt.Sprint(repo.Size))
}
}
stdout, err := cmd.StdoutPipe()
if err != nil {
log.Error("SSH: StdoutPipe: %v", err)
@@ -131,6 +140,23 @@ func sessionHandler(session ssh.Session) {
}
}

func getRepoFromCommandStr(command string) *models.Repository {
repoPath := strings.TrimPrefix(command, "git-receive-pack '")
repoPath = strings.TrimSuffix(repoPath, ".git'")
if repoPath != "" {
nameArray := strings.Split(repoPath, "/")
if len(nameArray) >= 2 {
ownerName := nameArray[0]
repoName := nameArray[1]
if repo, err := models.GetRepositoryByOwnerAndName(ownerName, repoName); err == nil {
return repo
}
}
}
return nil

}
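To make the parsing above concrete: the raw command delivered to the SSH session handler for a push looks like git-receive-pack 'owner/repo.git'. A minimal, self-contained sketch of the string handling (the repository lookup via models.GetRepositoryByOwnerAndName is left out):

package main

import (
	"fmt"
	"strings"
)

func main() {
	command := "git-receive-pack 'owner/repo.git'"
	// Strip the command prefix and the trailing ".git'" to get "owner/repo".
	repoPath := strings.TrimPrefix(command, "git-receive-pack '")
	repoPath = strings.TrimSuffix(repoPath, ".git'")
	parts := strings.Split(repoPath, "/")
	fmt.Println(parts[0], parts[1]) // owner repo
}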

func publicKeyHandler(ctx ssh.Context, key ssh.PublicKey) bool {
if ctx.User() != setting.SSH.BuiltinServerUser {
return false


+ 7
- 1
modules/storage/minio_ext.go

@@ -2,6 +2,7 @@ package storage

import (
"encoding/xml"
"errors"
"path"
"sort"
"strconv"
@@ -129,7 +130,7 @@ func NewMultiPartUpload(uuid string) (string, error) {
return core.NewMultipartUpload(bucketName, objectName, miniov6.PutObjectOptions{})
}

func CompleteMultiPartUpload(uuid string, uploadID string) (string, error) {
func CompleteMultiPartUpload(uuid string, uploadID string, totalChunks int) (string, error) {
client, core, err := getClients()
if err != nil {
log.Error("getClients failed:", err.Error())
@@ -146,6 +147,11 @@ func CompleteMultiPartUpload(uuid string, uploadID string) (string, error) {
return "", err
}

if len(partInfos) != totalChunks {
log.Error("ListObjectParts number(%d) is not equal the set total chunk number(%d)", len(partInfos), totalChunks)
return "", errors.New("the parts is not complete")
}

var complMultipartUpload completeMultipartUpload
for _, partInfo := range partInfos {
complMultipartUpload.Parts = append(complMultipartUpload.Parts, miniov6.CompletePart{


+ 52
- 38
modules/storage/obs.go

@@ -59,21 +59,55 @@ func ObsHasObject(path string) (bool, error) {
return hasObject, nil
}

func listAllParts(uuid, uploadID, key string) (output *obs.ListPartsOutput, err error) {
output = &obs.ListPartsOutput{}
partNumberMarker := 0
for {
temp, err := ObsCli.ListParts(&obs.ListPartsInput{
Bucket: setting.Bucket,
Key: key,
UploadId: uploadID,
MaxParts: MAX_LIST_PARTS,
PartNumberMarker: partNumberMarker,
})
if err != nil {
log.Error("ListParts failed:", err.Error())
return output, err
}

partNumberMarker = temp.NextPartNumberMarker
log.Info("uuid:%s, MaxParts:%d, PartNumberMarker:%d, NextPartNumberMarker:%d, len:%d", uuid, temp.MaxParts, temp.PartNumberMarker, temp.NextPartNumberMarker, len(temp.Parts))

for _, partInfo := range temp.Parts {
output.Parts = append(output.Parts, obs.Part{
PartNumber: partInfo.PartNumber,
ETag: partInfo.ETag,
})
}

if !temp.IsTruncated {
break
}
}

return output, nil
}
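The loop above keeps requesting pages of at most MAX_LIST_PARTS parts and advances PartNumberMarker until the response is no longer truncated. A self-contained sketch of that pagination pattern, with the obs client stubbed out by a fake page source:

package main

import "fmt"

// page stands in for obs.ListPartsOutput in this sketch.
type page struct {
	parts       []int
	nextMarker  int
	isTruncated bool
}

// fakeListParts pretends to be ObsCli.ListParts with a page size of 2.
func fakeListParts(marker int) page {
	all := []int{1, 2, 3, 4, 5}
	end := marker + 2
	if end >= len(all) {
		return page{parts: all[marker:], nextMarker: len(all), isTruncated: false}
	}
	return page{parts: all[marker:end], nextMarker: end, isTruncated: true}
}

func main() {
	var collected []int
	marker := 0
	for {
		p := fakeListParts(marker)
		collected = append(collected, p.parts...)
		marker = p.nextMarker
		if !p.isTruncated {
			break
		}
	}
	fmt.Println(collected) // [1 2 3 4 5]
}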

func GetObsPartInfos(uuid, uploadID, fileName string) (string, error) {
key := strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")

output, err := ObsCli.ListParts(&obs.ListPartsInput{
Bucket: setting.Bucket,
Key: key,
UploadId: uploadID,
})
allParts, err := listAllParts(uuid, uploadID, key)
if err != nil {
log.Error("ListParts failed:", err.Error())
log.Error("listAllParts failed: %v", err)
return "", err
}

var chunks string
for _, partInfo := range output.Parts {
for _, partInfo := range allParts.Parts {
chunks += strconv.Itoa(partInfo.PartNumber) + "-" + partInfo.ETag + ","
}

@@ -94,45 +128,25 @@ func NewObsMultiPartUpload(uuid, fileName string) (string, error) {
return output.UploadId, nil
}

func CompleteObsMultiPartUpload(uuid, uploadID, fileName string) error {
func CompleteObsMultiPartUpload(uuid, uploadID, fileName string, totalChunks int) error {
input := &obs.CompleteMultipartUploadInput{}
input.Bucket = setting.Bucket
input.Key = strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/")
input.UploadId = uploadID

partNumberMarker := 0
for {
output, err := ObsCli.ListParts(&obs.ListPartsInput{
Bucket: setting.Bucket,
Key: input.Key,
UploadId: uploadID,
MaxParts: MAX_LIST_PARTS,
PartNumberMarker: partNumberMarker,
})
if err != nil {
log.Error("ListParts failed:", err.Error())
return err
}

partNumberMarker = output.NextPartNumberMarker
log.Info("uuid:%s, MaxParts:%d, PartNumberMarker:%d, NextPartNumberMarker:%d, len:%d", uuid, output.MaxParts, output.PartNumberMarker, output.NextPartNumberMarker, len(output.Parts))

for _, partInfo := range output.Parts {
input.Parts = append(input.Parts, obs.Part{
PartNumber: partInfo.PartNumber,
ETag: partInfo.ETag,
})
}

if len(output.Parts) < output.MaxParts {
break
} else {
continue
}
allParts, err := listAllParts(uuid, uploadID, input.Key)
if err != nil {
log.Error("listAllParts failed: %v", err)
return err
}

break
if len(allParts.Parts) != totalChunks {
log.Error("listAllParts number(%d) is not equal the set total chunk number(%d)", len(allParts.Parts), totalChunks)
return errors.New("the parts is not complete")
}

input.Parts = allParts.Parts

output, err := ObsCli.CompleteMultipartUpload(input)
if err != nil {
log.Error("CompleteMultipartUpload failed:", err.Error())


+ 29
- 2
modules/templates/helper.go

@@ -40,6 +40,14 @@ import (
"github.com/editorconfig/editorconfig-core-go/v2"
)

const (
REF_HEADS_PREFIX = "refs/heads/"
REF_TAGS_PREFIX = "refs/tags/"
REF_TYPE_BRANCH = "branch"
REF_TYPE_TAG = "tag"
REF_TYPE_PATTERN = "(refs/heads/|refs/tags/)"
)

// Used from static.go && dynamic.go
var mailSubjectSplit = regexp.MustCompile(`(?m)^-{3,}[\s]*$`)

@@ -317,6 +325,8 @@ func NewFuncMap() []template.FuncMap {
"DatasetPathJoin": func(arr []string, index int, seq string) string {
return strings.Join(arr[1:index+1], seq)
},
"GetRefType": GetRefType,
"GetRefName": GetRefName,
}}
}

@@ -444,10 +454,12 @@ func SafeJS(raw string) template.JS {
func Str2html(raw string) template.HTML {
return template.HTML(markup.Sanitize(raw))
}

//
func subOne(length int)int{
return length-1
func subOne(length int) int {
return length - 1
}

// Escape escapes a HTML string
func Escape(raw string) string {
return html.EscapeString(raw)
@@ -758,3 +770,18 @@ func licenses() []string {
func tasks() []string {
return []string{"machine_translation", "question_answering_system", "information_retrieval", "knowledge_graph", "text_annotation", "text_categorization", "emotion_analysis", "language_modeling", "speech_recognition", "automatic_digest", "information_extraction", "description_generation", "image_classification", "face_recognition", "image_search", "target_detection", "image_description_generation", "vehicle_license_plate_recognition", "medical_image_analysis", "unmanned", "unmanned_security", "drone", "vr_ar", "2_d_vision", "2.5_d_vision", "3_d_reconstruction", "image_processing", "video_processing", "visual_input_system", "speech_coding", "speech_enhancement", "speech_synthesis"}
}

func GetRefType(ref string) string {
if strings.HasPrefix(ref, REF_HEADS_PREFIX) {
return REF_TYPE_BRANCH
}
if strings.HasPrefix(ref, REF_TAGS_PREFIX) {
return REF_TYPE_TAG
}
return ""
}

func GetRefName(ref string) string {
reg := regexp.MustCompile(REF_TYPE_PATTERN)
return reg.ReplaceAllString(ref, "")
}
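A standalone sketch of the behaviour these two template helpers (GetRefType and GetRefName) are expected to have for the ref prefixes handled above:

package main

import (
	"fmt"
	"regexp"
	"strings"
)

func main() {
	refs := []string{"refs/heads/develop", "refs/tags/v1.0"}
	pattern := regexp.MustCompile("(refs/heads/|refs/tags/)")
	for _, ref := range refs {
		// Classify the ref by its prefix, then strip the prefix to get the display name.
		refType := ""
		if strings.HasPrefix(ref, "refs/heads/") {
			refType = "branch"
		} else if strings.HasPrefix(ref, "refs/tags/") {
			refType = "tag"
		}
		fmt.Println(refType, pattern.ReplaceAllString(ref, ""))
	}
	// Output:
	// branch develop
	// tag v1.0
}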

+ 26
- 2
options/locale/locale_en-US.ini

@@ -507,8 +507,16 @@ static.CloudBrainTaskNum=CloudBrain Task Count
static.CloudBrainRunTime=CloudBrain Run Time
static.CommitDatasetNum=Commit Dataset Count
static.CommitModelCount=Commit Model Count
static.UserIndex=User Index
static.UserIndex=Normalized user index
static.UserIndexPrimitive=User Index
static.countdate=Count Date
static.FocusOtherUser=Focus Other User Count
static.CollectDataset=Collect Dataset Count
static.CollectedDataset=Collected Dataset Count
static.RecommendDataset=Recommended Dataset Count
static.CollectImage=Collect Image Count
static.CollectedImage=Collected Image Count
static.RecommendImage=Recommended Image Count
static.all=All
static.public.user_business_analysis_current_month=Current_Month
static.public.user_business_analysis_current_week=Current_Week
@@ -1324,6 +1332,7 @@ issues.new.labels = Labels
issues.new.add_labels_title = Apply labels
issues.new.no_label = No Label
issues.new.clear_labels = Clear labels
issues.new.clear_branch_tag = Clear branch or tag
issues.new.no_items = No items
issues.new.milestone = Milestone
issues.new.add_milestone_title = Set milestone
@@ -1353,6 +1362,13 @@ issues.remove_label_at = removed the <div class="ui label" style="color: %s\; ba
issues.add_milestone_at = `added this to the <b>%s</b> milestone %s`
issues.change_milestone_at = `modified the milestone from <b>%s</b> to <b>%s</b> %s`
issues.remove_milestone_at = `removed this from the <b>%s</b> milestone %s`

issues.add_branch_at=`added this to the <b>%s</b> branch %s`
issues.add_tag_at =`added this to the <b>%s</b> tag %s`
issues.change_branch_tag_at= `modified the branch/tag from <b>%s</b> to <b>%s</b> %s`
issues.remove_branch_at=`removed this from the <b>%s</b> branch %s`
issues.remove_tag_at=`removed this from the <b>%s</b> tag %s`

issues.deleted_milestone = `(deleted)`
issues.self_assign_at = `self-assigned this %s`
issues.add_assignee_at = `was assigned by <b>%s</b> %s`
@@ -1376,6 +1392,7 @@ issues.filter_type.assigned_to_you = Assigned to you
issues.filter_type.created_by_you = Created by you
issues.filter_type.mentioning_you = Mentioning you
issues.filter_sort = Sort
issues.filter_sort.default = Default
issues.filter_sort.latest = Newest
issues.filter_sort.oldest = Oldest
issues.filter_sort.recentupdate = Recently updated
@@ -1389,6 +1406,7 @@ issues.filter_sort.feweststars = Fewest stars
issues.filter_sort.mostforks = Most forks
issues.filter_sort.fewestforks = Fewest forks
issues.filter_sort.downloadtimes = Most downloaded
issues.filter_sort.moststars = Most stars
issues.action_open = Open
issues.action_close = Close
issues.action_label = Label
@@ -2502,11 +2520,16 @@ repos.contributor=Contributor
repos.yes=Yes
repos.no=No

images.recommend = Recommend
images.unrecommend = Unrecommend

datasets.dataset_manage_panel= Dataset Manage
datasets.owner=Owner
datasets.name=name
datasets.private=Private
datasets.recommend=Set recommend
datasets.unrecommend=Set unrecommend
datasets.only_recommend = Only show platform recommendations

cloudbrain.all_task_types=All Task Types
cloudbrain.all_computing_resources=All Computing Resources
@@ -2854,7 +2877,7 @@ mirror_sync_create = synced new reference <a href="%s/src/%s">%[2]s</a> to <a hr
mirror_sync_delete = synced and deleted reference <code>%[2]s</code> at <a href="%[1]s">%[3]s</a> from mirror
approve_pull_request = `approved <a href="%s/pulls/%s">%s#%[2]s</a>`
reject_pull_request = `suggested changes for <a href="%s/pulls/%s">%s#%[2]s</a>`
upload_dataset=`upload dataset <a href="%s/datasets?type=%s">%s</a>`
upload_dataset=`upload dataset <a href="%s/datasets">%s</a>`
task_gpudebugjob=`created CPU/GPU type debugging task<a href="%s/cloudbrain/%s">%s</a>`
task_npudebugjob=`created NPU type debugging task <a href="%s/modelarts/notebook/%s">%s</a>`
task_nputrainjob=`created NPU training task<a href="%s/modelarts/train-job/%s">%s</a>`
@@ -2964,6 +2987,7 @@ snn4imagenet_path = Snn4imagenet script path
brainscore_path = Brainscore script path
start_command = Start command
choose_mirror = select mirror or enter mirror path
input_mirror = Please enter image path
select_dataset = select dataset
specification = specification
select_specification = select specification


+ 26
- 2
options/locale/locale_zh-CN.ini

@@ -512,8 +512,16 @@ static.CloudBrainTaskNum=云脑任务数
static.CloudBrainRunTime=云脑运行时间(小时)
static.CommitDatasetNum=上传(提交)数据集文件数
static.CommitModelCount=提交模型数
static.UserIndex=用户指数
static.UserIndex=归一化用户指数
static.UserIndexPrimitive=用户指数
static.countdate=系统统计时间
static.FocusOtherUser=关注他人数
static.CollectDataset=收藏数据集
static.CollectedDataset=被收藏数据集
static.RecommendDataset=被推荐数据集数
static.CollectImage=收藏镜像数
static.CollectedImage=被收藏镜像数
static.RecommendImage=被推荐镜像数
static.all=所有
static.public.user_business_analysis_current_month=本月
static.public.user_business_analysis_current_week=本周
@@ -1336,6 +1344,7 @@ issues.new.labels=标签
issues.new.add_labels_title=添加标签
issues.new.no_label=未选择标签
issues.new.clear_labels=清除选中标签
issues.new.clear_branch_tag=清除选中分支/标签
issues.new.no_items=无可选项
issues.new.milestone=里程碑
issues.new.add_milestone_title=设置里程碑
@@ -1365,6 +1374,13 @@ issues.remove_label_at=删除了 <div class="ui label" style="color: %s\; backgr
issues.add_milestone_at=` %[2]s 添加了里程碑 <b>%[1]s</b>`
issues.change_milestone_at=`%[3]s 修改了里程碑从 <b>%[1]s</b> 到 <b>%[2]s</b>`
issues.remove_milestone_at=`%[2]s 删除了里程碑 <b>%[1]s</b>`

issues.add_branch_at=` %[2]s 添加了分支 <b>%[1]s</b>`
issues.add_tag_at =` %[2]s 添加了标签 <b>%[1]s</b>`
issues.change_branch_tag_at=`%[3]s 修改了分支/标签从 <b>%[1]s</b> 到 <b>%[2]s</b>`
issues.remove_branch_at=`%[2]s 删除了分支 <b>%[1]s</b>`
issues.remove_tag_at=`%[2]s 删除了标签 <b>%[1]s</b>`

issues.deleted_milestone= (已删除)
issues.self_assign_at=`于 %s 指派给自己`
issues.add_assignee_at=`于 %[2]s 被 <b>%[1]s</b> 指派`
@@ -1388,6 +1404,7 @@ issues.filter_type.assigned_to_you=指派给您的
issues.filter_type.created_by_you=由您创建的
issues.filter_type.mentioning_you=提及您的
issues.filter_sort=排序
issues.filter_sort.default=默认排序
issues.filter_sort.latest=最新创建
issues.filter_sort.oldest=最早创建
issues.filter_sort.recentupdate=最近更新
@@ -1401,6 +1418,7 @@ issues.filter_sort.feweststars=点赞由少到多
issues.filter_sort.mostforks=派生由多到少
issues.filter_sort.fewestforks=派生由少到多
issues.filter_sort.downloadtimes=下载次数
issues.filter_sort.moststars=收藏数量
issues.action_open=开启
issues.action_close=关闭
issues.action_label=标签
@@ -2512,11 +2530,16 @@ repos.contributor=贡献者数
repos.yes=是
repos.no=否

images.recommend = 推荐
images.unrecommend = 不推荐

datasets.dataset_manage_panel=数据集管理
datasets.owner=所有者
datasets.name=名称
datasets.private=私有
datasets.recommend=设为推荐
datasets.unrecommend=取消推荐
datasets.only_recommend = 仅显示平台推荐

cloudbrain.all_task_types=全部任务类型
cloudbrain.all_computing_resources=全部计算资源
@@ -2864,7 +2887,7 @@ mirror_sync_create=从镜像同步了新的引用 <a href="%s/src/%s">%[2]s</a>
mirror_sync_delete=从镜像同步并从 <a href="%[1]s">%[3]s</a> 删除了引用 <code>%[2]s</code>
approve_pull_request=`同意了 <a href="%s/pulls/%s">%s#%[2]s</a>`
reject_pull_request=`建议变更 <a href="%s/pulls/%s">%s#%[2]s</a>`
upload_dataset=`上传了数据集文件 <a href="%s/datasets?type=%s">%s</a>`
upload_dataset=`上传了数据集文件 <a href="%s/datasets">%s</a>`
task_gpudebugjob=`创建了CPU/GPU类型调试任务 <a href="%s/cloudbrain/%s">%s</a>`
task_npudebugjob=`创建了NPU类型调试任务 <a href="%s/modelarts/notebook/%s">%s</a>`
task_nputrainjob=`创建了NPU类型训练任务 <a href="%s/modelarts/train-job/%s">%s</a>`
@@ -2974,6 +2997,7 @@ snn4imagenet_path = snn4imagenet脚本存放路径
brainscore_path = brainscore脚本存放路径
start_command = 启动命令
choose_mirror = 选择镜像或输入镜像地址
input_mirror = 请输入云脑镜像地址
select_dataset = 选择数据集
specification = 规格
select_specification = 选择资源规格


+ 6
- 1
public/home/home.js

@@ -99,6 +99,11 @@ socket.onmessage = function (e) {
console.log("receive action type=" + record.OpType + " name=" + actionName + " but user is null.");
continue;
}
if(record.OpType == "24"){
if(record.Content.indexOf("true") != -1){
continue;
}
}
var recordPrefix = getMsg(record);
if(record.OpType == "6" || record.OpType == "10" || record.OpType == "12" || record.OpType == "13"){
html += recordPrefix + actionName;
@@ -162,7 +167,7 @@ socket.onmessage = function (e) {
function getTaskLink(record){
var re = getRepoLink(record);
if(record.OpType == 24){
re = re + "/datasets?type=" + record.Content;
re = re + "/datasets";
}else if(record.OpType == 25){
re = re + "/cloudbrain/" + record.Content;
}else if(record.OpType == 26){


+ 29
- 22
public/home/search.js

@@ -101,16 +101,20 @@ function initPageInfo(){

function searchItem(type,sortType){
console.log("enter item 2.");
currentSearchKeyword = document.getElementById("keyword_input").value;
if(!isEmpty(currentSearchKeyword)){
initPageInfo();
currentSearchTableName = itemType[type];
currentSearchSortBy = sortBy[sortType];
currentSearchAscending = sortAscending[sortType];
OnlySearchLabel =false;
page(currentPage);
if(OnlySearchLabel){
doSearchLabel(currentSearchTableName,currentSearchKeyword,sortBy[sortType],sortAscending[sortType])
}else{
emptySearch();
currentSearchKeyword = document.getElementById("keyword_input").value;
if(!isEmpty(currentSearchKeyword)){
initPageInfo();
currentSearchTableName = itemType[type];
currentSearchSortBy = sortBy[sortType];
currentSearchAscending = sortAscending[sortType];
OnlySearchLabel =false;
page(currentPage);
}else{
emptySearch();
}
}
}

@@ -806,17 +810,14 @@ var repoAndOrgEN={
function page(current){
currentPage=current;
doSearch(currentSearchTableName,currentSearchKeyword,current,pageSize,false,currentSearchSortBy,OnlySearchLabel);
}
function nextPage(){
currentPage = currentPage+1;
console.log("currentPage=" + currentPage);
if(currentPage >= endIndex){
startIndex=startIndex+1;
endIndex = endIndex +1;
}
page(currentPage);
}
@@ -824,10 +825,6 @@ function page(current){
console.log("currentPage=" + currentPage);
if(currentPage > 1){
currentPage = currentPage-1;
if(currentPage <= startIndex && startIndex > 1){
startIndex = startIndex -1;
endIndex = endIndex - 1;
}
console.log("currentPage=" + (currentPage));
page(currentPage);
}
@@ -862,7 +859,7 @@ function getYPosition(e){
showTip(getLabel(isZh,"search_input_large_0"),"warning",left+5,top);
}
else if(goNum<=totalPage){
page(goNum);
page(parseInt(goNum,10));
}
else{
showTip(getLabel(isZh,"search_input_maxed"),"warning",left+5,top);
@@ -884,9 +881,14 @@ function getYPosition(e){
var html ="";
console.log("currentPage=" + currentPage);
console.log("privateTotal=" + privateTotal);
// if(totalPage==0){
// return;
// }
startIndex = currentPage -1;
if(startIndex < 1){
startIndex = 1;
}
endIndex = currentPage + 2;
if(endIndex >= totalPage){
endIndex = totalPage;
}
html += "<span class=\"item\">" + getLabel(isZh,"search_input_total") + " " + totalNum + " " + getLabel(isZh,"search_srtip") + "</span>"
if(currentPage > 1){
html += "<a class=\"item navigation\" href=\"javascript:page(1)\"><span class=\"navigation_label\">" + getLabel(isZh,"search_home_page") + "</span></a>";
@@ -908,6 +910,11 @@ function getYPosition(e){
}
}

if (endIndex < totalPage-1){
html += "...";
html += "<a id=\"page_" + totalPage+ "\" class=\"item\" href=\"javascript:page(" + totalPage +")\">" + totalPage + "</a>";
}

if(currentPage >=totalPage){
html += "<a class=\"disabled item navigation\" href=\"javascript:nextPage()\"><i class=\"icon right arrow\"></i></a>";
html += "<a class=\"disabled item navigation\" href=\"javascript:page(" + totalPage + ")\"><span class=\"navigation_label\">" + getLabel(isZh,"search_last_page") + "</span></a>";


+ 7
- 0
routers/admin/cloudbrains.go

@@ -21,6 +21,7 @@ import (
const (
tplCloudBrains base.TplName = "admin/cloudbrain/list"
tplImages base.TplName = "admin/cloudbrain/images"
tplCommitImages base.TplName = "admin/cloudbrain/imagecommit"
EXCEL_DATE_FORMAT = "20060102150405"
CREATE_TIME_FORMAT = "2006/01/02 15:04:05"
)
@@ -114,6 +115,12 @@ func Images(ctx *context.Context) {

}

func CloudBrainCommitImageShow(ctx *context.Context) {
ctx.Data["PageIsAdminImages"] = true
ctx.HTML(200, tplCommitImages)

}

func DownloadCloudBrains(ctx *context.Context) {

page := 1


+ 23
- 1
routers/admin/dataset.go

@@ -1,6 +1,8 @@
package admin

import (
"net/http"
"strconv"
"strings"

"code.gitea.io/gitea/models"
@@ -49,6 +51,8 @@ func Datasets(ctx *context.Context) {
orderBy = models.SearchOrderBySizeReverse
case "size":
orderBy = models.SearchOrderBySize
case "downloadtimes":
orderBy = models.SearchOrderByDownloadTimes
case "moststars":
orderBy = models.SearchOrderByStarsReverse
case "feweststars":
@@ -70,6 +74,7 @@ func Datasets(ctx *context.Context) {
PageSize: setting.UI.ExplorePagingNum,
},
Keyword: keyword,
RecommendOnly: ctx.QueryBool("recommend"),
SearchOrderBy: orderBy,
})
if err != nil {
@@ -80,7 +85,7 @@ func Datasets(ctx *context.Context) {
ctx.Data["Keyword"] = keyword
ctx.Data["Total"] = count
ctx.Data["Datasets"] = datasets
ctx.Data["Recommend"] = ctx.QueryBool("recommend")
pager := context.NewPagination(int(count), setting.UI.ExplorePagingNum, page, 5)
pager.SetDefaultParams(ctx)
ctx.Data["Page"] = pager
@@ -88,6 +93,23 @@ func Datasets(ctx *context.Context) {
ctx.HTML(200, tplDatasets)
}

func DatasetAction(ctx *context.Context) {
var err error
datasetId, _ := strconv.ParseInt(ctx.Params(":id"), 10, 64)
switch ctx.Params(":action") {

case "recommend":
err = models.RecommendDataset(datasetId, true)
case "unrecommend":
err = models.RecommendDataset(datasetId, false)
}
if err != nil {
ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("repo.star_fail", ctx.Params(":action"))))
} else {
ctx.JSON(http.StatusOK, models.BaseOKMessage)
}
}

func DeleteDataset(ctx *context.Context) {
dataset, err := models.GetDatasetByID(ctx.QueryInt64("id"))
if err != nil {


+ 6
- 2
routers/home.go

@@ -309,9 +309,11 @@ func ExploreDatasets(ctx *context.Context) {
orderBy = models.SearchOrderByStarsReverse
case "feweststars":
orderBy = models.SearchOrderByStars
case "default":
orderBy = models.SearchOrderByDefault
default:
ctx.Data["SortType"] = "recentupdate"
orderBy = models.SearchOrderByRecentUpdated
ctx.Data["SortType"] = "default"
orderBy = models.SearchOrderByDefault
}

keyword := strings.Trim(ctx.Query("q"), " ")
@@ -331,6 +333,7 @@ func ExploreDatasets(ctx *context.Context) {
Task: task,
License: license,
OwnerID: ownerID,
RecommendOnly: ctx.QueryBool("recommend"),
ListOptions: models.ListOptions{
Page: page,
PageSize: 30,
@@ -357,6 +360,7 @@ func ExploreDatasets(ctx *context.Context) {
ctx.Data["Category"] = category
ctx.Data["Task"] = task
ctx.Data["License"] = license
ctx.Data["Recommend"] = ctx.QueryBool("recommend")
pager.SetDefaultParams(ctx)
ctx.Data["Page"] = pager



+ 4
- 48
routers/repo/attachment.go

@@ -11,7 +11,6 @@ import (
"fmt"
"mime/multipart"
"net/http"
"path"
"strconv"
"strings"

@@ -830,20 +829,6 @@ func GetMultipartUploadUrl(ctx *context.Context) {
})
}

func GetObsKey(ctx *context.Context) {
uuid := gouuid.NewV4().String()
key := strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, uuid)), "/")

ctx.JSON(200, map[string]string{
"uuid": uuid,
"key": key,
"access_key_id": setting.AccessKeyID,
"secret_access_key": setting.SecretAccessKey,
"server": setting.Endpoint,
"bucket": setting.Bucket,
})
}

func CompleteMultipart(ctx *context.Context) {
uuid := ctx.Query("uuid")
uploadID := ctx.Query("uploadID")
@@ -870,13 +855,13 @@ func CompleteMultipart(ctx *context.Context) {
}

if typeCloudBrain == models.TypeCloudBrainOne {
_, err = storage.CompleteMultiPartUpload(uuid, uploadID)
_, err = storage.CompleteMultiPartUpload(uuid, uploadID, fileChunk.TotalChunks)
if err != nil {
ctx.Error(500, fmt.Sprintf("CompleteMultiPartUpload failed: %v", err))
return
}
} else {
err = storage.CompleteObsMultiPartUpload(uuid, uploadID, fileName)
err = storage.CompleteObsMultiPartUpload(uuid, uploadID, fileName, fileChunk.TotalChunks)
if err != nil {
ctx.Error(500, fmt.Sprintf("CompleteObsMultiPartUpload failed: %v", err))
return
@@ -907,10 +892,9 @@ func CompleteMultipart(ctx *context.Context) {
ctx.Error(500, fmt.Sprintf("InsertAttachment: %v", err))
return
}
attachment.UpdateDatasetUpdateUnix()
repository, _ := models.GetRepositoryByID(dataset.RepoID)
notification.NotifyOtherTask(ctx.User, repository, fmt.Sprint(attachment.Type), attachment.Name, models.ActionUploadAttachment)

notification.NotifyOtherTask(ctx.User, repository, fmt.Sprint(repository.IsPrivate, attachment.IsPrivate), attachment.Name, models.ActionUploadAttachment)
if attachment.DatasetID != 0 {
if isCanDecompress(attachment.Name) {
if typeCloudBrain == models.TypeCloudBrainOne {
@@ -947,34 +931,6 @@ func CompleteMultipart(ctx *context.Context) {
})
}

func UpdateMultipart(ctx *context.Context) {
uuid := ctx.Query("uuid")
partNumber := ctx.QueryInt("chunkNumber")
etag := ctx.Query("etag")

fileChunk, err := models.GetFileChunkByUUID(uuid)
if err != nil {
if models.IsErrFileChunkNotExist(err) {
ctx.Error(404)
} else {
ctx.ServerError("GetFileChunkByUUID", err)
}
return
}

fileChunk.CompletedParts = append(fileChunk.CompletedParts, strconv.Itoa(partNumber)+"-"+strings.Replace(etag, "\"", "", -1))

err = models.UpdateFileChunk(fileChunk)
if err != nil {
ctx.Error(500, fmt.Sprintf("UpdateFileChunk: %v", err))
return
}

ctx.JSON(200, map[string]string{
"result_code": "0",
})
}

func HandleUnDecompressAttachment() {
attachs, err := models.GetUnDecompressAttachments()
if err != nil {


+ 59
- 10
routers/repo/cloudbrain.go

@@ -40,13 +40,11 @@ const (
tplCloudBrainBenchmarkNew base.TplName = "repo/cloudbrain/benchmark/new"
tplCloudBrainBenchmarkShow base.TplName = "repo/cloudbrain/benchmark/show"

tplCloudBrainImageSubmit base.TplName = "repo/cloudbrain/image/submit"
tplCloudBrainImageEdit base.TplName = "repo/cloudbrain/image/edit"

tplCloudBrainImageSubmit base.TplName = "repo/cloudbrain/image/submit"
tplCloudBrainImageEdit base.TplName = "repo/cloudbrain/image/edit"

tplCloudBrainTrainJobNew base.TplName = "repo/cloudbrain/trainjob/new"
tplCloudBrainTrainJobShow base.TplName = "repo/cloudbrain/trainjob/show"

)

var (
@@ -59,6 +57,7 @@ var (
)

const BENCHMARK_TYPE_CODE = "repo.cloudbrain.benchmark.types"
const CLONE_FILE_PREFIX = "file:///"

var benchmarkTypesMap = make(map[string]*models.BenchmarkTypes, 0)

@@ -600,6 +599,7 @@ func cloudBrainShow(ctx *context.Context, tpName base.TplName, jobType models.Jo
ctx.Data["dataset_path"] = cloudbrain.DataSetMountPath
ctx.Data["model_path"] = cloudbrain.ModelMountPath
ctx.Data["canDownload"] = cloudbrain.CanModifyJob(ctx, task)
ctx.Data["branchName"] = task.BranchName
ctx.HTML(200, tpName)
}

@@ -702,6 +702,53 @@ func CloudBrainCommitImageCheck(ctx *context.Context, form auth.CommitImageCloud

}

func CloudBrainAdminCommitImage(ctx *context.Context, form auth.CommitAdminImageCloudBrainForm) {

if !NamePattern.MatchString(form.Tag) {
ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.title_format_err")))
return
}

if utf8.RuneCountInString(form.Description) > 255 {
ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.description_format_err", 255)))
return
}

validTopics, errMessage := checkTopics(form.Topics)
if errMessage != "" {
ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr(errMessage)))
return
}

err := cloudbrain.CommitAdminImage(models.CommitImageParams{
CommitImageCloudBrainParams: models.CommitImageCloudBrainParams{
ImageDescription: form.Description,
ImageTag: form.Tag,
},
IsPrivate: form.IsPrivate,
CloudBrainType: form.Type,
Topics: validTopics,
UID: ctx.User.ID,
Type: models.GetRecommondType(form.IsRecommend),
Place: form.Place,
})
if err != nil {
log.Error("CommitImagefailed")
if models.IsErrImageTagExist(err) {
ctx.JSON(200, models.BaseErrorMessage(ctx.Tr("repo.image_exist")))

} else if models.IsErrorImageCommitting(err) {
ctx.JSON(200, models.BaseErrorMessage(ctx.Tr("repo.image_committing")))
} else {
ctx.JSON(200, models.BaseErrorMessage(ctx.Tr("repo.image_commit_fail")))
}

return
}

ctx.JSON(200, models.BaseOKMessage)
}

func CloudBrainCommitImage(ctx *context.Context, form auth.CommitImageCloudBrainForm) {

if !NamePattern.MatchString(form.Tag) {
@@ -1142,7 +1189,8 @@ func GetRate(ctx *context.Context) {
}

func downloadCode(repo *models.Repository, codePath, branchName string) error {
if err := git.Clone(repo.RepoPath(), codePath, git.CloneRepoOptions{Branch: branchName}); err != nil {
//add "file:///" prefix to make the depth valid
if err := git.Clone(CLONE_FILE_PREFIX+repo.RepoPath(), codePath, git.CloneRepoOptions{Branch: branchName, Depth: 1}); err != nil {
log.Error("Failed to clone repository: %s (%v)", repo.FullName(), err)
return err
}
@@ -1202,7 +1250,7 @@ func downloadRateCode(repo *models.Repository, taskName, rateOwnerName, rateRepo
return err
}

if err := git.Clone(repoExt.RepoPath(), codePath, git.CloneRepoOptions{}); err != nil {
if err := git.Clone(CLONE_FILE_PREFIX+repoExt.RepoPath(), codePath, git.CloneRepoOptions{Depth: 1}); err != nil {
log.Error("Failed to clone repository: %s (%v)", repoExt.FullName(), err)
return err
}
@@ -1353,11 +1401,11 @@ func SyncCloudbrainStatus() {
maxDuration = setting.MaxDuration
}

if task.Duration >= maxDuration {
log.Info("begin to stop job(%s), because of the duration", task.JobName)
if task.Duration >= maxDuration && task.JobType != string(models.JobTypeTrain) {
log.Info("begin to stop job(%s), because of the duration", task.DisplayJobName)
err = cloudbrain.StopJob(task.JobID)
if err != nil {
log.Error("StopJob(%s) failed:%v", task.JobName, err)
log.Error("StopJob(%s) failed:%v", task.DisplayJobName, err)
continue
}
task.Status = string(models.JobStopped)
@@ -1367,7 +1415,8 @@ func SyncCloudbrainStatus() {
task.ComputeAndSetDuration()
err = models.UpdateJob(task)
if err != nil {
log.Error("UpdateJob(%s) failed:%v", task.JobName, err)
log.Error("UpdateJob(%s) failed:%v", task.DisplayJobName, err)
continue
}
}
}


+ 3
- 0
routers/repo/dataset.go

@@ -358,6 +358,7 @@ func MyDatasets(ctx *context.Context) {
NeedIsPrivate: false,
JustNeedZipFile: true,
NeedRepoInfo: true,
RecommendOnly: ctx.QueryBool("recommend"),
})
if err != nil {
ctx.ServerError("datasets", err)
@@ -398,6 +399,7 @@ func PublicDataset(ctx *context.Context) {
Type: cloudbrainType,
JustNeedZipFile: true,
NeedRepoInfo: true,
RecommendOnly: ctx.QueryBool("recommend"),
})
if err != nil {
ctx.ServerError("datasets", err)
@@ -454,6 +456,7 @@ func MyFavoriteDataset(ctx *context.Context) {
Type: cloudbrainType,
JustNeedZipFile: true,
NeedRepoInfo: true,
RecommendOnly: ctx.QueryBool("recommend"),
})
if err != nil {
ctx.ServerError("datasets", err)


+ 4
- 0
routers/repo/http.go

@@ -256,6 +256,10 @@ func HTTP(ctx *context.Context) {
models.EnvPusherName + "=" + authUser.Name,
models.EnvPusherID + fmt.Sprintf("=%d", authUser.ID),
models.EnvIsDeployKey + "=false",
models.EnvRepoSize + "=" + fmt.Sprint(repo.Size),
models.EnvRepoMaxFileSize + "=" + fmt.Sprint(setting.Repository.Upload.FileMaxSize),
models.EnvRepoMaxSize + "=" + fmt.Sprint(setting.Repository.RepoMaxSize),
models.EnvPushSizeCheckFlag + "=" + fmt.Sprint(setting.Repository.Upload.ShellFlag),
}
if !authUser.KeepEmailPrivate {
environ = append(environ, models.EnvPusherEmail+"="+authUser.Email)


+ 30
- 1
routers/repo/issue.go

@@ -432,7 +432,7 @@ func RetrieveRepoMetas(ctx *context.Context, repo *models.Repository, isPull boo
return nil
}

brs, _, err := ctx.Repo.GitRepo.GetBranches(0,0)
brs, _, err := ctx.Repo.GitRepo.GetBranches(0, 0)
if err != nil {
ctx.ServerError("GetBranches", err)
return nil
@@ -1302,6 +1302,35 @@ func UpdateIssueContent(ctx *context.Context) {
})
}

// UpdateIssueRef change issue's code reference
func UpdateIssueRef(ctx *context.Context) {
issues := getActionIssues(ctx)
if ctx.Written() {
return
}

if len(issues) == 0 {
log.Error("UpdateIssueRef: no issue found")
return
}
issue := issues[0]

if !ctx.IsSigned || (ctx.User.ID != issue.PosterID && !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)) {
ctx.Error(403)
return
}

ref := ctx.Query("id")
if err := issue_service.ChangeRef(issue, ctx.User, ref); err != nil {
ctx.ServerError("ChangeRef", err)
return
}

ctx.JSON(200, map[string]interface{}{
"ref": issue.Ref,
})
}

// UpdateIssueMilestone change issue's milestone
func UpdateIssueMilestone(ctx *context.Context) {
issues := getActionIssues(ctx)


+ 16
- 17
routers/repo/modelarts.go

@@ -51,6 +51,9 @@ const (

func DebugJobIndex(ctx *context.Context) {
listType := ctx.Query("debugListType")
if listType == "" {
listType = models.AllResource
}
ctx.Data["ListType"] = listType
MustEnableCloudbrain(ctx)
repo := ctx.Repo.Repository
@@ -247,7 +250,9 @@ func Notebook2Create(ctx *context.Context, form auth.CreateModelArtsNotebookForm
func NotebookShow(ctx *context.Context) {
ctx.Data["PageIsCloudBrain"] = true
debugListType := ctx.Query("debugListType")

if debugListType == "" {
debugListType = "all"
}
var ID = ctx.Params(":id")
task, err := models.GetCloudbrainByIDWithDeleted(ID)
if err != nil {
@@ -1027,10 +1032,8 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm)
gitRepo, _ := git.OpenRepository(repo.RepoPath())
commitID, _ := gitRepo.GetBranchCommitID(branch_name)

if err := git.Clone(repo.RepoPath(), codeLocalPath, git.CloneRepoOptions{
Branch: branch_name,
}); err != nil {
log.Error("Create task failed, server timed out: %s (%v)", repo.FullName(), err)
if err := downloadCode(repo, codeLocalPath, branch_name); err != nil {
log.Error("downloadCode failed, server timed out: %s (%v)", repo.FullName(), err)
trainJobErrorNewDataPrepare(ctx, form)
ctx.RenderWithErr("Create task failed, server timed out", tplModelArtsTrainJobNew, &form)
return
@@ -1245,9 +1248,7 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ

gitRepo, _ := git.OpenRepository(repo.RepoPath())
commitID, _ := gitRepo.GetBranchCommitID(branch_name)
if err := git.Clone(repo.RepoPath(), codeLocalPath, git.CloneRepoOptions{
Branch: branch_name,
}); err != nil {
if err := downloadCode(repo, codeLocalPath, branch_name); err != nil {
log.Error("Failed git clone repo to local(!: %s (%v)", repo.FullName(), err)
versionErrorDataPrepare(ctx, form)
ctx.RenderWithErr("Failed git clone repo to local!", tplModelArtsTrainJobVersionNew, &form)
@@ -1475,9 +1476,9 @@ func paramCheckCreateTrainJob(form auth.CreateModelArtsTrainJobForm) error {
return errors.New("启动文件必须是python文件")
}

if form.WorkServerNumber > 25 || form.WorkServerNumber < 1 {
log.Error("the WorkServerNumber(%d) must be in (1,25)", form.WorkServerNumber)
return errors.New("计算节点数必须在1-25之间")
if form.WorkServerNumber > 2 || form.WorkServerNumber < 1 {
log.Error("the WorkServerNumber(%d) must be in (1,2)", form.WorkServerNumber)
return errors.New("计算节点数必须在1-2之间")
}
if form.BranchName == "" {
log.Error("the branch must not be null!", form.BranchName)
@@ -1493,9 +1494,9 @@ func paramCheckCreateInferenceJob(form auth.CreateModelArtsInferenceJobForm) err
return errors.New("启动文件必须是python文件")
}

if form.WorkServerNumber > 25 || form.WorkServerNumber < 1 {
log.Error("the WorkServerNumber(%d) must be in (1,25)", form.WorkServerNumber)
return errors.New("计算节点数必须在1-25之间")
if form.WorkServerNumber > 2 || form.WorkServerNumber < 1 {
log.Error("the WorkServerNumber(%d) must be in (1,2)", form.WorkServerNumber)
return errors.New("计算节点数必须在1-2之间")
}

if form.ModelName == "" {
@@ -1874,9 +1875,7 @@ func InferenceJobCreate(ctx *context.Context, form auth.CreateModelArtsInference
gitRepo, _ := git.OpenRepository(repo.RepoPath())
commitID, _ := gitRepo.GetBranchCommitID(branch_name)

if err := git.Clone(repo.RepoPath(), codeLocalPath, git.CloneRepoOptions{
Branch: branch_name,
}); err != nil {
if err := downloadCode(repo, codeLocalPath, branch_name); err != nil {
log.Error("Create task failed, server timed out: %s (%v)", repo.FullName(), err)
inferenceJobErrorNewDataPrepare(ctx, form)
ctx.RenderWithErr("Create task failed, server timed out", tplModelArtsInferenceJobNew, &form)


+ 130
- 99
routers/repo/user_data_analysis.go

@@ -19,6 +19,130 @@ const (
PAGE_SIZE = 2000
)

func getExcelHeader(ctx *context.Context) map[string]string {
excelHeader := make([]string, 0)
excelHeader = append(excelHeader, ctx.Tr("user.static.id"))
excelHeader = append(excelHeader, ctx.Tr("user.static.name"))
excelHeader = append(excelHeader, ctx.Tr("user.static.UserIndex"))
excelHeader = append(excelHeader, ctx.Tr("user.static.UserIndexPrimitive"))
excelHeader = append(excelHeader, ctx.Tr("user.static.codemergecount"))
excelHeader = append(excelHeader, ctx.Tr("user.static.commitcount"))
excelHeader = append(excelHeader, ctx.Tr("user.static.issuecount"))
excelHeader = append(excelHeader, ctx.Tr("user.static.commentcount"))
excelHeader = append(excelHeader, ctx.Tr("user.static.focusrepocount"))
excelHeader = append(excelHeader, ctx.Tr("user.static.starrepocount"))
excelHeader = append(excelHeader, ctx.Tr("user.static.logincount"))
excelHeader = append(excelHeader, ctx.Tr("user.static.watchedcount"))
excelHeader = append(excelHeader, ctx.Tr("user.static.commitcodesize"))
excelHeader = append(excelHeader, ctx.Tr("user.static.solveissuecount"))
excelHeader = append(excelHeader, ctx.Tr("user.static.encyclopediascount"))
excelHeader = append(excelHeader, ctx.Tr("user.static.createrepocount"))
excelHeader = append(excelHeader, ctx.Tr("user.static.openiindex"))
excelHeader = append(excelHeader, ctx.Tr("user.static.CloudBrainTaskNum"))
excelHeader = append(excelHeader, ctx.Tr("user.static.CloudBrainRunTime"))
excelHeader = append(excelHeader, ctx.Tr("user.static.CommitDatasetNum"))
excelHeader = append(excelHeader, ctx.Tr("user.static.CommitModelCount"))

excelHeader = append(excelHeader, ctx.Tr("user.static.FocusOtherUser"))
excelHeader = append(excelHeader, ctx.Tr("user.static.CollectDataset"))
excelHeader = append(excelHeader, ctx.Tr("user.static.CollectedDataset"))
excelHeader = append(excelHeader, ctx.Tr("user.static.RecommendDataset"))
excelHeader = append(excelHeader, ctx.Tr("user.static.CollectImage"))
excelHeader = append(excelHeader, ctx.Tr("user.static.CollectedImage"))
excelHeader = append(excelHeader, ctx.Tr("user.static.RecommendImage"))

excelHeader = append(excelHeader, ctx.Tr("user.static.registdate"))
excelHeader = append(excelHeader, ctx.Tr("user.static.countdate"))

excelHeaderMap := make(map[string]string, 0)
var i byte
i = 0
for _, value := range excelHeader {
excelColumn := getColumn(i) + fmt.Sprint(1)
log.Info("excelColumn=" + excelColumn)
excelHeaderMap[excelColumn] = value
i++
}
return excelHeaderMap
}

func writeExcel(row int, xlsx *excelize.File, sheetName string, userRecord *models.UserBusinessAnalysisAll) {
rows := fmt.Sprint(row)
var tmp byte
tmp = 0
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.ID)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.Name)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", userRecord.UserIndex))
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", userRecord.UserIndexPrimitive))
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CodeMergeCount)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitCount)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.IssueCount)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommentCount)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.FocusRepoCount)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.StarRepoCount)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.LoginCount)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.WatchedCount)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitCodeSize)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.SolveIssueCount)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.EncyclopediasCount)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CreateRepoCount)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex))
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CloudBrainTaskNum)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600))
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitDatasetNum)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CommitModelCount)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.FocusOtherUser)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectDataset)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectedDataset)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.RecommendDataset)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectImage)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.CollectedImage)
tmp = tmp + 1
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, userRecord.RecommendImage)
tmp = tmp + 1
formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05")
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, formatTime[0:len(formatTime)-3])
tmp = tmp + 1

formatTime = userRecord.DataDate
xlsx.SetCellValue(sheetName, getColumn(tmp)+rows, formatTime)
}
func getColumn(tmp byte) string {
var tmpA byte
tmpA = 'A'
if tmp < 26 {
return string(tmpA + tmp)
} else {
return "A" + string(tmpA+(tmp-26))
}
}
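For reference, a standalone restatement of the column lettering produced by getColumn: indexes 0-25 map to A-Z and 26-51 map to AA-AZ, which covers the roughly thirty columns written by getExcelHeader and writeExcel above:

package main

import "fmt"

// getColumnSketch mirrors getColumn; string(rune(...)) is used here only to
// make the integer-to-string conversion explicit.
func getColumnSketch(tmp byte) string {
	var tmpA byte = 'A'
	if tmp < 26 {
		return string(rune(tmpA + tmp))
	}
	return "A" + string(rune(tmpA+(tmp-26)))
}

func main() {
	for _, i := range []byte{0, 1, 25, 26, 27, 30} {
		fmt.Printf("%d -> %s\n", i, getColumnSketch(i))
	}
	// 0 -> A, 1 -> B, 25 -> Z, 26 -> AA, 27 -> AB, 30 -> AE
}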

func queryUserDataPage(ctx *context.Context, tableName string, queryObj interface{}) {
page := ctx.QueryInt("page")
if page <= 0 {
@@ -37,30 +161,7 @@ func queryUserDataPage(ctx *context.Context, tableName string, queryObj interfac
sheetName := ctx.Tr("user.static.sheetname")
index := xlsx.NewSheet(sheetName)
xlsx.DeleteSheet("Sheet1")
dataHeader := map[string]string{
"A1": ctx.Tr("user.static.id"),
"B1": ctx.Tr("user.static.name"),
"C1": ctx.Tr("user.static.codemergecount"),
"D1": ctx.Tr("user.static.UserIndex"),
"E1": ctx.Tr("user.static.commitcount"),
"F1": ctx.Tr("user.static.issuecount"),
"G1": ctx.Tr("user.static.commentcount"),
"H1": ctx.Tr("user.static.focusrepocount"),
"I1": ctx.Tr("user.static.starrepocount"),
"J1": ctx.Tr("user.static.logincount"),
"K1": ctx.Tr("user.static.watchedcount"),
"L1": ctx.Tr("user.static.commitcodesize"),
"M1": ctx.Tr("user.static.solveissuecount"),
"N1": ctx.Tr("user.static.encyclopediascount"),
"O1": ctx.Tr("user.static.createrepocount"),
"P1": ctx.Tr("user.static.openiindex"),
"Q1": ctx.Tr("user.static.CloudBrainTaskNum"),
"R1": ctx.Tr("user.static.CloudBrainRunTime"),
"S1": ctx.Tr("user.static.CommitDatasetNum"),
"T1": ctx.Tr("user.static.CommitModelCount"),
"U1": ctx.Tr("user.static.registdate"),
"V1": ctx.Tr("user.static.countdate"),
}
dataHeader := getExcelHeader(ctx)
for k, v := range dataHeader {
//设置单元格的值
xlsx.SetCellValue(sheetName, k, v)
@@ -74,31 +175,7 @@ func queryUserDataPage(ctx *context.Context, tableName string, queryObj interfac
log.Info("return count=" + fmt.Sprint(count))
for _, userRecord := range re {
row++
rows := fmt.Sprint(row)
xlsx.SetCellValue(sheetName, "A"+rows, userRecord.ID)
xlsx.SetCellValue(sheetName, "B"+rows, userRecord.Name)
xlsx.SetCellValue(sheetName, "C"+rows, userRecord.CodeMergeCount)
xlsx.SetCellValue(sheetName, "D"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex))
xlsx.SetCellValue(sheetName, "E"+rows, userRecord.CommitCount)
xlsx.SetCellValue(sheetName, "F"+rows, userRecord.IssueCount)
xlsx.SetCellValue(sheetName, "G"+rows, userRecord.CommentCount)
xlsx.SetCellValue(sheetName, "H"+rows, userRecord.FocusRepoCount)
xlsx.SetCellValue(sheetName, "I"+rows, userRecord.StarRepoCount)
xlsx.SetCellValue(sheetName, "J"+rows, userRecord.LoginCount)
xlsx.SetCellValue(sheetName, "K"+rows, userRecord.WatchedCount)
xlsx.SetCellValue(sheetName, "L"+rows, userRecord.CommitCodeSize)
xlsx.SetCellValue(sheetName, "M"+rows, userRecord.SolveIssueCount)
xlsx.SetCellValue(sheetName, "N"+rows, userRecord.EncyclopediasCount)
xlsx.SetCellValue(sheetName, "O"+rows, userRecord.CreateRepoCount)
xlsx.SetCellValue(sheetName, "P"+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex))
xlsx.SetCellValue(sheetName, "Q"+rows, userRecord.CloudBrainTaskNum)
xlsx.SetCellValue(sheetName, "R"+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600))
xlsx.SetCellValue(sheetName, "S"+rows, userRecord.CommitDatasetNum)
xlsx.SetCellValue(sheetName, "T"+rows, userRecord.CommitModelCount)
formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05")
xlsx.SetCellValue(sheetName, "U"+rows, formatTime[0:len(formatTime)-3])
formatTime = userRecord.DataDate
xlsx.SetCellValue(sheetName, "V"+rows, formatTime)
writeExcel(row, xlsx, sheetName, userRecord)
}

indexTotal += PAGE_SIZE
@@ -236,62 +313,16 @@ func QueryUserStaticDataPage(ctx *context.Context) {
sheetName := ctx.Tr("user.static.sheetname")
index := xlsx.NewSheet(sheetName)
xlsx.DeleteSheet("Sheet1")
dataHeader := map[string]string{
"A1": ctx.Tr("user.static.id"),
"B1": ctx.Tr("user.static.name"),
"C1": ctx.Tr("user.static.codemergecount"),
"D1": ctx.Tr("user.static.UserIndex"),
"E1": ctx.Tr("user.static.commitcount"),
"F1": ctx.Tr("user.static.issuecount"),
"G1": ctx.Tr("user.static.commentcount"),
"H1": ctx.Tr("user.static.focusrepocount"),
"I1": ctx.Tr("user.static.starrepocount"),
"J1": ctx.Tr("user.static.logincount"),
"K1": ctx.Tr("user.static.watchedcount"),
"L1": ctx.Tr("user.static.commitcodesize"),
"M1": ctx.Tr("user.static.solveissuecount"),
"N1": ctx.Tr("user.static.encyclopediascount"),
"O1": ctx.Tr("user.static.createrepocount"),
"P1": ctx.Tr("user.static.openiindex"),
"Q1": ctx.Tr("user.static.CloudBrainTaskNum"),
"R1": ctx.Tr("user.static.CloudBrainRunTime"),
"S1": ctx.Tr("user.static.CommitDatasetNum"),
"T1": ctx.Tr("user.static.CommitModelCount"),
"U1": ctx.Tr("user.static.registdate"),
"V1": ctx.Tr("user.static.countdate"),
}

dataHeader := getExcelHeader(ctx)
for k, v := range dataHeader {
//设置单元格的值
xlsx.SetCellValue(sheetName, k, v)
}

for i, userRecord := range re {
rows := fmt.Sprint(i + 2)

xlsx.SetCellValue(sheetName, "A"+rows, userRecord.ID)
xlsx.SetCellValue(sheetName, "B"+rows, userRecord.Name)
xlsx.SetCellValue(sheetName, "C"+rows, userRecord.CodeMergeCount)
xlsx.SetCellValue(sheetName, "D"+rows, fmt.Sprintf("%.2f", userRecord.UserIndex))
xlsx.SetCellValue(sheetName, "E"+rows, userRecord.CommitCount)
xlsx.SetCellValue(sheetName, "F"+rows, userRecord.IssueCount)
xlsx.SetCellValue(sheetName, "G"+rows, userRecord.CommentCount)
xlsx.SetCellValue(sheetName, "H"+rows, userRecord.FocusRepoCount)
xlsx.SetCellValue(sheetName, "I"+rows, userRecord.StarRepoCount)
xlsx.SetCellValue(sheetName, "J"+rows, userRecord.LoginCount)
xlsx.SetCellValue(sheetName, "K"+rows, userRecord.WatchedCount)
xlsx.SetCellValue(sheetName, "L"+rows, userRecord.CommitCodeSize)
xlsx.SetCellValue(sheetName, "M"+rows, userRecord.SolveIssueCount)
xlsx.SetCellValue(sheetName, "N"+rows, userRecord.EncyclopediasCount)
xlsx.SetCellValue(sheetName, "O"+rows, userRecord.CreateRepoCount)
xlsx.SetCellValue(sheetName, "P"+rows, fmt.Sprintf("%.2f", userRecord.OpenIIndex))
xlsx.SetCellValue(sheetName, "Q"+rows, userRecord.CloudBrainTaskNum)
xlsx.SetCellValue(sheetName, "R"+rows, fmt.Sprintf("%.2f", float64(userRecord.CloudBrainRunTime)/3600))
xlsx.SetCellValue(sheetName, "S"+rows, userRecord.CommitDatasetNum)
xlsx.SetCellValue(sheetName, "T"+rows, userRecord.CommitModelCount)
formatTime := userRecord.RegistDate.Format("2006-01-02 15:04:05")
xlsx.SetCellValue(sheetName, "U"+rows, formatTime[0:len(formatTime)-3])
formatTime = userRecord.DataDate
xlsx.SetCellValue(sheetName, "V"+rows, formatTime)
row := i + 2
writeExcel(row, xlsx, sheetName, userRecord)
}

//设置默认打开的表单


+ 5
- 2
routers/routes/routes.go

@@ -525,6 +525,7 @@ func RegisterRoutes(m *macaron.Macaron) {

m.Group("/datasets", func() {
m.Get("", admin.Datasets)
m.Put("/:id/action/:action", admin.DatasetAction)
// m.Post("/delete", admin.DeleteDataset)
})
m.Group("/cloudbrains", func() {
@@ -534,6 +535,8 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Group("/images", func() {
m.Get("", admin.Images)
m.Get("/data", repo.GetAllImages)
m.Get("/commit_image", admin.CloudBrainCommitImageShow)
m.Post("/commit_image", bindIgnErr(auth.CommitAdminImageCloudBrainForm{}), repo.CloudBrainAdminCommitImage)
})
m.Put("/image/:id/action/:action", image.Action)

@@ -608,12 +611,11 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Put("/obs_proxy_multipart", repo.PutOBSProxyUpload)
m.Get("/obs_proxy_download", repo.GetOBSProxyDownload)
m.Get("/get_multipart_url", repo.GetMultipartUploadUrl)
m.Post("/complete_multipart", repo.CompleteMultipart)
m.Post("/update_chunk", repo.UpdateMultipart)
}, reqSignIn)

m.Group("/attachments", func() {
m.Post("/decompress_done_notify", repo.UpdateAttachmentDecompressState)
m.Post("/complete_multipart", repo.CompleteMultipart)
})

m.Group("/attachments", func() {
@@ -890,6 +892,7 @@ func RegisterRoutes(m *macaron.Macaron) {
m.Post("/labels", reqRepoIssuesOrPullsWriter, repo.UpdateIssueLabel)
m.Post("/milestone", reqRepoIssuesOrPullsWriter, repo.UpdateIssueMilestone)
m.Post("/assignee", reqRepoIssuesOrPullsWriter, repo.UpdateIssueAssignee)
m.Post("/ref", reqRepoIssuesOrPullsWriter, repo.UpdateIssueRef)
m.Post("/request_review", reqRepoIssuesOrPullsReader, repo.UpdatePullReviewRequest)
m.Post("/status", reqRepoIssuesOrPullsWriter, repo.UpdateIssueStatus)
m.Post("/resolve_conversation", reqRepoIssuesOrPullsReader, repo.UpdateResolveConversation)


+ 17
- 14
routers/search.go

@@ -183,7 +183,7 @@ func searchRepoByLabel(ctx *context.Context, Key string, Page int, PageSize int)
topicsQuery := elastic.NewMatchQuery("topics", Key)
boolQ.Should(topicsQuery)

res, err := client.Search("repository-es-index").Query(boolQ).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Highlight(queryHighlight("topics")).Do(ctx.Req.Context())
res, err := client.Search("repository-es-index").Query(boolQ).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Highlight(queryHighlight("topics")).Do(ctx.Req.Context())
if err == nil {
searchJson, _ := json.Marshal(res)
log.Info("searchJson=" + string(searchJson))
@@ -200,15 +200,18 @@ func searchRepoByLabel(ctx *context.Context, Key string, Page int, PageSize int)
}
}

func getSort(SortBy string, ascending bool) elastic.Sorter {
var sort elastic.Sorter
sort = elastic.NewScoreSort()
if SortBy != "" {
if SortBy == "default" {
return sort
func getSort(SortBy string, ascending bool, secondSortBy string, secondAscending bool) []elastic.Sorter {
sort := make([]elastic.Sorter, 0)
if SortBy == "default" || SortBy == "" {
sort = append(sort, elastic.NewScoreSort())
if secondSortBy != "" {
log.Info("SortBy=" + SortBy + " secondSortBy=" + secondSortBy)
sort = append(sort, elastic.NewFieldSort(secondSortBy).Order(secondAscending))
}
return elastic.NewFieldSort(SortBy).Order(ascending)
} else {
sort = append(sort, elastic.NewFieldSort(SortBy).Order(ascending))
}
log.Info("sort size=" + fmt.Sprint(len(sort)))
return sort
}

@@ -308,7 +311,7 @@ func searchRepo(ctx *context.Context, TableName string, Key string, Page int, Pa
topicsQuery := elastic.NewMatchQuery("topics", Key).Boost(1).QueryName("f_third")
boolQ.Should(nameQuery, descriptionQuery, topicsQuery)

res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Highlight(queryHighlight("alias", "description", "topics")).Do(ctx.Req.Context())
res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending, "num_stars", false)...).From(from).Size(Size).Highlight(queryHighlight("alias", "description", "topics")).Do(ctx.Req.Context())
if err == nil {
searchJson, _ := json.Marshal(res)
log.Info("searchJson=" + string(searchJson))
@@ -330,7 +333,7 @@ func searchRepo(ctx *context.Context, TableName string, Key string, Page int, Pa
} else {
log.Info("query all content.")
//搜索的属性要指定{"timestamp":{"unmapped_type":"date"}}
res, err := client.Search(TableName).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Do(ctx.Req.Context())
res, err := client.Search(TableName).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Do(ctx.Req.Context())
if err == nil {
searchJson, _ := json.Marshal(res)
log.Info("searchJson=" + string(searchJson))
@@ -691,7 +694,7 @@ func searchUserOrOrg(ctx *context.Context, TableName string, Key string, Page in
boolQ.Must(UserOrOrgQuery)
}

res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending)).From((Page - 1) * PageSize).Size(PageSize).Highlight(queryHighlight("name", "full_name", "description")).Do(ctx.Req.Context())
res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From((Page - 1) * PageSize).Size(PageSize).Highlight(queryHighlight("name", "full_name", "description")).Do(ctx.Req.Context())
if err == nil {
searchJson, _ := json.Marshal(res)
log.Info("searchJson=" + string(searchJson))
@@ -849,7 +852,7 @@ func searchDataSet(ctx *context.Context, TableName string, Key string, Page int,
fileNameQuery := elastic.NewMatchQuery("file_name", Key).Boost(1).QueryName("f_third")
categoryQuery := elastic.NewMatchQuery("category", Key).Boost(1).QueryName("f_fourth")
boolQ.Should(nameQuery, descQuery, categoryQuery, fileNameQuery)
res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Highlight(queryHighlight("title", "description", "file_name", "category")).Do(ctx.Req.Context())
res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Highlight(queryHighlight("title", "description", "file_name", "category")).Do(ctx.Req.Context())
if err == nil {
searchJson, _ := json.Marshal(res)
log.Info("searchJson=" + string(searchJson))
@@ -864,7 +867,7 @@ func searchDataSet(ctx *context.Context, TableName string, Key string, Page int,
} else {
log.Info("query all datasets.")
//搜索的属性要指定{"timestamp":{"unmapped_type":"date"}}
res, err := client.Search(TableName).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Do(ctx.Req.Context())
res, err := client.Search(TableName).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Do(ctx.Req.Context())
if err == nil {
searchJson, _ := json.Marshal(res)
log.Info("searchJson=" + string(searchJson))
@@ -1057,7 +1060,7 @@ func searchIssueOrPr(ctx *context.Context, TableName string, Key string, Page in
boolQ.Must(isIssueQuery)
}

res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending)).From(from).Size(Size).Highlight(queryHighlight("name", "content", "comment")).Do(ctx.Req.Context())
res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending, "updated_unix.keyword", false)...).From(from).Size(Size).Highlight(queryHighlight("name", "content", "comment")).Do(ctx.Req.Context())
if err == nil {
searchJson, _ := json.Marshal(res)
log.Info("searchJson=" + string(searchJson))


+ 5
- 3
routers/user/profile.go

@@ -106,9 +106,9 @@ func Profile(ctx *context.Context) {

for _, org := range orgs {
_, repoCount, err := models.SearchRepository(&models.SearchRepoOptions{
OwnerID: org.ID,
Private: ctx.IsSigned,
Actor: ctx.User,
OwnerID: org.ID,
Private: ctx.IsSigned,
Actor: ctx.User,
})
if err != nil {
ctx.ServerError("SearchRepository", err)
@@ -175,6 +175,8 @@ func Profile(ctx *context.Context) {
orderBy = models.SearchOrderByAlphabeticallyReverse
case "alphabetically":
orderBy = models.SearchOrderByAlphabetically
case "downloadtimes":
orderBy = models.SearchOrderByDownloadTimes
case "moststars":
orderBy = models.SearchOrderByStarsReverse
case "feweststars":


+ 9
- 0
services/issue/content.go

@@ -21,3 +21,12 @@ func ChangeContent(issue *models.Issue, doer *models.User, content string) (err

return nil
}

// ChangeRef changes issue ref, as the given user.
func ChangeRef(issue *models.Issue, doer *models.User, ref string) (err error) {
if err := issue.ChangeRef(doer, ref); err != nil {
return err
}

return nil
}

+ 1
- 1
services/socketwrap/clientManager.go

@@ -10,7 +10,7 @@ import (
"github.com/elliotchance/orderedmap"
)

var opTypes = []int{1, 2, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 17, 22, 23, 25, 26, 27, 28, 29, 30, 31}
var opTypes = []int{1, 2, 5, 6, 7, 9, 10, 11, 12, 13, 14, 15, 17, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31}

type ClientsManager struct {
Clients *orderedmap.OrderedMap
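
The only functional change in this file is whitelisting OpType 24 (the dataset-upload action rendered in templates/user/dashboard/feeds.tmpl further down), so that action is now pushed to connected socket clients. A minimal sketch of how such a whitelist is typically consulted; the helper below is illustrative and not part of this diff.

```go
// shouldBroadcast reports whether an action's OpType is in the opTypes
// whitelist above. Hypothetical helper for illustration only.
func shouldBroadcast(opType int) bool {
	for _, allowed := range opTypes {
		if allowed == opType {
			return true
		}
	}
	return false
}
```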


+ 128
- 0
templates/admin/cloudbrain/imagecommit.tmpl View File

@@ -0,0 +1,128 @@
<style>
.label_color{
color:#505559 !important;
width: 6% !important;
text-align: center;
}
</style>
{{template "base/head" .}}
<div id="mask">
<div id="loadingPage">
<div class="rect1"></div>
<div class="rect2"></div>
<div class="rect3"></div>
<div class="rect4"></div>
<div class="rect5"></div>
</div>
</div>
<div class="repository">
{{template "repo/header" .}}
<div class="alert"></div>
<div class="ui container">
<div>
<div class="ui negative message" style="display: none;">
</div>
<div class="ui info message" style="display: none;">
</div>
<div class="ui positive message" style="display: none;">
</div>
<h4 class="ui top attached header">
{{.i18n.Tr "repo.submit_image"}}
</h4>
<div class="submit-image-tmplvalue" style="display: none;" data-link="{{$.Link}}" data-edit-page="{{.PageIsAdminImages}}"></div>
<div class="ui attached segment" style="padding: 2em 3em;padding-bottom: 7rem;">
<div class="ui form" id="form_image">
<input type="hidden" name="edit" value="edit">
{{.CsrfTokenHtml}}
<div class="inline field">
<label class="label_color" for="">{{$.i18n.Tr "dataset.dataset_available_clusters"}}</label>
<div class="ui basic label" style="border: none !important;color:#3291f8;">
<svg class="svg" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" width="14" height="14"><path fill="none" d="M0 0h24v24H0z"></path><path d="M4 3h16a1 1 0 0 1 1 1v7H3V4a1 1 0 0 1 1-1zM3 13h18v7a1 1 0 0 1-1 1H4a1 1 0 0 1-1-1v-7zm4 3v2h3v-2H7zM7 6v2h3V6H7z"></path></svg>
CPU/GPU
</div>
<input type="hidden" value="{{.Type}}" name="type">
</div>
<div class="inline required field">
<label class="label_color" for="">{{$.i18n.Tr "repo.images.name"}}</label>
<input type="text" name="tag" required placeholder="{{$.i18n.Tr "repo.images.name_placerholder"}}" style="width: 80%;" maxlength="100">
<span class="tooltips" style="display: block;padding-left: 0.5rem;">{{.i18n.Tr "repo.images.name_rule"}}</span>
</div>
<div class="inline required field">
<label class="label_color" for="">{{$.i18n.Tr "repo.images"}}</label>
<input type="text" name="place" required placeholder="{{$.i18n.Tr "cloudbrain.input_mirror"}}" style="width: 80%;" maxlength="100">
</div>
<div class="inline required field">
<label class="label_color" for="">{{$.i18n.Tr "dataset.description"}}</label>
<textarea style="width: 80%;" required id="description" name="description" rows="3" maxlength="255" placeholder={{.i18n.Tr "repo.modelarts.train_job.new_place"}} onchange="this.value=this.value.substring(0, 255)" onkeydown="this.value=this.value.substring(0, 255)" onkeyup="this.value=this.value.substring(0, 255)"></textarea>
</div>
<div class="inline field" style="display: flex;align-items: center;">
<label class="label_color" for="">{{$.i18n.Tr "repo.model.manage.label"}}</label>&nbsp;
<div class="ui multiple search selection dropdown" id="dropdown_image" style="width: 80%;">
<input type="hidden" name="topics" value="" required>
<div class="default text" id="default_text">{{.i18n.Tr "repo.repo_label_helpe"}}</div>
<div class="menu" id="course_label_item"></div>
</div>
</div>
<span class="tooltips" style="display: block;padding-left: 0.5rem;margin-top: 0.5rem;margin-bottom: 1rem;">{{.i18n.Tr "repo.image.label_tooltips"}}</span>
<div class="inline fields">
<label class="label_color" for="" style="visibility: hidden;"></label>
<div class="field">
<div class="ui radio checkbox">
<input type="radio" name="isRecommend" checked="checked" value="true">
<label>{{.i18n.Tr "admin.images.recommend"}}</label>
</div>
</div>
<div class="field" style="flex: 0.15;">
<div class="ui radio checkbox" >
<input type="radio" name="isRecommend" value="false">
<label>{{.i18n.Tr "admin.images.unrecommend"}}</label>
</div>
</div>
</div>
<div class="inline fields">
<label class="label_color" for="" style="visibility: hidden;"></label>
<div class="field">
<div class="ui radio checkbox">
<input type="radio" name="isPrivate" checked="checked" value="false">
<label>{{.i18n.Tr "org.settings.visibility.public"}}</label>
</div>
</div>
<div class="field" style="flex: 0.15;">
<div class="ui radio checkbox" >
<input type="radio" name="isPrivate" value="true">
<label>{{.i18n.Tr "home.show_private"}}</label>
</div>
</div>
<div class="field">
<span class="label_color">{{.i18n.Tr "repo.images.public_tooltips"}}</span>
</div>
</div>
<div class="inline required field" style="padding-top: 2rem;">
<label class="label_color" for="" style="visibility: hidden;"></label>
<button class="ui create_image green button" type="button">
{{.i18n.Tr "repo.cloudbrain.commit_image"}}
</button>
<a class="ui button" id="cancel_submit_image">{{.i18n.Tr "repo.cloudbrain.cancel"}}</a>
</div>
</div>
</div>
</div>
</div>
</div>

<!-- Confirmation modal -->
<div>
<div class="ui modal image_confirm_submit">
<div class="header">{{.i18n.Tr "repo.submit_image"}}</div>
<div class="content text red center">
<p><i class="exclamation icon"></i>{{.i18n.Tr "repo.image_overwrite"}}</p>
</div>
<div class="actions">
<button class="ui deny small button">{{.i18n.Tr "cloudbrain.operate_cancel"}}</button>
<button class="ui green small approve button">{{.i18n.Tr "cloudbrain.operate_confirm"}}</button>
</div>
</div>
</div>
{{template "base/footer" .}}

templates/admin/cloudbrain/images.html → templates/admin/cloudbrain/images.tmpl View File


+ 13
- 14
templates/admin/dataset/list.tmpl View File

@@ -3,12 +3,23 @@
{{template "admin/navbar" .}}
<div class="ui container">
{{template "base/alert" .}}
<div class="ui negative message" style="display: none;">
</div>
<h4 class="ui top attached header">
{{.i18n.Tr "admin.datasets.dataset_manage_panel"}} ({{.i18n.Tr "admin.total" .Total}})
</h4>
<div class="ui attached segment">
{{template "admin/dataset/search" .}}
</div>
<div class="ui attached segment">
<div class="ui ten wide column">
<div class="ui checkbox" id="dataset_check">
<input type="checkbox">
<label>{{.i18n.Tr "admin.datasets.only_recommend"}}</label>
</div>
</div>
</div>
<div class="ui attached table segment">
<table class="ui very basic striped table">
<thead>
@@ -24,10 +35,10 @@
{{range .Datasets}}
<tr>
<td>{{.ID}}</td>
<td><a href="{{AppSubUrl}}/">{{.Title}}</a></td>
<td style="display: flex;align-items: center;"><a href="{{AppSubUrl}}/{{.Repo.OwnerName}}/{{.Repo.Alias}}/datasets">{{.Title}}</a>{{if .Recommend}}<img src="/img/jian.svg" style="margin-left: 0.5rem;">{{end}}</td>
<td><i class="fa fa{{if .IsPrivate}}-check{{end}}-square-o"></i></td>
<td><span title="{{.CreatedUnix.FormatLong}}">{{.CreatedUnix.FormatShort}}</span></td>
<td></td>
<td>{{if .Recommend}}<span class="set_dataset" style="color: rgb(250, 140, 22);cursor: pointer;" data-url="{{$.Link}}/{{.ID}}/action/unrecommend">{{$.i18n.Tr "admin.datasets.unrecommend"}}</span>{{else}}<span class="set_dataset" style="color: rgb(19, 194, 141);cursor: pointer;" data-url="{{$.Link}}/{{.ID}}/action/recommend">{{$.i18n.Tr "admin.datasets.recommend"}}</span>{{end}}</td>
</tr>
{{end}}
</tbody>
@@ -37,16 +48,4 @@
{{template "base/paginate" .}}
</div>
</div>

<div class="ui small basic delete modal">
<div class="ui icon header">
<i class="trash icon"></i>
{{.i18n.Tr "dataset.settings.delete"}}
</div>
<div class="content">
<p>{{.i18n.Tr "dataset.settings.delete_desc"}}</p>
{{.i18n.Tr "dataset.settings.delete_notices_2" `<span class="name"></span>` | Safe}}<br>
</div>
{{template "base/delete_modal_actions" .}}
</div>
{{template "base/footer" .}}

+ 13
- 13
templates/admin/dataset/search.tmpl View File

@@ -6,18 +6,18 @@
<i class="dropdown icon"></i>
</span>
<div class="menu">
<a class='{{if or (eq .SortType "oldest") (not .SortType)}}active{{end}} item' href='{{$.Link}}?sort=oldest&q={{$.Keyword}}'>{{.i18n.Tr "repo.issues.filter_sort.oldest"}}</a>
<a class='{{if eq .SortType "newest"}}active{{end}} item' href='{{$.Link}}?sort=newest&q={{$.Keyword}}'>{{.i18n.Tr "repo.issues.filter_sort.latest"}}</a>
<a class='{{if eq .SortType "alphabetically"}}active{{end}} item' href='{{$.Link}}?sort=alphabetically&q={{$.Keyword}}'>{{.i18n.Tr "repo.issues.label.filter_sort.alphabetically"}}</a>
<a class='{{if eq .SortType "reversealphabetically"}}active{{end}} item' href='{{$.Link}}?sort=reversealphabetically&q={{$.Keyword}}'>{{.i18n.Tr "repo.issues.label.filter_sort.reverse_alphabetically"}}</a>
<a class='{{if eq .SortType "recentupdate"}}active{{end}} item' href='{{$.Link}}?sort=recentupdate&q={{$.Keyword}}'>{{.i18n.Tr "repo.issues.filter_sort.recentupdate"}}</a>
<a class='{{if eq .SortType "leastupdate"}}active{{end}} item' href='{{$.Link}}?sort=leastupdate&q={{$.Keyword}}'>{{.i18n.Tr "repo.issues.filter_sort.leastupdate"}}</a>
<a class='{{if eq .SortType "moststars"}}active{{end}} item' href='{{$.Link}}?sort=moststars&q={{$.Keyword}}&tab={{$.TabName}}'>{{.i18n.Tr "repo.issues.filter_sort.moststars"}}</a>
<a class='{{if eq .SortType "feweststars"}}active{{end}} item' href='{{$.Link}}?sort=feweststars&q={{$.Keyword}}&tab={{$.TabName}}'>{{.i18n.Tr "repo.issues.filter_sort.feweststars"}}</a>
<a class='{{if eq .SortType "mostforks"}}active{{end}} item' href='{{$.Link}}?sort=mostforks&q={{$.Keyword}}&tab={{$.TabName}}'>{{.i18n.Tr "repo.issues.filter_sort.mostforks"}}</a>
<a class='{{if eq .SortType "fewestforks"}}active{{end}} item' href='{{$.Link}}?sort=fewestforks&q={{$.Keyword}}&tab={{$.TabName}}'>{{.i18n.Tr "repo.issues.filter_sort.fewestforks"}}</a>
<a class='{{if eq .SortType "size"}}active{{end}} item' href='{{$.Link}}?sort=size&q={{$.Keyword}}'>{{.i18n.Tr "repo.issues.label.filter_sort.by_size"}}</a>
<a class='{{if eq .SortType "reversesize"}}active{{end}} item' href='{{$.Link}}?sort=reversesize&q={{$.Keyword}}'>{{.i18n.Tr "repo.issues.label.filter_sort.reverse_by_size"}}</a>
<a class='{{if or (eq .SortType "oldest") (not .SortType)}}active{{end}} item' href='{{$.Link}}?sort=oldest&q={{$.Keyword}}&recommend={{$.Recommend}}'>{{.i18n.Tr "repo.issues.filter_sort.oldest"}}</a>
<a class='{{if eq .SortType "newest"}}active{{end}} item' href='{{$.Link}}?sort=newest&q={{$.Keyword}}&recommend={{$.Recommend}}'>{{.i18n.Tr "repo.issues.filter_sort.latest"}}</a>
<a class='{{if eq .SortType "alphabetically"}}active{{end}} item' href='{{$.Link}}?sort=alphabetically&q={{$.Keyword}}&recommend={{$.Recommend}}'>{{.i18n.Tr "repo.issues.label.filter_sort.alphabetically"}}</a>
<a class='{{if eq .SortType "reversealphabetically"}}active{{end}} item' href='{{$.Link}}?sort=reversealphabetically&q={{$.Keyword}}&recommend={{$.Recommend}}'>{{.i18n.Tr "repo.issues.label.filter_sort.reverse_alphabetically"}}</a>
<a class='{{if eq .SortType "recentupdate"}}active{{end}} item' href='{{$.Link}}?sort=recentupdate&q={{$.Keyword}}&recommend={{$.Recommend}}'>{{.i18n.Tr "repo.issues.filter_sort.recentupdate"}}</a>
<a class='{{if eq .SortType "leastupdate"}}active{{end}} item' href='{{$.Link}}?sort=leastupdate&q={{$.Keyword}}&recommend={{$.Recommend}}'>{{.i18n.Tr "repo.issues.filter_sort.leastupdate"}}</a>
<a class='{{if eq .SortType "moststars"}}active{{end}} item' href='{{$.Link}}?sort=moststars&q={{$.Keyword}}&tab={{$.TabName}}&recommend={{$.Recommend}}'>{{.i18n.Tr "repo.issues.filter_sort.moststars"}}</a>
<a class='{{if eq .SortType "feweststars"}}active{{end}} item' href='{{$.Link}}?sort=feweststars&q={{$.Keyword}}&tab={{$.TabName}}&recommend={{$.Recommend}}'>{{.i18n.Tr "repo.issues.filter_sort.feweststars"}}</a>
<a class='{{if eq .SortType "mostforks"}}active{{end}} item' href='{{$.Link}}?sort=mostforks&q={{$.Keyword}}&tab={{$.TabName}}&recommend={{$.Recommend}}'>{{.i18n.Tr "repo.issues.filter_sort.mostforks"}}</a>
<a class='{{if eq .SortType "fewestforks"}}active{{end}} item' href='{{$.Link}}?sort=fewestforks&q={{$.Keyword}}&tab={{$.TabName}}&recommend={{$.Recommend}}'>{{.i18n.Tr "repo.issues.filter_sort.fewestforks"}}</a>
<a class='{{if eq .SortType "size"}}active{{end}} item' href='{{$.Link}}?sort=size&q={{$.Keyword}}&recommend={{$.Recommend}}'>{{.i18n.Tr "repo.issues.label.filter_sort.by_size"}}</a>
<a class='{{if eq .SortType "reversesize"}}active{{end}} item' href='{{$.Link}}?sort=reversesize&q={{$.Keyword}}&recommend={{$.Recommend}}'>{{.i18n.Tr "repo.issues.label.filter_sort.reverse_by_size"}}</a>
</div>
</div>
</div>
@@ -26,4 +26,4 @@
<input name="q" value="{{.Keyword}}" placeholder='{{.i18n.Tr "explore.search"}}...' autofocus>
<button class="ui blue button">{{.i18n.Tr "explore.search"}}</button>
</div>
</form>
</form>
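
Every sort link now round-trips a `recommend` query parameter so the "only recommend" checkbox added in list.tmpl survives re-sorting. On the Go side this presumably means the admin dataset handler reads that flag and stores it in `ctx.Data["Recommend"]` for these links; the sketch below is an assumption, not the code from routers/admin/dataset.go.

```go
package admin

import (
	"code.gitea.io/gitea/modules/context" // import path assumed from the upstream Gitea layout
)

// readRecommendFilter is a hypothetical helper showing how the new
// "recommend" query parameter could be consumed by the dataset list handler.
func readRecommendFilter(ctx *context.Context) bool {
	recommend := ctx.Query("recommend") == "true"
	ctx.Data["Recommend"] = recommend // echoed back by the sort links in search.tmpl
	return recommend
}
```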

+ 4
- 4
templates/custom/select_dataset.tmpl View File

@@ -23,7 +23,7 @@
<el-tab-pane label="{{.i18n.Tr "dataset.current_project"}}" name="first">
<div style="display: flex;align-items: center;justify-content: space-between;padding: 1rem 0;border-bottom:1px solid #F5F5F5" v-for="(dataset,index) in currentRepoDataset" :key="index">
<div style="width: 90%;">
<div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias} </span><span class="panel_dataset_name">${dataset.Name} </span></div>
<div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><img v-if="dataset.Recommend" src="/img/jian.svg" style="margin-left: 0.5rem;"><span class="panel_dataset_name">${dataset.Name} </span></div>
<div style="margin-top: 8px;display: flex;">
<a :title="dataset.UserName" style="cursor: default;">
<img class="ui avatar mini image" style="width: 20px;height: 20px;" :src="dataset.RelAvatarLink">
@@ -49,7 +49,7 @@
<el-tab-pane label="{{.i18n.Tr "dataset.owner_dataset"}}" name="second">
<div style="display: flex;align-items: center;justify-content: space-between;padding: 1rem 0;border-bottom:1px solid #F5F5F5" v-for="(dataset,index) in myDataset" :key="index">
<div style="width: 90%;">
<div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><span class="panel_dataset_name">${dataset.Name}</span></div>
<div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><img v-if="dataset.Recommend" src="/img/jian.svg" style="margin-left: 0.5rem;"><span class="panel_dataset_name">${dataset.Name}</span></div>
<div style="margin-top: 8px;display: flex;">
<a :title="dataset.UserName" style="cursor: default;">
<img class="ui avatar mini image" style="width: 20px;height: 20px;" :src="dataset.RelAvatarLink">
@@ -74,7 +74,7 @@
<el-tab-pane label="{{.i18n.Tr "dataset.public_dataset"}}" name="third">
<div style="display: flex;align-items: center;justify-content: space-between;padding: 1rem 0;border-bottom:1px solid #F5F5F5" v-for="(dataset,index) in publicDataset" :key="index">
<div style="width: 90%;">
<div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><span class="panel_dataset_name">${dataset.Name}</span></div>
<div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><img v-if="dataset.Recommend" src="/img/jian.svg" style="margin-left: 0.5rem;"><span class="panel_dataset_name">${dataset.Name}</span></div>
<div style="margin-top: 8px;display: flex;">
<a :title="dataset.UserName" style="cursor: default;">
<img class="ui avatar mini image" style="width: 20px;height: 20px;" :src="dataset.RelAvatarLink">
@@ -99,7 +99,7 @@
<el-tab-pane label="{{.i18n.Tr "dataset.I_liked"}}" name="fourth">
<div style="display: flex;align-items: center;justify-content: space-between;padding: 1rem 0;border-bottom:1px solid #F5F5F5" v-for="(dataset,index) in myFavoriteDataset" :key="index">
<div style="width: 90%;">
<div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><span class="panel_dataset_name">${dataset.Name}</span></div>
<div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><img v-if="dataset.Recommend" src="/img/jian.svg" style="margin-left: 0.5rem;"><span class="panel_dataset_name">${dataset.Name}</span></div>
<div style="margin-top: 8px;display: flex;">
<a :title="dataset.UserName" style="cursor: default;">
<img class="ui avatar mini image" style="width: 20px;height: 20px;" :src="dataset.RelAvatarLink">


+ 4
- 4
templates/custom/select_dataset_train.tmpl View File

@@ -23,7 +23,7 @@
<el-tab-pane label="{{.i18n.Tr "dataset.current_project"}}" name="first">
<div style="display: flex;align-items: center;justify-content: space-between;padding: 1rem 0;border-bottom:1px solid #F5F5F5" v-for="(dataset,index) in currentRepoDataset" :key="index">
<div style="width: 90%;">
<div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias} </span><span class="panel_dataset_name">${dataset.Name} </span></div>
<div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias} </span><img v-if="dataset.Recommend" src="/img/jian.svg" style="margin-left: 0.5rem;"><span class="panel_dataset_name">${dataset.Name} </span></div>
<div style="margin-top: 8px;display: flex;">
<a :title="dataset.UserName" style="cursor: default;">
<img class="ui avatar mini image" style="width: 20px;height: 20px;" :src="dataset.RelAvatarLink">
@@ -49,7 +49,7 @@
<el-tab-pane label="{{.i18n.Tr "dataset.owner_dataset"}}" name="second">
<div style="display: flex;align-items: center;justify-content: space-between;padding: 1rem 0;border-bottom:1px solid #F5F5F5" v-for="(dataset,index) in myDataset" :key="index">
<div style="width: 90%;">
<div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><span class="panel_dataset_name">${dataset.Name}</span></div>
<div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><img v-if="dataset.Recommend" src="/img/jian.svg" style="margin-left: 0.5rem;"><span class="panel_dataset_name">${dataset.Name}</span></div>
<div style="margin-top: 8px;display: flex;">
<a :title="dataset.UserName" style="cursor: default;">
<img class="ui avatar mini image" style="width: 20px;height: 20px;" :src="dataset.RelAvatarLink">
@@ -74,7 +74,7 @@
<el-tab-pane label="{{.i18n.Tr "dataset.public_dataset"}}" name="third">
<div style="display: flex;align-items: center;justify-content: space-between;padding: 1rem 0;border-bottom:1px solid #F5F5F5" v-for="(dataset,index) in publicDataset" :key="index">
<div style="width: 90%;">
<div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><span class="panel_dataset_name">${dataset.Name}</span></div>
<div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><img v-if="dataset.Recommend" src="/img/jian.svg" style="margin-left: 0.5rem;"><span class="panel_dataset_name">${dataset.Name}</span></div>
<div style="margin-top: 8px;display: flex;">
<a :title="dataset.UserName" style="cursor: default;">
<img class="ui avatar mini image" style="width: 20px;height: 20px;" :src="dataset.RelAvatarLink">
@@ -99,7 +99,7 @@
<el-tab-pane label="{{.i18n.Tr "dataset.I_liked"}}" name="fourth">
<div style="display: flex;align-items: center;justify-content: space-between;padding: 1rem 0;border-bottom:1px solid #F5F5F5" v-for="(dataset,index) in myFavoriteDataset" :key="index">
<div style="width: 90%;">
<div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><span class="panel_dataset_name">${dataset.Name}</span></div>
<div style="display: flex;align-items: center;"><span class="panel_creator_reponam">${dataset.Repo.OwnerName}/${dataset.Repo.Alias}</span><img v-if="dataset.Recommend" src="/img/jian.svg" style="margin-left: 0.5rem;"><span class="panel_dataset_name">${dataset.Name}</span></div>
<div style="margin-top: 8px;display: flex;">
<a :title="dataset.UserName" style="cursor: default;">
<img class="ui avatar mini image" style="width: 20px;height: 20px;" :src="dataset.RelAvatarLink">


+ 13
- 6
templates/explore/datasets.tmpl View File

@@ -121,11 +121,13 @@
<i class="dropdown icon"></i>
</span>
<div class="menu">
<a class="{{if eq .SortType "newest"}}active{{end}} item" href="{{$.Link}}?sort=newest&q={{$.Keyword}}&tab={{$.TabName}}&category={{$.Category}}&task={{$.Task}}&license={{$.License}}">{{.i18n.Tr "repo.issues.filter_sort.latest"}}</a>
<a class="{{if eq .SortType "oldest"}}active{{end}} item" href="{{$.Link}}?sort=oldest&q={{$.Keyword}}&tab={{$.TabName}}&category={{$.Category}}&task={{$.Task}}&license={{$.License}}">{{.i18n.Tr "repo.issues.filter_sort.oldest"}}</a>
<a class="{{if eq .SortType "recentupdate"}}active{{end}} item" href="{{$.Link}}?sort=recentupdate&q={{$.Keyword}}&tab={{$.TabName}}&category={{$.Category}}&task={{$.Task}}&license={{$.License}}">{{.i18n.Tr "repo.issues.filter_sort.recentupdate"}}</a>
<a class="{{if eq .SortType "leastupdate"}}active{{end}} item" href="{{$.Link}}?sort=leastupdate&q={{$.Keyword}}&tab={{$.TabName}}&category={{$.Category}}&task={{$.Task}}&license={{$.License}}">{{.i18n.Tr "repo.issues.filter_sort.leastupdate"}}</a>
<!-- <a class="{{if eq .SortType "downloadtimes"}}active{{end}} item" href="{{$.Link}}?sort=downloadtimes&q={{$.Keyword}}&tab={{$.TabName}}">{{.i18n.Tr "repo.issues.filter_sort.downloadtimes"}}</a> -->
<a class="{{if eq .SortType "default"}}active{{end}} item" href="{{$.Link}}?sort=default&q={{$.Keyword}}&tab={{$.TabName}}&category={{$.Category}}&task={{$.Task}}&license={{$.License}}&recommend={{$.Recommend}}">{{.i18n.Tr "repo.issues.filter_sort.default"}}</a>
<a class="{{if eq .SortType "newest"}}active{{end}} item" href="{{$.Link}}?sort=newest&q={{$.Keyword}}&tab={{$.TabName}}&category={{$.Category}}&task={{$.Task}}&license={{$.License}}&recommend={{$.Recommend}}">{{.i18n.Tr "repo.issues.filter_sort.latest"}}</a>
<a class="{{if eq .SortType "oldest"}}active{{end}} item" href="{{$.Link}}?sort=oldest&q={{$.Keyword}}&tab={{$.TabName}}&category={{$.Category}}&task={{$.Task}}&license={{$.License}}&recommend={{$.Recommend}}">{{.i18n.Tr "repo.issues.filter_sort.oldest"}}</a>
<a class="{{if eq .SortType "recentupdate"}}active{{end}} item" href="{{$.Link}}?sort=recentupdate&q={{$.Keyword}}&tab={{$.TabName}}&category={{$.Category}}&task={{$.Task}}&license={{$.License}}&recommend={{$.Recommend}}">{{.i18n.Tr "repo.issues.filter_sort.recentupdate"}}</a>
<a class="{{if eq .SortType "leastupdate"}}active{{end}} item" href="{{$.Link}}?sort=leastupdate&q={{$.Keyword}}&tab={{$.TabName}}&category={{$.Category}}&task={{$.Task}}&license={{$.License}}&recommend={{$.Recommend}}">{{.i18n.Tr "repo.issues.filter_sort.leastupdate"}}</a>
<a class="{{if eq .SortType "downloadtimes"}}active{{end}} item" href="{{$.Link}}?sort=downloadtimes&q={{$.Keyword}}&tab={{$.TabName}}&category={{$.Category}}&task={{$.Task}}&license={{$.License}}&recommend={{$.Recommend}}">{{.i18n.Tr "repo.issues.filter_sort.downloadtimes"}}</a>
<a class="{{if eq .SortType "moststars"}}active{{end}} item" href="{{$.Link}}?sort=moststars&q={{$.Keyword}}&tab={{$.TabName}}&category={{$.Category}}&task={{$.Task}}&license={{$.License}}&recommend={{$.Recommend}}">{{.i18n.Tr "repo.issues.filter_sort.moststars"}}</a>
</div>
</div>
</div>
@@ -139,6 +141,7 @@
{{end}}

<div class="ui row" style="clear: both;" id="dataset-base">
<el-checkbox v-model="checked" style="padding: 0.5rem 1rem;" @change="handleCheckedChange" >仅显示平台推荐</el-checkbox>
<div class="ui two cards">
{{range $k, $v :=.Datasets}}
<div class="ui card" @click="gotoDataset('{{.Repo.Link}}/datasets')" style="cursor: pointer;box-shadow: 0px 4px 4px 0px rgba(232,232,232,0.6);border: 1px solid rgba(232, 232, 232, 1);">
@@ -147,6 +150,8 @@
<a href="{{.Repo.Link}}/datasets" style="font-size: 12px;color: #3291F8;height: 24px;">{{.Repo.OwnerName}} / {{.Repo.Alias}}</a>
{{if $.IsSigned}}
<span style="display: flex;align-items: center;justify-content: flex-end;cursor: pointer;" @click.stop="postSquareStar({{.ID}},'{{.Repo.Link}}/datasets',{{$k}})">
<span style="line-height: 1;color: #101010;margin-bottom: -2px;"><i class="ri-download-line" style="font-size: 1.3em;"></i></span>
<span style="line-height: 1;color: #101010;margin-right: 0.6rem;">{{.DownloadTimes}}</span>
<div style="line-height: 1;margin-right: 4px;margin-bottom: -2px;">
<svg width="1.4em" height="1.4em" viewBox="0 0 32 32" class="heart-stroke" :class='{stars_active:starActives[{{$k}}]}'><path d="M4.4 6.54c-1.761 1.643-2.6 3.793-2.36 6.056.24 2.263 1.507 4.521 3.663 6.534a29110.9 29110.9 0 0010.296 9.633l10.297-9.633c2.157-2.013 3.424-4.273 3.664-6.536.24-2.264-.599-4.412-2.36-6.056-1.73-1.613-3.84-2.29-6.097-1.955-1.689.25-3.454 1.078-5.105 2.394l-.4.319-.398-.319c-1.649-1.316-3.414-2.143-5.105-2.394a7.612 7.612 0 00-1.113-.081c-1.838 0-3.541.694-4.983 2.038z"></path></svg>
</div>
@@ -154,6 +159,8 @@
</span>
{{else}}
<span style="display: flex;align-items: center;justify-content: flex-end;cursor: pointer;">
<span style="line-height: 1;color: #101010;margin-bottom: -2px;"><i class="ri-download-line" style="font-size: 1.3em;"></i></span>
<span style="line-height: 1;color: #101010;margin-right: 0.6rem;">{{.DownloadTimes}}</span>
<div style="line-height: 1;margin-right: 4px;margin-bottom: -2px;">
<svg width="1.4em" height="1.4em" viewBox="0 0 32 32" class="heart-stroke" :class='{stars_active:starActives[{{$k}}]}'><path d="M4.4 6.54c-1.761 1.643-2.6 3.793-2.36 6.056.24 2.263 1.507 4.521 3.663 6.534a29110.9 29110.9 0 0010.296 9.633l10.297-9.633c2.157-2.013 3.424-4.273 3.664-6.536.24-2.264-.599-4.412-2.36-6.056-1.73-1.613-3.84-2.29-6.097-1.955-1.689.25-3.454 1.078-5.105 2.394l-.4.319-.398-.319c-1.649-1.316-3.414-2.143-5.105-2.394a7.612 7.612 0 00-1.113-.081c-1.838 0-3.541.694-4.983 2.038z"></path></svg>
</div>
@@ -161,7 +168,7 @@
</span>
{{end}}
</div>
<div style="font-size: 16px;color:#0366D6;font-family: SourceHanSansSC-medium;height: 27px;font-weight: bold;">{{.Title}}</div>
<div style="font-size: 16px;color:#0366D6;font-family: SourceHanSansSC-medium;height: 27px;font-weight: bold;display: flex;align-items: center"><span title="{{.Title}}" class="nowrap" style="display: inline-block;">{{.Title}}</span>{{if .Recommend}}<img src="/img/jian.svg" style="margin-left: 0.5rem;">{{end}}</div>
{{if or (.Category) (.Task) (.License)}}
<div style="font-size: 12px;margin-top: 5px;">
{{if .Category}}


+ 1
- 0
templates/mail/auth/activate.tmpl View File

@@ -11,5 +11,6 @@
<p><a href="{{AppUrl}}user/activate?code={{.Code}}">{{AppUrl}}user/activate?code={{.Code}}</a></p>
<p>Not working? Try copying and pasting it to your browser.</p>
<p>© <a target="_blank" rel="noopener noreferrer" href="{{AppUrl}}">{{AppName}}</a></p>
<p>退订(TD)</p>
</body>
</html>

+ 1
- 0
templates/mail/auth/activate_email.tmpl View File

@@ -11,5 +11,6 @@
<p><a href="{{AppUrl}}user/activate_email?code={{.Code}}&email={{.Email}}">{{AppUrl}}user/activate_email?code={{.Code}}&email={{.Email}}</a></p>
<p>Not working? Try copying and pasting it to your browser.</p>
<p>© <a target="_blank" rel="noopener noreferrer" href="{{AppUrl}}">{{AppName}}</a></p>
<p>退订(TD)</p>
</body>
</html>

+ 1
- 0
templates/mail/auth/register_notify.tmpl View File

@@ -11,5 +11,6 @@
<p><a href="{{AppUrl}}user/login">{{AppUrl}}user/login</a></p>
<p>If this account has been created for you, please <a href="{{AppUrl}}user/forgot_password">set your password</a> first.</p>
<p>© <a target="_blank" rel="noopener noreferrer" href="{{AppUrl}}">{{AppName}}</a></p>
<p>退订(TD)</p>
</body>
</html>

+ 1
- 0
templates/mail/auth/reset_passwd.tmpl View File

@@ -12,5 +12,6 @@
<p><a href="{{AppUrl}}user/recover_account?code={{.Code}}">{{AppUrl}}user/recover_account?code={{.Code}}</a></p>
<p>Not working? Try copying and pasting it to your browser.</p>
<p>© <a target="_blank" rel="noopener noreferrer" href="{{AppUrl}}">{{AppName}}</a></p>
<p>退订(TD)</p>
</body>
</html>

+ 2
- 0
templates/mail/issue/assigned.tmpl View File

@@ -15,6 +15,8 @@
---
<br>
<a href="{{.Link}}">View it on {{AppName}}</a>.
<br>
退订(TD)
</p>
</div>
</body>


+ 2
- 0
templates/mail/issue/default.tmpl View File

@@ -53,6 +53,8 @@
---
<br>
<a href="{{.Link}}">View it on {{AppName}}</a>.
<br>
退订(TD)
</p>
</div>
</body>


+ 2
- 0
templates/mail/notify/collaborator.tmpl View File

@@ -15,6 +15,8 @@
---
<br>
<a href="{{.Link}}">View it on {{AppName}}</a>.
<br>
退订(TD)
</p>
</div>
</body>


+ 4
- 6
templates/repo/attachment/upload.tmpl View File

@@ -13,17 +13,15 @@
<el-form label-width="140px">
{{.CsrfTokenHtml}}
<el-form-item label='{{$.i18n.Tr "dataset.dataset_available_clusters"}}:' prop="title">
<el-button :class="{active:type==0}" size="small" style="margin: 0;border-radius: 0.28571429rem 0 0 0.28571429rem;" @click="uploadGpu">CPU/GPU</el-button>
<el-button :class="{active:type==1}" size="small" style="margin: 0 0 0 -4px;border-radius: 0 0.28571429rem 0.28571429rem 0;" @click="uploadNpu">NPU</el-button>
<!-- <span>请输入字母、数字、_和-,最长64个字符,且不能以中划线(-)结尾。</span> -->
<el-button :class="{active:type==0}" :disabled="clusterFlag" size="small" style="margin: 0;border-radius: 0.28571429rem 0 0 0.28571429rem;" @click="uploadGpu">CPU/GPU</el-button>
<el-button :class="{active:type==1}" :disabled="clusterFlag" size="small" style="margin: 0 0 0 -4px;border-radius: 0 0.28571429rem 0.28571429rem 0;" @click="uploadNpu">NPU</el-button>
</el-form-item>
<el-form-item label='{{$.i18n.Tr "dataset.file_description"}}:' prop="description">
<el-input type="textarea" :rows="3" maxlength="255" placeholder="{{$.i18n.Tr "repo.modelarts.train_job.new_place"}}" v-model="desc"></el-input>
</el-form-item>
<el-form-item label='{{$.i18n.Tr "dataset.data_upload"}}:' prop="category">
<minio-uploader :uploadtype="type" :desc="desc"></minio-uploader>
</el-form-item>
<minio-uploader :uploadtype="type" :desc="desc" @setcluster="setcluster"></minio-uploader>
</el-form-item>
<div style='display:none;'
id="minioUploader-params"
data-uuid="{{.uuid}}"


+ 20
- 1
templates/repo/cloudbrain/new.tmpl View File

@@ -179,7 +179,26 @@
</div>
</div>
<input id="store_category" type="hidden" name="get_benchmark_category">

<div class="inline required field">
<label>{{.i18n.Tr "repo.modelarts.code_version"}}</label>
<select class="ui dropdown width80 left2" id="code_version" name="branch_name">
{{if .branch_name}}
<option name="branch_name" value="{{.branch_name}}">{{.branch_name}}</option>
{{range $k, $v :=.Branches}}
{{ if ne $v $.branch_name }}
<option name="branch_name" value="{{$v}}">{{$v}}</option>
{{end}}
{{end}}
{{else}}
<option name="branch_name" value="{{.branchName}}">{{.branchName}}</option>
{{range $k, $v :=.Branches}}
{{ if ne $v $.branchName }}
<option name="branch_name" value="{{$v}}">{{$v}}</option>
{{end}}
{{end}}
{{end}}
</select>
</div>
<div class="inline required field">
<label>{{.i18n.Tr "cloudbrain.gpu_type"}}</label>
<select id="cloudbrain_gpu_type" class="ui search dropdown" placeholder="选择GPU类型" style='width:385px' name="gpu_type">


+ 12
- 2
templates/repo/cloudbrain/show.tmpl View File

@@ -56,7 +56,7 @@
margin:10px 5px ;
}
.tab_2_content {
min-height: 380px;
min-height: 420px;
margin-left: 10px;
}
.ac-grid {
@@ -187,7 +187,7 @@ td, th {
{{.i18n.Tr "repo.cloudbrain"}}
</a>
<div class="divider"> / </div>
<a class="section backTodeBug" href="{{.RepoLink}}/debugjob?debugListType={{if eq $.debugListType "NPU"}}NPU{{else if eq $.debugListType "CPU/GPU"}}CPU/GPU{{else}}all{{end}}">
<a class="section backTodeBug" href="{{.RepoLink}}/debugjob?debugListType=all">
{{$.i18n.Tr "repo.modelarts.notebook"}}
</a>
<div class="divider"> / </div>
@@ -289,6 +289,16 @@ td, th {
</div>
</td>
</tr>
<tr class="ti-no-ng-animate">
<td class="ti-no-ng-animate ti-text-form-label text-width80">
{{$.i18n.Tr "repo.modelarts.code_version"}}
</td>
<td class="ti-text-form-content">
<div class="text-span text-span-w" id="{{.VersionName}}-code">
{{.BranchName}}
</div>
</td>
</tr>
<tr class="ti-no-ng-animate">
<td class="ti-no-ng-animate ti-text-form-label text-width80">
{{$.i18n.Tr "cloudbrain.gpu_type"}}


+ 1
- 1
templates/repo/datasets/index.tmpl View File

@@ -281,7 +281,7 @@
</span>
<el-dropdown-menu slot="dropdown">
<el-dropdown-item class="clipboard" data-clipboard-text="{{.DownloadURL}}" data-clipboard-action="copy">{{$.i18n.Tr "dataset.copy_url"}}</el-dropdown-item>
<el-dropdown-item class="clipboard" data-clipboard-text="{{.Md5}}" data-clipboard-action="copy">{{$.i18n.Tr "dataset.copy_md5"}}</el-dropdown-item>
<!-- <el-dropdown-item class="clipboard" data-clipboard-text="{{.Md5}}" data-clipboard-action="copy">{{$.i18n.Tr "dataset.copy_md5"}}</el-dropdown-item>-->
{{if and ($.CanWrite) (eq .DecompressState 1) }}
<el-dropdown-item @click.native="gotoAnnotate('{{$.RepoLink}}','{{.UUID}}',{{.Type}})">{{$.i18n.Tr "dataset.annotation"}}</el-dropdown-item>
{{end}}


+ 2
- 2
templates/repo/issue/branch_selector_field.tmpl View File

@@ -14,12 +14,12 @@
<div class="ui grid">
<div class="two column row">
<a class="reference column" href="#" data-target="#branch-list">
<span class="text black">
<span class="text ">
{{svg "octicon-git-branch" 16}} {{.i18n.Tr "repo.branches"}}
</span>
</a>
<a class="reference column" href="#" data-target="#tag-list">
<span class="text">
<span class="text black">
<i class="reference tags icon"></i> {{.i18n.Tr "repo.tags"}}
</span>
</a>


+ 35
- 0
templates/repo/issue/view_content/comments.tmpl View File

@@ -594,5 +594,40 @@
{{end}}
</span>
</div>
{{else if eq .Type 29}}
<div class="timeline-item event" id="{{.HashTag}}">
<span class="badge">{{svg "octicon-git-branch" 16}}</span>
<a class="ui avatar image" href="{{.Poster.HomeLink}}">
<img src="{{.Poster.RelAvatarLink}}">
</a>
<span class="text grey">
<a class="author" href="{{.Poster.HomeLink}}">{{.Poster.GetDisplayName}}</a>

{{ $refOldName:= GetRefName .OldRef }}
{{ $refNewName:= GetRefName .NewRef }}

{{if .OldRef }}
{{if .NewRef }}
{{$.i18n.Tr "repo.issues.change_branch_tag_at" ($refOldName|Escape) ($refNewName|Escape) $createdStr | Safe}}
{{else}}
{{ $getRefOldType:= GetRefType .OldRef }}
{{ if eq $getRefOldType "branch"}}
{{$.i18n.Tr "repo.issues.remove_branch_at" ($refOldName|Escape) $createdStr | Safe}}
{{else}}
{{$.i18n.Tr "repo.issues.remove_tag_at" ($refOldName|Escape) $createdStr | Safe}}
{{end}}
{{end}}
{{else}}
{{if .NewRef}}
{{ $getRefNewType:= GetRefType .NewRef }}
{{ if eq $getRefNewType "branch"}}
{{$.i18n.Tr "repo.issues.add_branch_at" ($refNewName|Escape) $createdStr | Safe}}
{{else}}
{{$.i18n.Tr "repo.issues.add_tag_at" ($refNewName|Escape) $createdStr | Safe}}
{{end}}
{{end}}
{{end}}
</span>
</div>
{{end}}
{{end}}

+ 48
- 1
templates/repo/issue/view_content/sidebar.tmpl View File

@@ -1,6 +1,52 @@
<div class="four wide column">
<div class="ui segment metas">
{{template "repo/issue/branch_selector_field" .}}
<!-- {{template "repo/issue/branch_selector_field" .}} -->
{{if and (not .Issue.IsPull) (not .PageIsComparePull)}}
<input id="ref_selector" name="ref" type="hidden" value="{{.Issue.Ref}}">
<div class="ui {{if or (not .HasIssuesOrPullsWritePermission) .Repository.IsArchived}}disabled{{end}} floating filter select-branch dropdown" data-no-results="{{.i18n.Tr "repo.pulls.no_results"}}">
<div class="ui basic small button">
<span class="text branch-name">{{if .Issue.Ref}}{{$.RefEndName}}{{else}}{{.i18n.Tr "repo.issues.no_ref"}}{{end}}</span>
<i class="dropdown icon"></i>
</div>
<div class="menu" data-action="update" data-issue-id="{{$.Issue.ID}}" data-update-url="{{$.RepoLink}}/issues/ref">
<div class="ui icon search input">
<i class="filter icon"></i>
<input name="search" placeholder="{{.i18n.Tr "repo.filter_branch_and_tag"}}...">
</div>
<div class="no-select item">{{.i18n.Tr "repo.issues.new.clear_branch_tag"}}</div>

<div class="header">
<div class="ui grid">
<div class="two column row">
<a class="reference column" href="#" data-target="#branch-list">
<span class="text">
{{svg "octicon-git-branch" 16}} {{.i18n.Tr "repo.branches"}}
</span>
</a>
<a class="reference column" href="#" data-target="#tag-list">
<span class="text black">
<i class="reference tags icon"></i> {{.i18n.Tr "repo.tags"}}
</span>
</a>
</div>
</div>
</div>
<div id="branch-list" class="scrolling menu reference-list-menu">
{{range .Branches}}
<div class="item" data-id="refs/heads/{{.}}" data-name="{{.}}" data-id-selector="#ref_selector">{{.}}</div>
{{end}}
</div>
<div id="tag-list" class="scrolling menu reference-list-menu" style="display: none">
{{range .Tags}}
<div class="item" data-id="refs/tags/{{.}}" data-name="tags/{{.}}" data-id-selector="#ref_selector">{{.}}</div>
{{end}}
</div>
</div>
</div>
<div class="ui divider"></div>
{{end}}

{{if .Issue.IsPull }}

@@ -600,3 +646,4 @@
</div>
{{end}}
{{end}}


+ 1
- 1
templates/repo/modelarts/notebook/show.tmpl View File

@@ -193,7 +193,7 @@ td, th {
{{.i18n.Tr "repo.cloudbrain"}}
</a>
<div class="divider"> / </div>
<a class="section backTodeBug" href="{{.RepoLink}}/debugjob?debugListType={{if eq $.debugListType "NPU"}}NPU{{else if eq $.debugListType "CPU/GPU"}}CPU/GPU{{else}}all{{end}}">
<a class="section backTodeBug" href="{{.RepoLink}}/debugjob?debugListType=all">
{{$.i18n.Tr "repo.modelarts.notebook"}}
</a>
<div class="divider"> / </div>


+ 26
- 16
templates/repo/modelarts/trainjob/new.tmpl View File

@@ -233,8 +233,13 @@

<div class="ui labeled input" style="width: 5%;">

<input style="border-radius: 0;text-align: center;" name="work_server_number" id="trainjob_work_server_num" tabindex="3" autofocus required maxlength="255" value="1" readonly>

<input style="border-radius: 0;text-align: center;"type="hidden" name="work_server_number" id="trainjob_work_server_num" tabindex="3" autofocus required maxlength="255" value="1" readonly>
<div class="field" id="trainjob_work_server_num_select" name="work_server_number_select">
<select class="ui dropdown width" style='width: 100%;' name="work_server_id">
<option name="server_id" value="1">1</option>
<option name="server_id" value="2">2</option>
</select>
</div>

</div>
</div>
@@ -263,19 +268,20 @@
$('.menu .item')
.tab();

let sever_num = $('#trainjob_work_server_num')
$('.add').click(function(){
sever_num.val(parseInt(sever_num.val())+1)
if(sever_num.val()>=26){
sever_num.val(parseInt(sever_num.val())-1)
}
})
$('.min').click(function(){
sever_num.val(parseInt(sever_num.val())-1)
if(sever_num.val()<=0){
sever_num.val(parseInt(sever_num.val())+1)
}
})
// let sever_num = $("#trainjob_work_server_num_select .text").text() //$('#trainjob_work_server_num')
// console.log("sever_num:",sever_num)
// $('.add').click(function(){
// sever_num.val(parseInt(sever_num.val())+1)
// if(sever_num.val()>=26){
// sever_num.val(parseInt(sever_num.val())-1)
// }
// })
// $('.min').click(function(){
// sever_num.val(parseInt(sever_num.val())-1)
// if(sever_num.val()<=0){
// sever_num.val(parseInt(sever_num.val())+1)
// }
// })
// Add, delete, edit and save run parameters
function Add_parameter(i){
value = '<div class="two fields width85" id= "para'+ i +'">' +
@@ -349,7 +355,7 @@
// $("select[name='pool_id']").val(parameters[i]);
// break;
case (6):
$("input[name='work_server_number']").val(parameters[i]);
// $("input[name='work_server_number']").val(parameters[i]);
break;
}
}
@@ -456,6 +462,10 @@
$("input#ai_engine_name").val(name1)
$("input#ai_flaver_name").val(name2)

let val_server_num_select = $("#trainjob_work_server_num_select .text").text()
// console.log("val_server_num_select:",val_server_num_select)
$("input#trainjob_work_server_num").val(val_server_num_select)

}
$('.ui.create_train_job.green.button').click(function(e) {
get_name()


+ 99
- 6
templates/repo/modelarts/trainjob/show.tmpl View File

@@ -249,7 +249,7 @@ td, th {
<div class="ui pointing secondary menu" style="border-bottom: 1px solid rgba(34,36,38,.15);">

<a class="active item" data-tab="first{{$k}}">{{$.i18n.Tr "repo.modelarts.train_job.config"}}</a>
<a class="item" data-tab="second{{$k}}" onclick="loadLog({{.VersionName}})">{{$.i18n.Tr "repo.modelarts.log"}}</a>
<a class="item log_bottom" data-tab="second{{$k}}" data-version="{{.VersionName}}">{{$.i18n.Tr "repo.modelarts.log"}}</a>
<a class="item" data-tab="third{{$k}}" onclick="loadModelFile({{.VersionName}},'','','init')">{{$.i18n.Tr "repo.model_download"}}</a>
</div>
<div class="ui tab active" data-tab="first{{$k}}">
@@ -420,11 +420,17 @@ td, th {
</div>
</div>
<div class="ui tab" data-tab="second{{$k}}">
<div>
<div style="position: relative;">
<span>
<a title="滚动到顶部" style="position: absolute; right: -32px;cursor: pointer;" class="log_top" data-version="{{.VersionName}}"><i class="icon-to-top"></i></a>
</span>
<span>
<a title="滚动到底部" style="position: absolute; bottom: 10px;right: -32px;cursor: pointer;" class="log_bottom" data-version="{{.VersionName}}"><i class="icon-to-bottom"></i></a>
</span>
<div class="ui message message{{.VersionName}}" style="display: none;">
<div id="header"></div>
</div>
<div class="ui attached log" onscroll="logScroll({{.VersionName}})" id="log{{.VersionName}}" style="height: 300px !important; overflow: auto;">
<div class="ui attached log" onscroll="fn({{.VersionName}})" id="log{{.VersionName}}" style="height: 300px !important; overflow: auto;">
<input type="hidden" name="end_line" value>
<input type="hidden" name="start_line" value>
<pre id="log_file{{.VersionName}}"></pre>
@@ -830,15 +836,28 @@ td, th {
html += "</div>"
$(`#dir_list${version_name}`).append(html)
}
function debounce(fn,delay){
let timer;
return (...args) => {
// If a timer is already pending, clear it
if (timer) {
clearTimeout(timer);
}

// Re-arm the timer
timer = setTimeout(() => {
fn.apply(this, args);
}, delay);
};
}
const fn = debounce(logScroll, 500)
function logScroll(version_name) {

let container = document.querySelector(`#log${version_name}`)
let scrollTop = container.scrollTop
let scrollHeight = container.scrollHeight
let clientHeight = container.clientHeight
let scrollLeft = container.scrollLeft
if((parseInt(scrollTop) + clientHeight == scrollHeight || parseInt(scrollTop) + clientHeight +1 == scrollHeight || parseInt(scrollTop) + clientHeight - 1 == scrollHeight) && (scrollLeft===0)){
if(((parseInt(scrollTop) + clientHeight == scrollHeight || parseInt(scrollTop) + clientHeight +1 == scrollHeight || parseInt(scrollTop) + clientHeight - 1 == scrollHeight)) && parseInt(scrollTop)!==0 && scrollLeft==0){
let end_line = $(`#log${version_name} input[name=end_line]`).val()
$.get(`/api/v1/repos/${userName}/${repoPath}/modelarts/train-job/${jobID}/log?version_name=${version_name}&base_line=${end_line}&lines=50&order=desc`, (data) => {
if (data.Lines == 0){
@@ -861,7 +880,7 @@ td, th {
console.log(err);
});
}
if(scrollTop == 0 && scrollLeft==0){
if([0,1,2,3,4,5,6,7,8,9,10].includes(scrollTop) && scrollLeft==0){
let start_line = $(`#log${version_name} input[name=start_line]`).val()
$.get(`/api/v1/repos/${userName}/${repoPath}/modelarts/train-job/${jobID}/log?version_name=${version_name}&base_line=${start_line}&lines=50&order=asc`, (data) => {
if (data.Lines == 0){
@@ -879,4 +898,78 @@ td, th {
});
}
}
function scrollAnimation(dom, currentY, targetY, currentX) {
let needScrollTop = targetY - currentY;
let _currentY = currentY;
setTimeout(() => {
// Per-frame scroll distance; it differs on every call
// take one tenth of the remaining distance
const dist = Math.ceil(needScrollTop / 10);
_currentY += dist;
// move by that tenth
dom.scrollTo(currentX || 0, _currentY,'smooth');
// If fewer than ten pixels remain, jump straight to the target; otherwise recurse to animate
if (needScrollTop > 10 || needScrollTop < -10) {
scrollAnimation(dom, _currentY, targetY)
} else {
dom.scrollTo(0, targetY,'smooth')
}
}, 1)
}

$('.log_top').click(function(){
// let logContentDom = document.querySelector('.log')
// if(!logContentDom)
// return
// let version_name = $('.log_top').data('version')
let version_name = $(this).data('version')
let logContentDom = document.querySelector(`#log${version_name}`)
$(`#log_file${version_name}`).siblings('pre').remove()
$.get(`/api/v1/repos/${userName}/${repoPath}/modelarts/train-job/${jobID}/log?version_name=${version_name}&base_line=&lines=50&order=asc`, (data) => {
$(`#log${version_name} input[name=end_line]`).val(data.EndLine) // update the stored line marker when it changes
$(`#log${version_name} input[name=start_line]`).val(data.StartLine)
$(`#log${version_name}`).prepend('<pre>' + data.Content)
$(`.message${version_name} #header`).text('您已翻阅至日志顶部')
$(`.message${version_name}`).css('display', 'block')
setTimeout(function(){
$(`.message${version_name}`).css('display', 'none')
}, 1000)
scrollAnimation(logContentDom, logContentDom.scrollTop, 0);
})

})
$('.log_bottom').click(function(e){
let version_name = $(this).data('version')
let logContentDom = document.querySelector(`#log${version_name}`)
$(`#log_file${version_name}`).siblings('pre').remove()
$.get(`/api/v1/repos/${userName}/${repoPath}/modelarts/train-job/${jobID}/log?version_name=${version_name}&base_line=&lines=50&order=desc`, (data) => {
$(`#log${version_name} input[name=end_line]`).val(data.EndLine) // update the stored line marker when it changes
$(`#log${version_name} input[name=start_line]`).val(data.StartLine)
$(`#log${version_name}`).append('<pre>' + data.Content)
$.get(`/api/v1/repos/${userName}/${repoPath}/modelarts/train-job/${jobID}/log?version_name=${version_name}&base_line=${data.EndLine}&lines=50&order=desc`, (data) => {
if (data.Lines == 0){
$(`.message${version_name} #header`).text('您已翻阅至日志底部')
$(`.message${version_name}`).css('display', 'block')
setTimeout(function(){
$(`.message${version_name}`).css('display', 'none')
}, 1000)
}else{
const end_line = $(`#log${version_name} input[name=end_line]`).val()
if(end_line==data.EndLine){
return
}
else{
$(`#log${version_name} input[name=end_line]`).val(data.EndLine)
$(`#log${version_name}`).append('<pre>' + data.Content)
}

}
}).fail(function(err) {
console.log(err);
});
scrollAnimation(logContentDom, logContentDom.scrollTop+1, logContentDom.scrollHeight - logContentDom.clientHeight);
})
})
</script>

+ 1
- 1
templates/user/dashboard/feeds.tmpl View File

@@ -71,7 +71,7 @@
{{ $index := index .GetIssueInfos 0}}
{{$.i18n.Tr "action.comment_pull" .GetRepoLink $index .ShortRepoPath | Str2html}}
{{else if eq .GetOpType 24}}
{{$.i18n.Tr "action.upload_dataset" .GetRepoLink .Content .RefName | Str2html}}
{{$.i18n.Tr "action.upload_dataset" .GetRepoLink .RefName | Str2html}}
{{else if eq .GetOpType 25}}
{{$.i18n.Tr "action.task_gpudebugjob" .GetRepoLink .Content .RefName | Str2html}}
{{else if eq .GetOpType 26}}


+ 177
- 96
web_src/js/components/MinioUploader.vue View File

@@ -1,25 +1,31 @@
<template>
<div class="dropzone-wrapper dataset-files">
<div
id="dataset"
class="dropzone"
/>
<p class="upload-info">
{{ file_status_text }}
<strong class="success text red">{{ status }}</strong>
</p>
<el-button style="background-color: #21ba45;" type="success" :disabled="btnFlag" @click="onFileAdded">{{upload}}</el-button>
<div id="dataset" class="dropzone">
<div class="maxfilesize ui red message" style="display: none;margin: 2.5rem;"></div>
</div>
<el-button style="background-color: #21ba45;margin-top: 2rem;" type="success" :disabled="btnFlag" @click="startUpload">{{upload}}</el-button>
<el-button type="info" @click="cancelDataset">{{cancel}}</el-button>
<!-- <p>说明:<br>
- 只有zip格式的数据集才能发起云脑任务;<br>
- 云脑1提供 <span class="text blue">CPU / GPU</span> 资源,云脑2提供 <span class="text blue">Ascend NPU</span> 资源;调试使用的数据集也需要上传到对应的环境。</p> -->
<div style="margin-top: 2rem;position: relative;">
<label class="el-form-item__label" style="width: 140px;position: absolute;left: -140px;">上传状态:</label>
<div v-for="item in allUploadFiles" style="display:flex;padding: 0.8rem 0;border-bottom: 1px solid #e8e8e8;line-height: 1;" >
<span style="flex:4 1 0%;display: flex;max-width: 80%;"><i :class="[item.status===0?'ri-checkbox-circle-line success':'ri-close-circle-line failed']" style="margin-right: 0.5rem;"></i><span class="nowrap">{{item.name}}</span></span>
<span style="flex:1" v-if="item.status===0"><span style="color: #21ba45;">上传成功</span></span>
<span style="flex:1" v-else-if="item.status===1">
<el-tooltip class="item" effect="dark" placement="top">
<div slot="content">{{item.info}}</div>
<span style="color: red;cursor: pointer;">上传失败<span>(重复上传)</span></span>
</el-tooltip>
</span>
<span style="flex:1" v-else><span style="color: red;">上传失败</span></span>
</div>
</div>
</div>
</template>

<script>
/* eslint-disable eqeqeq */
// import Dropzone from 'dropzone/dist/dropzone.js';
// import 'dropzone/dist/dropzone.css'
import SparkMD5 from 'spark-md5';
import axios from 'axios';
import qs from 'qs';
@@ -27,6 +33,7 @@ import createDropzone from '../features/dropzone.js';

const {_AppSubUrl, _StaticUrlPrefix, csrf} = window.config;
const chunkSize = 1024 * 1024 * 64;
const md5ChunkSize = 1024 * 1024 * 1;

export default {
props:{
@@ -42,8 +49,8 @@ export default {
data() {
return {
dropzoneUploader: null,
maxFiles: 1,
maxFilesize: 1 * 1024 * 1024 * 1024 * 1024,
maxFiles: 10,
maxFilesize: 200 ,
acceptedFiles: '*/*',
progress: 0,
status: '',
@@ -54,6 +61,11 @@ export default {
btnFlag:false,
cancel:'',
upload:'',
uploadFiles:[],
uploadFilesAddId:[],
allUploadFiles:[],
uploadLength:0,
allUploadLength:0,
};
},

@@ -64,82 +76,98 @@ export default {
this.repoPath = this.dropzoneParams.data('repopath');
this.cancel = this.dropzoneParams.data('cancel');
this.upload = this.dropzoneParams.data('upload');
// let previewTemplate = '';
// previewTemplate += '<div class="dz-preview dz-file-preview">\n ';
// previewTemplate += ' <div class="dz-details">\n ';
// previewTemplate += ' <div class="dz-filename">';
// previewTemplate +=
// ' <span data-dz-name data-dz-thumbnail></span>';
// previewTemplate += ' </div>\n ';
// previewTemplate += ' <div class="dz-size" data-dz-size style="white-space: nowrap"></div>\n ';
// previewTemplate += ' </div>\n ';
// previewTemplate += ' <div class="dz-progress ui active progress">';
// previewTemplate +=
// ' <div class="dz-upload bar" data-dz-uploadprogress><div class="progress"></div></div>\n ';
// previewTemplate += ' </div>\n ';
// previewTemplate += ' <div class="dz-success-mark">';
// previewTemplate += ' <span>上传成功</span>';
// previewTemplate += ' </div>\n ';
// previewTemplate += ' <div class="dz-error-mark">';
// previewTemplate += ' <span>上传失败</span>';
// previewTemplate += ' </div>\n ';
// previewTemplate += ' <div class="dz-error-message">';
// previewTemplate += ' <span data-dz-errormessage></span>';
// previewTemplate += ' </div>\n';
// previewTemplate += '</div>';
let previewTemplate = ''
previewTemplate += '<div class="dz-preview dz-file-preview" style="width:100%;background: none;">'
previewTemplate += '<div class="dz-details" style="opacity: 1;">'
previewTemplate += '<div class="dz-filename"><span data-dz-name></span></div>'
previewTemplate += '<div class="dz-size" data-dz-size></div>'
previewTemplate += '<div class="dz-progress ui active progress" style="top: 75%;width: 80%;left: 15%;"><div class="dz-upload bar" data-dz-uploadprogress><div class="progress"></div></div></div>'
// previewTemplate += '<img data-dz-thumbnail />'
previewTemplate += '</div>'
previewTemplate += '<div class="dz-success-mark"><span>✔</span></div>'
previewTemplate += '<div class="dz-error-mark"><span>✘</span></div>'
previewTemplate += '<div class="dz-error-message"><span data-dz-errormessage></span></div>'
previewTemplate += '</div>'
let previewTemplate = `
<div class="dz-preview dz-file-preview">
<div class="dz-image">
<img data-dz-thumbnail />
</div>
<div class="dz-details">
<div class="dz-size"><span data-dz-size></span></div>
<div class="dz-filename"><span data-dz-name></span></div>
</div>

<div class="dz-progress"><span class="dz-upload" data-dz-uploadprogress></span></div>
<div class="dz-error-message" style="line-height: 1.5;"><span data-dz-errormessage></span></div>
<div class="dz-success-mark"><svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" width="54" height="54"><path fill="none" d="M0 0h24v24H0z"/><path d="M12 22C6.477 22 2 17.523 2 12S6.477 2 12 2s10 4.477 10 10-4.477 10-10 10zm0-2a8 8 0 1 0 0-16 8 8 0 0 0 0 16zm-.997-4L6.76 11.757l1.414-1.414 2.829 2.829 5.656-5.657 1.415 1.414L11.003 16z" fill="rgba(47,204,113,1)"/></svg></div>
<div class="dz-error-mark"><svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" width="54" height="54"><path fill="none" d="M0 0h24v24H0z"/><path d="M12 22C6.477 22 2 17.523 2 12S6.477 2 12 2s10 4.477 10 10-4.477 10-10 10zm0-2a8 8 0 1 0 0-16 8 8 0 0 0 0 16zm0-9.414l2.828-2.829 1.415 1.415L13.414 12l2.829 2.828-1.415 1.415L12 13.414l-2.828 2.829-1.415-1.415L10.586 12 7.757 9.172l1.415-1.415L12 10.586z" fill="rgba(231,76,60,1)"/></svg></div>
</div> `
const $dropzone = $('div#dataset');
const dropzoneUploader = await createDropzone($dropzone[0], {
url: '/todouploader',
maxFiles: this.maxFiles,
maxFilesize: this.maxFileSize,
maxFilesize: 1024*200,
filesizeBase:1024,
parallelUploads: this.maxFiles,
timeout: 0,
autoQueue: false,
addRemoveLinks:true,
// autoQueue: false,
autoProcessQueue: false, // automatic upload disabled; uploading is triggered manually
dictDefaultMessage: this.dropzoneParams.data('default-message'),
dictInvalidFileType: this.dropzoneParams.data('invalid-input-type'),
dictFileTooBig: this.dropzoneParams.data('file-too-big'),
dictRemoveFile: this.dropzoneParams.data('remove-file'),
previewTemplate
previewTemplate:previewTemplate
});
dropzoneUploader.on('addedfile', (file) => {
this.file = file
});
dropzoneUploader.on('maxfilesexceeded', function (file) {
if (this.files[0].status !== 'success') {
alert(this.dropzoneParams.data('waitting-uploading'));
this.removeFile(file);
return;
if(file.size/(1024*1024)>dropzoneUploader.options.maxFilesize){
dropzoneUploader.removeFile(file)
$('.maxfilesize.ui.red.message').text("单次最多上传10个文件,单个文件不超过200G")
$('.maxfilesize.ui.red.message').css('display','block')
}else{
this.file = file
$('.maxfilesize.ui.red.message').css('display','none')
}
this.removeAllFiles();
this.addFile(file);
});
dropzoneUploader.on("removedfile",(file)=>{
$('.maxfilesize.ui.red.message').css('display','none')
})
dropzoneUploader.on('maxfilesexceeded', function (file) {
dropzoneUploader.removeFile(file)
$('.maxfilesize.ui.red.message').text("单次最多上传10个文件,单个文件不超过200G")
$('.maxfilesize.ui.red.message').css('display','block')

});
this.dropzoneUploader = dropzoneUploader;
},
watch:{
allUploadLength(len){
if(len===this.uploadFiles.length){
setTimeout(() => {
this.dropzoneUploader.removeAllFiles(true)
this.btnFlag = false
this.$emit('setcluster',this.btnFlag)
}, 2000);
}
}
},
methods: {
startUpload(){
this.uploadFiles = this.dropzoneUploader.getQueuedFiles()
if(this.uploadFiles.length===0){
return
}
this.resetStatus()
$('.dz-remove').remove()
$('.maxfilesize.ui.red.message').css('display','none')
this.btnFlag = true
this.$emit('setcluster',this.btnFlag)
this.uploadFiles.forEach(element => {
element.datasetId = document.getElementById('datasetId').getAttribute('datasetId')
this.computeMD5(element)
});
},
cancelDataset(){
location.href = this.repoPath
this.dropzoneUploader.removeAllFiles(true)
},
resetStatus() {
this.progress = 0;
this.status = '';
this.uploadLength = 0
this.allUploadLength = 0
this.allUploadFiles = []
},
updateProgress(file, progress) {
console.log("progress---",progress)
file.previewTemplate.querySelector(
'.dz-upload'
).style.width = `${progress}%`
@@ -147,6 +175,26 @@ export default {
'.dz-upload'
).style.background = '#409eff';
},
uploadError(file,info){
file.previewTemplate.querySelector(
'.dz-error-mark'
).style.opacity = 1
file.previewTemplate.querySelector(
'.dz-progress'
).style.opacity = 0
file.previewTemplate.querySelector(
'.dz-error-message span'
).innerHTML = info
file.previewTemplate.querySelector(
'.dz-error-message'
).style.display = 'block'
file.previewTemplate.querySelector(
'.dz-details'
).onmouseover = function(){file.previewTemplate.querySelector('.dz-error-message').style.opacity = 1 }
file.previewTemplate.querySelector(
'.dz-details'
).onmouseout = function(){file.previewTemplate.querySelector('.dz-error-message').style.opacity = 0 }
},
emitDropzoneSuccess(file) {
file.status = 'success';
this.dropzoneUploader.emit('success', file);
@@ -158,28 +206,22 @@ export default {
this.dropzoneUploader.emit('error', file);
// this.dropzoneUploader.emit('complete', file);
},
onFileAdded() {
this.btnFlag = true
this.file.datasetId = document
.getElementById('datasetId')
.getAttribute('datasetId');
this.resetStatus();
if(!this.file?.upload){
this.btnFlag = false
return
}
this.computeMD5(this.file);
},

finishUpload(file) {
this.emitDropzoneSuccess(file);
setTimeout(() => {
console.log("finish",file)
file.previewTemplate.querySelector(
'.dz-success-mark'
).style.opacity = 1
file.previewTemplate.querySelector(
'.dz-progress'
).style.opacity = 0
if(this.uploadLength === this.uploadFiles.length){
setTimeout(() => {
window.location.href = this.repoPath
}, 1000);
}, 1000);
}
},

computeMD5(file) {
this.resetStatus();
const blobSlice =
File.prototype.slice ||
File.prototype.mozSlice ||
@@ -188,12 +230,13 @@ export default {
spark = new SparkMD5.ArrayBuffer(),
fileReader = new FileReader();
let currentChunk = 0;

const time = new Date().getTime();
// console.log('计算MD5...')
this.status = this.dropzoneParams.data('md5-computing');
file.totalChunkCounts = chunks;
loadNext();
if (file.size==0) {
file.totalChunkCounts = 1
}
loadMd5Next();

fileReader.onload = (e) => {
fileLoaded.call(this, e);
@@ -207,13 +250,12 @@ export default {
spark.append(e.target.result); // Append array buffer
currentChunk++;
if (currentChunk < chunks) {
// console.log(`第${currentChunk}分片解析完成, 开始第${currentChunk +1}/${chunks}分片解析`);
this.status = `${this.dropzoneParams.data('loading-file')} ${(
(currentChunk / chunks) *
100
).toFixed(2)}% (${currentChunk}/${chunks})`;
this.updateProgress(file, ((currentChunk / chunks) * 100).toFixed(2));
loadNext();
loadMd5Next();
return;
}

@@ -223,6 +265,7 @@ export default {
file.size
} 用时:${(new Date().getTime() - time) / 1000} s`
);
this.updateProgress(file,100)
spark.destroy(); // release the SparkMD5 buffer
file.uniqueIdentifier = md5; // use the file's MD5 as its unique identifier
file.cmd5 = false; // MD5 computation finished
@@ -235,6 +278,13 @@ export default {
start + chunkSize >= file.size ? file.size : start + chunkSize;
fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
}

function loadMd5Next() {
const start = currentChunk * chunkSize;
const end =
start + md5ChunkSize >= file.size ? file.size : start + md5ChunkSize;
fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
}
},

async computeMD5Success(md5edFile) {
@@ -248,6 +298,10 @@ export default {
this.multipartUpload(file);
} else {
// TODO: decide how to handle this failure
let info = "上传失败"
this.allUploadLength++
this.uploadError(file,info)
this.allUploadFiles.push({name:file.name,status:2,info:info})
return;
}
return;
@@ -263,15 +317,16 @@ export default {
// the same file was already uploaded under a different dataset
if (file.datasetID != '') {
if (file.datasetName != "" && file.realName != "") {
var info = "该文件已上传,对应数据集(" + file.datasetName + ")-文件(" + file.realName + ")";
window.alert(info);
window.location.reload();
let info = `该文件已上传在数据集: ${file.datasetName}`
this.uploadError(file,info)
this.allUploadLength++
this.allUploadFiles.push({name:file.name,status:1,info:info})
}
}
console.log('文件已上传完成');
this.progress = 100;
this.status = this.dropzoneParams.data('upload-complete');
this.finishUpload(file);
// this.finishUpload(file);
} else {
// resume an interrupted upload
this.multipartUpload(file);
@@ -479,7 +534,12 @@ export default {
this.status = this.dropzoneParams.data('uploading');
loadNext();
fileReader.onload = async (e) => {
await uploadChunk(e);
try{
await uploadChunk(e);
}catch(err){
console.log(err)
}
fileReader.abort();
currentChunk++;
if (currentChunk < chunks) {
@@ -495,12 +555,27 @@ export default {
).toFixed(2)}%`;
await loadNext();
} else {
await completeUpload();
try{
await completeUpload();
}catch(err){
let info = "上传失败"
this.allUploadLength++
this.uploadError(file,info)
this.allUploadFiles.push({name:file.name,status:2,info:info})
if(err){
return
}
}
console.log(
`文件上传完成:${file.name} \n分片:${chunks} 大小:${
file.size
} 用时:${(new Date().getTime() - time) / 1000} s`
);
this.uploadLength++
this.allUploadLength++
this.allUploadFiles.push({name:file.name,status:0,info:'上传成功'})
this.updateProgress(file, 100);
this.progress = 100;
this.status = this.dropzoneParams.data('upload-complete');
@@ -536,4 +611,10 @@ export default {
margin-top: 1em;
margin-bottom: 3em;
}
.success{
color: #21ba45;
}
.failed{
color: red;
}
</style>
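
For reference, a minimal standalone sketch of the incremental MD5 flow that `computeMD5`/`loadMd5Next` implement above, using the same `spark-md5` dependency. The 64 MB default for `md5ChunkSize` is an assumption (the component defines its own value outside this hunk), and the real method also updates the Dropzone preview and stores state on the file object.

```javascript
// Hashes a File in chunks with SparkMD5, mirroring computeMD5/loadMd5Next above.
// The md5ChunkSize default is an assumption; zero-byte files are still hashed once.
import SparkMD5 from 'spark-md5';

function computeFileMD5(file, md5ChunkSize = 64 * 1024 * 1024) {
  return new Promise((resolve, reject) => {
    const blobSlice =
      File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice;
    const chunks = Math.max(1, Math.ceil(file.size / md5ChunkSize));
    const spark = new SparkMD5.ArrayBuffer();
    const reader = new FileReader();
    let currentChunk = 0;

    const loadNext = () => {
      const start = currentChunk * md5ChunkSize;
      const end = Math.min(start + md5ChunkSize, file.size);
      reader.readAsArrayBuffer(blobSlice.call(file, start, end));
    };

    reader.onload = (e) => {
      spark.append(e.target.result); // feed this chunk into the running hash
      currentChunk++;
      if (currentChunk < chunks) {
        loadNext(); // keep reading until every chunk has been appended
      } else {
        const md5 = spark.end(); // hex MD5 of the whole file
        spark.destroy();
        resolve(md5);
      }
    };
    reader.onerror = reject;
    loadNext();
  });
}
```

A caller would then set `file.uniqueIdentifier = await computeFileMD5(file)` before querying `/attachments/get_chunks`, as the component does.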

+ 0
- 484
web_src/js/components/ObsUploader.vue

@@ -1,484 +0,0 @@
<template>
<div class="dropzone-wrapper dataset-files">
<div
id="dataset"
class="dropzone"
/>
<p class="upload-info">
{{ file_status_text }}
<strong class="success text red">{{ status }}</strong>
</p>
<p>说明:<br>
- 只有zip格式的数据集才能发起云脑任务;<br>
- 云脑1提供 <span class="text blue">CPU / GPU</span> 资源,云脑2提供 <span class="text blue">Ascend NPU</span> 资源;调试使用的数据集也需要上传到对应的环境。
</p>
</div>
</template>

<script>
/* eslint-disable eqeqeq */
// import Dropzone from 'dropzone/dist/dropzone.js';
// import 'dropzone/dist/dropzone.css'
import SparkMD5 from 'spark-md5';
import axios from 'axios';
import qs from 'qs';
import createDropzone from '../features/dropzone.js';

const {_AppSubUrl, _StaticUrlPrefix, csrf} = window.config;
const CloudBrainType = 1;

export default {
data() {
return {
dropzoneUploader: null,
maxFiles: 1,
maxFilesize: 1 * 1024 * 1024 * 1024 * 1024,
acceptedFiles: '*/*',
progress: 0,
status: '',
dropzoneParams: {},
file_status_text: ''
};
},

async mounted() {
this.dropzoneParams = $('div#minioUploader-params');
this.file_status_text = this.dropzoneParams.data('file-status');
this.status = this.dropzoneParams.data('file-init-status');

let previewTemplate = '';
previewTemplate += '<div class="dz-preview dz-file-preview">\n ';
previewTemplate += ' <div class="dz-details">\n ';
previewTemplate += ' <div class="dz-filename">';
previewTemplate +=
' <span data-dz-name data-dz-thumbnail></span>';
previewTemplate += ' </div>\n ';
previewTemplate += ' <div class="dz-size" data-dz-size style="white-space: nowrap"></div>\n ';
previewTemplate += ' </div>\n ';
previewTemplate += ' <div class="dz-progress ui active progress">';
previewTemplate +=
' <div class="dz-upload bar" data-dz-uploadprogress><div class="progress"></div></div>\n ';
previewTemplate += ' </div>\n ';
previewTemplate += ' <div class="dz-success-mark">';
previewTemplate += ' <span>上传成功</span>';
previewTemplate += ' </div>\n ';
previewTemplate += ' <div class="dz-error-mark">';
previewTemplate += ' <span>上传失败</span>';
previewTemplate += ' </div>\n ';
previewTemplate += ' <div class="dz-error-message">';
previewTemplate += ' <span data-dz-errormessage></span>';
previewTemplate += ' </div>\n';
previewTemplate += '</div>';

const $dropzone = $('div#dataset');
console.log('createDropzone');
const dropzoneUploader = await createDropzone($dropzone[0], {
url: '/todouploader',
maxFiles: this.maxFiles,
maxFilesize: this.maxFileSize,
timeout: 0,
autoQueue: false,
dictDefaultMessage: this.dropzoneParams.data('default-message'),
dictInvalidFileType: this.dropzoneParams.data('invalid-input-type'),
dictFileTooBig: this.dropzoneParams.data('file-too-big'),
dictRemoveFile: this.dropzoneParams.data('remove-file'),
previewTemplate
});
dropzoneUploader.on('addedfile', (file) => {
setTimeout(() => {
// eslint-disable-next-line no-unused-expressions
file.accepted && this.onFileAdded(file);
}, 200);
});
dropzoneUploader.on('maxfilesexceeded', function (file) {
if (this.files[0].status !== 'success') {
alert(this.dropzoneParams.data('waitting-uploading'));
this.removeFile(file);
return;
}
this.removeAllFiles();
this.addFile(file);
});

this.dropzoneUploader = dropzoneUploader;
},
methods: {
resetStatus() {
this.progress = 0;
this.status = '';
},
updateProgress(file, progress) {
file.previewTemplate.querySelector(
'.dz-upload'
).style.width = `${progress}%`;
},
emitDropzoneSuccess(file) {
file.status = 'success';
this.dropzoneUploader.emit('success', file);
this.dropzoneUploader.emit('complete', file);
},
emitDropzoneFailed(file) {
this.status = this.dropzoneParams.data('falied');
file.status = 'error';
this.dropzoneUploader.emit('error', file);
// this.dropzoneUploader.emit('complete', file);
},
onFileAdded(file) {
file.datasetId = document
.getElementById('datasetId')
.getAttribute('datasetId');
this.resetStatus();
this.computeMD5(file);
},

finishUpload(file) {
this.emitDropzoneSuccess(file);
setTimeout(() => {
window.location.reload();
}, 1000);
},

computeMD5(file) {
this.resetStatus();
const blobSlice =
File.prototype.slice ||
File.prototype.mozSlice ||
File.prototype.webkitSlice,
chunkSize = 1024 * 1024 * 64,
chunks = Math.ceil(file.size / chunkSize),
spark = new SparkMD5.ArrayBuffer(),
fileReader = new FileReader();
let currentChunk = 0;

const time = new Date().getTime();
// console.log('计算MD5...')
this.status = this.dropzoneParams.data('md5-computing');
file.totalChunkCounts = chunks;
loadNext();

fileReader.onload = (e) => {
fileLoaded.call(this, e);
};
fileReader.onerror = (err) => {
console.warn('oops, something went wrong.', err);
file.cancel();
};

function fileLoaded(e) {
spark.append(e.target.result); // Append array buffer
currentChunk++;
if (currentChunk < chunks) {
// console.log(`第${currentChunk}分片解析完成, 开始第${currentChunk +1}/${chunks}分片解析`);
this.status = `${this.dropzoneParams.data('loading-file')} ${(
(currentChunk / chunks) *
100
).toFixed(2)}% (${currentChunk}/${chunks})`;
this.updateProgress(file, ((currentChunk / chunks) * 100).toFixed(2));
loadNext();
return;
}

const md5 = spark.end();
console.log(
`MD5计算完成:${file.name} \nMD5:${md5} \n分片:${chunks} 大小:${
file.size
} 用时:${(new Date().getTime() - time) / 1000} s`
);
spark.destroy(); // release the SparkMD5 buffer
file.uniqueIdentifier = md5; // use the file's MD5 as its unique identifier
file.cmd5 = false; // MD5 computation finished
this.computeMD5Success(file);
}

function loadNext() {
const start = currentChunk * chunkSize;
const end =
start + chunkSize >= file.size ? file.size : start + chunkSize;
fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
}
},

async computeMD5Success(md5edFile) {
const file = await this.getSuccessChunks(md5edFile);
try {
if (file.uploadID == '' || file.uuid == '') {
// not uploaded before
await this.newMultiUpload(file);
if (file.uploadID != '' && file.uuid != '') {
file.chunks = '';
this.multipartUpload(file);
} else {
// TODO: decide how to handle this failure
return;
}
return;
}

if (file.uploaded == '1') {
// already uploaded successfully
// instant upload: reuse the stored object
if (file.attachID == '0') {
// the dataset record was deleted, but the file itself remains
await addAttachment(file);
}
// the same file was already uploaded under a different dataset
if (file.datasetID != '' ) {
if (file.datasetName != "" && file.realName != "") {
var info = "该文件已上传,对应数据集(" + file.datasetName + ")-文件(" + file.realName + ")";
window.alert(info);
window.location.reload();
}
}
console.log('文件已上传完成');
this.progress = 100;
this.status = this.dropzoneParams.data('upload-complete');
this.finishUpload(file);
} else {
// resume an interrupted upload
this.multipartUpload(file);
}
} catch (error) {
this.emitDropzoneFailed(file);
console.log(error);
}

async function addAttachment(file) {
return await axios.post(
'/attachments/add',
qs.stringify({
uuid: file.uuid,
file_name: file.name,
size: file.size,
dataset_id: file.datasetId,
type: CloudBrainType,
_csrf: csrf,
})
);
}
},

async getSuccessChunks(file) {
const params = {
params: {
md5: file.uniqueIdentifier,
type: CloudBrainType,
file_name: file.name,
_csrf: csrf
}
};
try {
const response = await axios.get('/attachments/get_chunks', params);
file.uploadID = response.data.uploadID;
file.uuid = response.data.uuid;
file.uploaded = response.data.uploaded;
file.chunks = response.data.chunks;
file.attachID = response.data.attachID;
file.datasetID = response.data.datasetID;
file.datasetName = response.data.datasetName;
file.realName = response.data.fileName;
return file;
} catch (error) {
this.emitDropzoneFailed(file);
console.log('getSuccessChunks catch: ', error);
return null;
}
},

async newMultiUpload(file) {
const res = await axios.get('/attachments/new_multipart', {
params: {
totalChunkCounts: file.totalChunkCounts,
md5: file.uniqueIdentifier,
size: file.size,
fileType: file.type,
type: CloudBrainType,
file_name: file.name,
_csrf: csrf
}
});
file.uploadID = res.data.uploadID;
file.uuid = res.data.uuid;
},

multipartUpload(file) {
const blobSlice =
File.prototype.slice ||
File.prototype.mozSlice ||
File.prototype.webkitSlice,
chunkSize = 1024 * 1024 * 64,
chunks = Math.ceil(file.size / chunkSize),
fileReader = new FileReader(),
time = new Date().getTime();
let currentChunk = 0;

function loadNext() {
const start = currentChunk * chunkSize;
const end =
start + chunkSize >= file.size ? file.size : start + chunkSize;
fileReader.readAsArrayBuffer(blobSlice.call(file, start, end));
}

function checkSuccessChunks() {
const index = successChunks.indexOf((currentChunk + 1).toString());
if (index == -1) {
return false;
}
return true;
}

async function getUploadChunkUrl(currentChunk, partSize) {
const res = await axios.get('/attachments/get_multipart_url', {
params: {
uuid: file.uuid,
uploadID: file.uploadID,
size: partSize,
chunkNumber: currentChunk + 1,
type: CloudBrainType,
file_name: file.name,
_csrf: csrf
}
});
urls[currentChunk] = res.data.url;
}

async function uploadMinio(url, e) {
let urls = [];
const res = await axios.put(url, e.target.result, {
headers: {
'Content-Type': ''
}});
etags[currentChunk] = res.headers.etag;
}

async function uploadMinioNewMethod(url,e){
var xhr = new XMLHttpRequest();
xhr.open('PUT', url, false);
xhr.setRequestHeader('Content-Type', '')
xhr.send(e.target.result);
var etagValue = xhr.getResponseHeader('ETag');
//console.log(etagValue);
etags[currentChunk] = etagValue;
}

async function updateChunk(currentChunk) {
await axios.post(
'/attachments/update_chunk',
qs.stringify({
uuid: file.uuid,
chunkNumber: currentChunk + 1,
etag: etags[currentChunk],
type: CloudBrainType,
_csrf: csrf
})
);
}
async function uploadChunk(e) {
try {
if (!checkSuccessChunks()) {
const start = currentChunk * chunkSize;
const partSize =
start + chunkSize >= file.size ? file.size - start : chunkSize;
// get the presigned upload URL for this chunk
await getUploadChunkUrl(currentChunk, partSize);
if (urls[currentChunk] != '') {
// upload the chunk to MinIO
await uploadMinioNewMethod(urls[currentChunk], e);
if (etags[currentChunk] != '') {
// update the database with this chunk's upload result
//await updateChunk(currentChunk);
} else {
console.log("上传到minio uploadChunk etags[currentChunk] == ''");// TODO
}
} else {
console.log("uploadChunk urls[currentChunk] != ''");// TODO
}
}
} catch (error) {
this.emitDropzoneFailed(file);
console.log(error);
}
}

async function completeUpload() {
return await axios.post(
'/attachments/complete_multipart',
qs.stringify({
uuid: file.uuid,
uploadID: file.uploadID,
file_name: file.name,
size: file.size,
dataset_id: file.datasetId,
type: CloudBrainType,
_csrf: csrf
})
);
}

const successChunks = [];
let successParts = [];
successParts = file.chunks.split(',');
for (let i = 0; i < successParts.length; i++) {
successChunks[i] = successParts[i].split('-')[0];
}
const urls = []; // TODO const ?
const etags = [];
console.log('上传分片...');
this.status = this.dropzoneParams.data('uploading');
loadNext();
fileReader.onload = async (e) => {
await uploadChunk(e);
fileReader.abort();
currentChunk++;
if (currentChunk < chunks) {
console.log(
`第${currentChunk}个分片上传完成, 开始第${currentChunk +
1}/${chunks}个分片上传`
);
this.progress = Math.ceil((currentChunk / chunks) * 100);
this.updateProgress(file, ((currentChunk / chunks) * 100).toFixed(2));
this.status = `${this.dropzoneParams.data('uploading')} ${(
(currentChunk / chunks) *
100
).toFixed(2)}%`;
await loadNext();
} else {
await completeUpload();
console.log(
`文件上传完成:${file.name} \n分片:${chunks} 大小:${
file.size
} 用时:${(new Date().getTime() - time) / 1000} s`
);
this.progress = 100;
this.status = this.dropzoneParams.data('upload-complete');
this.finishUpload(file);
}
};
}
}
};
</script>

<style>
.dropzone-wrapper {
margin: 0;
}
.ui .dropzone {
border: 2px dashed #0087f5;
box-shadow: none !important;
padding: 0;
min-height: 5rem;
border-radius: 4px;
}
.dataset .dataset-files #dataset .dz-preview.dz-file-preview,
.dataset .dataset-files #dataset .dz-preview.dz-processing {
display: flex;
align-items: center;
}
.dataset .dataset-files #dataset .dz-preview {
border-bottom: 1px solid #dadce0;
min-height: 0;
}
.upload-info{
margin-top: 0.2em;
}
</style>
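
ObsUploader.vue is removed in this PR; its chunked-upload flow survives in MinioUploader.vue. As a reading aid, here is a condensed, hedged sketch of that protocol using only the endpoints shown above (`get_chunks`, `new_multipart`, `get_multipart_url`, `complete_multipart`). Resume handling, ETag bookkeeping, and error handling are omitted, and `file.uniqueIdentifier`/`file.datasetId` are assumed to be set by the caller.

```javascript
// Condensed multipart-upload sketch based on the endpoints used above.
// Not the component's exact code: resume, ETag tracking, and errors are omitted.
import axios from 'axios';
import qs from 'qs';

const { csrf } = window.config;

async function uploadInChunks(file, type, chunkSize = 64 * 1024 * 1024) {
  // 1. Ask the server what it already knows about this file (keyed by MD5).
  const { data: meta } = await axios.get('/attachments/get_chunks', {
    params: { md5: file.uniqueIdentifier, type, file_name: file.name, _csrf: csrf },
  });
  if (meta.uploaded === '1') return meta; // already stored: "instant upload" path

  // 2. Create a new multipart upload.
  const chunks = Math.ceil(file.size / chunkSize) || 1;
  const { data: created } = await axios.get('/attachments/new_multipart', {
    params: {
      totalChunkCounts: chunks, md5: file.uniqueIdentifier, size: file.size,
      fileType: file.type, type, file_name: file.name, _csrf: csrf,
    },
  });

  // 3. PUT each part to its presigned URL.
  for (let i = 0; i < chunks; i++) {
    const start = i * chunkSize;
    const part = file.slice(start, Math.min(start + chunkSize, file.size));
    const { data: urlRes } = await axios.get('/attachments/get_multipart_url', {
      params: {
        uuid: created.uuid, uploadID: created.uploadID, size: part.size,
        chunkNumber: i + 1, type, file_name: file.name, _csrf: csrf,
      },
    });
    await axios.put(urlRes.url, part, { headers: { 'Content-Type': '' } });
  }

  // 4. Tell the server the upload is complete so the attachment is created.
  return axios.post('/attachments/complete_multipart', qs.stringify({
    uuid: created.uuid, uploadID: created.uploadID, file_name: file.name,
    size: file.size, dataset_id: file.datasetId, type, _csrf: csrf,
  }));
}
```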

+ 57
- 6
web_src/js/components/UserAnalysis.vue

@@ -63,19 +63,28 @@
</template>
</el-table-column>
<el-table-column
prop="CodeMergeCount"
label="PR数"
prop="UserIndex"
label="归一化用户指数"
width="120px"
align="center">
</el-table-column>
<template slot-scope="scope">
{{scope.row.UserIndex | rounding}}
</template>
</el-table-column>
<el-table-column
prop="UserIndex"
prop="UserIndexPrimitive"
label="用户指数"
width="120px"
align="center">
<template slot-scope="scope">
{{scope.row.UserIndex | rounding}}
{{scope.row.UserIndexPrimitive | rounding}}
</template>
</el-table-column>
<el-table-column
prop="CodeMergeCount"
label="PR数"
align="center">
</el-table-column>
<el-table-column
prop="CommitCount"
label="commit数"
@@ -160,6 +169,48 @@
label="提交模型数"
width="120px"
align="center">
</el-table-column>
<el-table-column
prop="FocusOtherUser"
label="关注他人数"
width="120px"
align="center">
</el-table-column>
<el-table-column
prop="CollectDataset"
label="收藏数据集"
width="120px"
align="center">
</el-table-column>
<el-table-column
prop="CollectedDataset"
label="被收藏数据集"
width="120px"
align="center">
</el-table-column>
<el-table-column
prop="RecommendDataset"
label="被推荐数据集数"
width="120px"
align="center">
</el-table-column>
<el-table-column
prop="CollectImage"
label="收藏镜像数"
width="120px"
align="center">
</el-table-column>
<el-table-column
prop="CollectedImage"
label="被收藏镜像数"
width="120px"
align="center">
</el-table-column>
<el-table-column
prop="RecommendImage"
label="被推荐镜像数"
width="120px"
align="center">
</el-table-column>
<el-table-column
prop="RegistDate"
@@ -214,7 +265,7 @@
value_time: '',
search:'',
data:'',
columns: [{title: 'ID',key: 'ID'},{title: '用户名',key: 'Name'},{title: 'PR数',key: 'CodeMergeCount'},{title: 'commit数',key:'CommitCount'},{title: '提出任务数',key: 'IssueCount'},{title: '评论数',key: 'CommentCount'},{title: '关注项目数',key: 'FocusRepoCount'},{title: '点赞项目数',key: 'StarRepoCount'},{title: '登录次数',key: 'LoginCount'},{title:'关注者数',key:'WatchedCount'},{title:'commit代码行数',key:'CommitCodeSize'},{title:'已解决任务数',key:'SolveIssueCount'},{title:'百科页面贡献次数',key:'EncyclopediasCount'},{title:'创建项目',key:'CreateRepoCount'},{title:'用户注册时间',key:'RegistDate'},{title:'云脑任务数',key:'CloudBrainTaskNum'},{title:'云脑运行时间(小时)',key:'CloudBrainRunTime'},{title:'上传(提交)数据集文件数',key:'CommitDatasetNum'},{title:'提交模型数',key:'CommitModelCount'},{title:'用户指数',key:'UserIndex'},{title:'系统统计时间',key:'CountDate'}],
columns: [{title: 'ID',key: 'ID'},{title: '用户名',key: 'Name'},{title: 'PR数',key: 'CodeMergeCount'},{title: 'commit数',key:'CommitCount'},{title: '提出任务数',key: 'IssueCount'},{title: '评论数',key: 'CommentCount'},{title: '关注项目数',key: 'FocusRepoCount'},{title: '点赞项目数',key: 'StarRepoCount'},{title: '登录次数',key: 'LoginCount'},{title:'关注者数',key:'WatchedCount'},{title:'commit代码行数',key:'CommitCodeSize'},{title:'已解决任务数',key:'SolveIssueCount'},{title:'百科页面贡献次数',key:'EncyclopediasCount'},{title:'创建项目',key:'CreateRepoCount'},{title:'用户注册时间',key:'RegistDate'},{title:'云脑任务数',key:'CloudBrainTaskNum'},{title:'云脑运行时间(小时)',key:'CloudBrainRunTime'},{title:'上传(提交)数据集文件数',key:'CommitDatasetNum'},{title:'提交模型数',key:'CommitModelCount'},{title:'归一化用户指数',key:'UserIndex'},{title:'用户指数',key:'UserIndexPrimitive'},{title:'关注他人数',key:'FocusOtherUser'},{title:'收藏数据集',key:'CollectDataset'},{title:'被收藏数据集',key:'CollectedDataset'},{title:'被推荐数据集数',key:'RecommendDataset'},{title:'收藏镜像数',key:'CollectImage'},{title:'被收藏镜像数',key:'CollectedImage'},{title:'被推荐镜像数',key:'RecommendImage'},{title:'系统统计时间',key:'CountDate'}],
blob:'',
fileName:'',
dynamic:7,
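
The new `UserIndex` / `UserIndexPrimitive` columns above render through a `rounding` filter that is defined outside this hunk. A hypothetical minimal version, for readers unfamiliar with Vue 2 filters (the project's actual filter may use different precision):

```javascript
// Hypothetical sketch of the `rounding` filter used by the UserIndex columns.
// The real filter lives elsewhere in the project and may differ.
import Vue from 'vue';

Vue.filter('rounding', (value) => {
  const num = Number(value);
  if (Number.isNaN(num)) return value; // leave non-numeric cells untouched
  return num.toFixed(2);               // e.g. 0.73456 -> "0.73"
});
```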


+ 16
- 18
web_src/js/components/images/adminImages.vue

@@ -1,6 +1,7 @@
<template>
<div>
<div >
<div class="ui container" style="width: 80%;">
<div class="ui grid">
<div class="row" style="border: 1px solid #d4d4d5;margin-top: 15px;padding-top: 0;">
<div class="ui attached segment">
@@ -11,24 +12,24 @@
</div>
</div>
</div>
<el-row style="padding: 1rem;">
<el-col :span="2" style="margin-right: 1rem;">
<el-checkbox v-model="checked" style="padding: 0.5rem 1rem;">仅显示平台推荐</el-checkbox>
</el-col>
<el-col :span="6">
<el-dropdown @command="handleCommand" trigger="click" style="border: 1px solid rgba(34,36,38,.15);border-radius: 4px;padding: 0.5rem 1rem;">

<div class="ui ten wide column" style="margin: 1rem 0;">
<el-checkbox v-model="checked" style="padding: 0.5rem 1rem;">仅显示平台推荐</el-checkbox>
<el-dropdown @command="handleCommand" trigger="click" style="border: 1px solid rgba(34,36,38,.15);border-radius: 4px;padding: 0.5rem 1rem;">
<span class="el-dropdown-link">
{{dropdownPrivate}}<i class="el-icon-caret-bottom el-icon--right"></i>
</span>
<el-dropdown-menu slot="dropdown">
<el-dropdown-item :command="{label:'全部',private:''}">全部</el-dropdown-item>
<el-dropdown-item :command="{label:'公开',private:false}">公开</el-dropdown-item>
<el-dropdown-item :command="{label:'公开',private:true}">私有</el-dropdown-item>
<el-dropdown-item :command="{label:'私有',private:true}">私有</el-dropdown-item>
</el-dropdown-menu>
</el-dropdown>
</el-col>
</el-row>
<el-row>
</el-dropdown>
</div>
<div class="ui six wide column right aligned" style="margin: 1rem 0;">
<a class="ui blue small button" href="/admin/images/commit_image">创建云脑镜像</a>
</div>
<div class="ui sixteen wide column" style="padding: 0;">
<el-table
:data="tableDataCustom"
style="width: 100%"
@@ -116,12 +117,8 @@
<svg width="1.4em" height="1.4em" viewBox="0 0 32 32" class="heart-stroke"><path d="M4.4 6.54c-1.761 1.643-2.6 3.793-2.36 6.056.24 2.263 1.507 4.521 3.663 6.534a29110.9 29110.9 0 0010.296 9.633l10.297-9.633c2.157-2.013 3.424-4.273 3.664-6.536.24-2.264-.599-4.412-2.36-6.056-1.73-1.613-3.84-2.29-6.097-1.955-1.689.25-3.454 1.078-5.105 2.394l-.4.319-.398-.319c-1.649-1.316-3.414-2.143-5.105-2.394a7.612 7.612 0 00-1.113-.081c-1.838 0-3.541.694-4.983 2.038z"></path></svg>
<span style="line-height: 2;margin-left:0.3rem;">{{scope.row.numStars}}</span>
</div>
<template v-if="!scope.row.isPrivate">
<span style="padding: 0 1rem;color: rgb(250, 140, 22);cursor:pointer;" v-if="scope.row.type==5" @click="unSetRecommend(scope.$index,scope.row.id)">取消推荐</span>
<span style="padding: 0 1rem;color: rgb(19, 194, 141);cursor:pointer;" v-else @click="setRecommend(scope.$index,scope.row.id)">设为推荐</span>
</template>
<span style="padding: 0 1rem;color: rgb(19, 194, 141);cursor:pointer;" v-if="scope.row.type!==5 && !scope.row.isPrivate" @click="setRecommend(scope.$index,scope.row.id)">设为推荐</span>
<span style="padding: 0 1rem;color:#0366d6;cursor:pointer;" @click="copyUrl(scope.row.place)">复制地址</span>
<div style="padding-left:1rem;cursor:pointer;">
<el-dropdown size="medium">
@@ -138,7 +135,7 @@
</template>
</el-table-column>
</el-table>
</el-row>
</div>
<div class="ui container" style="padding:2rem 0;text-align:center">
<el-pagination
background
@@ -152,6 +149,7 @@
</el-pagination>
</div>
</div>
</div>
</div>
</div>
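
The template change above replaces the old recommend/unrecommend pair with a single 设为推荐 action that only renders for public, not-yet-recommended images. A one-line predicate capturing that rule (field names follow the table rows; `type === 5` marks an already-recommended image, per the removed branch):

```javascript
// Visibility rule for the "set as recommended" action in adminImages.vue.
function canSetRecommend(image) {
  return image.type !== 5 && !image.isPrivate;
}
```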



+ 23
- 3
web_src/js/features/images.js

@@ -26,6 +26,15 @@ export default async function initImage(){
}
]
},
place:{
identifier : 'place',
rules: [
{
type: 'empty',
}
]
},
}
})
}
@@ -75,8 +84,9 @@ export default async function initImage(){
type:'POST',
data:formData,
success:function(res){
console.log(res)
if(res.Code===1){
$('.ui.info.message').text(res.Message).show().delay(1500).fadeOut();
$('.ui.negative.message').text(res.Message).show().delay(2500).fadeOut();
}else if(res.Code==0){
if(location.href.indexOf('imageAdmin')!==-1){
location.href = `${window.config.AppSubUrl}/admin/images`
@@ -105,6 +115,11 @@ export default async function initImage(){
$("textarea[name='description']").parent().addClass('error')
return false
}
if($("input[name='place']").length>0&&!$("input[name='place']").val()){
console.log("1111111",$("input[name='place']"))
$("input[name='place']").parent().addClass('error')
return false
}
const postData = {
_csrf:$("input[name='_csrf']").val(),
@@ -115,6 +130,10 @@ export default async function initImage(){
topics:$("input[name='topics']").val(),
id:$("input[name='id']").val()
}
if($("input[name='place']").val()&&$("input[name='isRecommend']:checked").val()){
postData.isRecommend = $("input[name='isRecommend']:checked").val()
postData.place = $("input[name='place']").val()
}
let formData = $params(postData)
if($("input[name='edit']").val()=="edit"){
postImage(formData)
@@ -143,15 +162,16 @@ export default async function initImage(){
}
})
}
return false
})
$('#cancel_submit_image').click(()=>{
console.log(pageform)
if(link.includes('cloudbrain')){
let repoLink = link.split('cloudbrain')[0]
location.href = `${window.config.AppSubUrl}${repoLink}debugjob?debugListType=all`
}else if(pageform=='imageSquare'){
location.href = `${window.config.AppSubUrl}/explore/images?type=myimage`
}else if(pageform=='imageAdmin'){
}else if(pageform){
location.href = `${window.config.AppSubUrl}/admin/images`
}
})
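
The images.js changes above add a required `place` field and attach the recommendation fields only when both `place` and `isRecommend` are set. A small self-contained sketch of that payload rule (field names follow the form; the jQuery wiring and the existing `postImage`/`$params` helpers are left out):

```javascript
// Builds the image form payload, attaching recommendation fields only when
// both the checkbox value and the image location ("place") are provided.
function buildImagePostData({ csrf, description, topics, id, place, isRecommend }) {
  const postData = { _csrf: csrf, description, topics, id };
  if (place && isRecommend) {
    postData.isRecommend = isRecommend;
    postData.place = place;
  }
  return postData;
}

// Example: without place/isRecommend the payload stays unchanged.
// buildImagePostData({ csrf: 'token', description: 'demo', topics: '', id: '1' })
// -> { _csrf: 'token', description: 'demo', topics: '', id: '1' }
```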


+ 122
- 41
web_src/js/index.js

@@ -34,7 +34,6 @@ import {
} from './features/notification.js';
import {createCodeEditor} from './features/codeeditor.js';
import MinioUploader from './components/MinioUploader.vue';
import ObsUploader from './components/ObsUploader.vue';
import EditAboutInfo from './components/EditAboutInfo.vue';
// import Images from './components/Images.vue';
import EditTopics from './components/EditTopics.vue';
@@ -184,11 +183,11 @@ function initBranchSelector() {
});
$selectBranch.find('.reference.column').on('click', function () {
$selectBranch.find('.scrolling.reference-list-menu').css('display', 'none');
$selectBranch.find('.reference .text').removeClass('black');
$selectBranch.find('.reference .text').addClass('black');
$($(this).data('target')).css('display', 'block');
$(this)
.find('.text')
.addClass('black');
.find('.text.black')
.removeClass('black');
return false;
});
}
@@ -231,7 +230,7 @@ function initLabelEdit() {
});
}

function updateIssuesMeta(url, action, issueIds, elementId, isAdd) {
function updateIssuesMeta(url, action, issueIds, elementId,isAdd) {
return new Promise((resolve) => {
$.ajax({
type: 'POST',
@@ -241,13 +240,14 @@ function updateIssuesMeta(url, action, issueIds, elementId, isAdd) {
action,
issue_ids: issueIds,
id: elementId,
is_add: isAdd
is_add: isAdd,
},
success: resolve
});
});
}


function initRepoStatusChecker() {
const migrating = $('#repo_migrating');
$('#repo_migrating_failed').hide();
@@ -487,12 +487,13 @@ function initCommentForm() {
const promises = [];
Object.keys(labels).forEach((elementId) => {
const label = labels[elementId];
console.log("label:",label)
const promise = updateIssuesMeta(
label['update-url'],
label.action,
label['issue-id'],
elementId,
label['is-checked']
label['is-checked'],
);
promises.push(promise);
});
@@ -532,7 +533,7 @@ function initCommentForm() {
'',
$listMenu.data('issue-id'),
$(this).data('id'),
$(this).data('is-checked')
$(this).data('is-checked'),
);
$listMenu.data('action', 'update'); // Update to reload the page when we updated items
return false;
@@ -604,6 +605,7 @@ function initCommentForm() {
$listMenu.data('issue-id'),
'',
''
).then(reload);
}

@@ -637,10 +639,16 @@ function initCommentForm() {
initListSubmits('select-reviewers-modify', 'assignees');

function selectItem(select_id, input_id) {
const $menu = $(`${select_id} .menu`);
let $menu;
if (select_id=='.select-branch'){
$menu = $(`${select_id} .menu`).eq(1);
}else{
$menu = $(`${select_id} .menu`);
}
const $list = $(`.ui${select_id}.list`);
const hasUpdateAction = $menu.data('action') === 'update';

$menu.find('.item:not(.no-select)').on('click', function () {
$(this)
.parent()
@@ -651,12 +659,17 @@ function initCommentForm() {

$(this).addClass('selected active');
if (hasUpdateAction) {
//let ref = ''
//if (select_id=='.select-branch'){
// ref = $(this).data('name');
// }

updateIssuesMeta(
$menu.data('update-url'),
'',
$menu.data('issue-id'),
$(this).data('id'),
$(this).data('is-checked')
$(this).data('is-checked'),
).then(reload);
}
switch (input_id) {
@@ -709,6 +722,7 @@ function initCommentForm() {
// Milestone and assignee
selectItem('.select-milestone', '#milestone_id');
selectItem('.select-assignee', '#assignee_id');
selectItem('.select-branch', '');
}

function initInstall() {
@@ -811,7 +825,7 @@ function initIssueComments() {
const issueId = $(this).data('issue-id');
const id = $(this).data('id');
const isChecked = $(this).data('is-checked');
//const ref = $(this).data('name');
event.preventDefault();
updateIssuesMeta(url, '', issueId, id, isChecked).then(reload);
});
@@ -2900,6 +2914,7 @@ $(document).ready(async () => {
})
.get()
.join();
console.log("this:",this)
const {url} = this.dataset;
if (elementId === '0' && url.substr(-9) === '/assignee') {
elementId = '';
@@ -2958,7 +2973,6 @@ $(document).ready(async () => {
initCodeView();
initVueApp();
initVueUploader();
initObsUploader();
initVueDataset();
initVueEditAbout();
initVueEditTopic();
@@ -3702,6 +3716,63 @@ function initVueEditAbout() {
}

function initVueDataset() {
if($('#dataset_check').length){
if(location.search.indexOf('recommend=true')!==-1){
$('#dataset_check').checkbox('set checked')
}else{
$('#dataset_check').checkbox('set unchecked')
}
$('#dataset_check').checkbox({
onChecked: function() {
if(location.search){
const params = new URLSearchParams(location.search)
if(params.has('recommend')){
params.delete('recommend')
location.href = AppSubUrl + location.pathname + '?' + params.toString() + '&recommend=true'
}else{
location.href = `${window.config.AppSubUrl}/admin/datasets${location.search}&recommend=true`
}
}else{
location.href = `${window.config.AppSubUrl}/admin/datasets?recommend=true`
}
},
onUnchecked: function() {
if(location.search=='?recommend=true'){
location.href = AppSubUrl + location.pathname
}else{
const params = new URLSearchParams(location.search)
params.delete('recommend')
location.href = AppSubUrl + location.pathname + '?' + params.toString()
}
},
})
}
$('.set_dataset').on('click', function(){
const $this = $(this);
let link = $this.data('url')
$.ajax({
url:link,
type:'PUT',
success:function(res){
console.log(res)
if(res.Code==0){
window.location.href = '/admin/datasets'
}else{
$('.ui.negative.message').text(res.Message).show().delay(1500).fadeOut();
}
},
error: function(xhr){
// hide the loading indicator
// runs only when the request fails (status code is not 200)
$('.ui.negative.message').html(xhr.responseText).show().delay(1500).fadeOut();
console.log(xhr)
},
complete:function(xhr){
// $("#mask").css({"display":"none","z-index":"1"})
}
})
});
const el = document.getElementById('dataset-base');
if (!el) {
return;
@@ -3759,24 +3830,14 @@ function initVueDataset() {
if(document.getElementById('dataset-file-desc')){
dataset_file_desc = document.getElementById('dataset-file-desc').value
}
// getEditInit(){
// if($('#dataset-edit-value')){
// $this = $('#dataset-edit-value')
// this.ruleForm.title = $this.data('edit-title') || ''
// this.ruleForm.description = $this.data('edit-description') || ''
// this.ruleForm.category = $this.data('edit-category') || ''
// this.ruleForm.task = $this.data('edit-task') || ''
// this.ruleForm.license = $this.data('edit-license') || ''
// this.ruleForm.id = $this.data('edit-id')|| ''
// }
// },
new Vue({
delimiters: ['${', '}'],
el,
data: {
suburl: AppSubUrl,
url:'',
checked:false,
clusterFlag:false,
type:0,
desc:'',
descfile:'',
@@ -3854,8 +3915,7 @@ function initVueDataset() {
},
},
components: {
MinioUploader,
ObsUploader
MinioUploader
},
mounted(){
// if(document.getElementById('postPath')){
@@ -3874,6 +3934,12 @@ function initVueDataset() {
this.getCurrentRepoDataset(this.repolink,this.cloudbrainType)
}
const params = new URLSearchParams(location.search)
if (params.has('recommend') && params.get('recommend')=='true'){
this.checked = true
}else{
this.checked = false
}
},
created(){
if(document.getElementById('postPath')){
@@ -3914,6 +3980,30 @@ function initVueDataset() {
}

},
handleCheckedChange(val){
if(val){
if(location.search){
const params = new URLSearchParams(location.search)
if(params.has('recommend')){
params.delete('recommend')
let search = params.toString()
location.href = `${AppSubUrl}/explore/datasets?${search}&recommend=${val}`
}else{
location.href = `${AppSubUrl}/explore/datasets${location.search}&recommend=${val}`
}
}else{
location.href = `${AppSubUrl}/explore/datasets?recommend=${val}`
}
}else{
if(location.search=='?recommend=true'){
location.href = AppSubUrl + location.pathname
}else{
const params = new URLSearchParams(location.search)
params.delete('recommend')
location.href = AppSubUrl + location.pathname + '?' + params.toString()
}
}
},
createDataset(formName){
let _this = this
this.$refs[formName].validate((valid)=>{
@@ -3953,7 +4043,8 @@ function initVueDataset() {
},
gotoUpload(repolink,datsetId){
location.href = `${AppSubUrl}${repolink}/datasets/attachments/upload?datasetId=${datsetId}`
// location.href = `${AppSubUrl}${repolink}/datasets/attachments/upload?datasetId=${datsetId}`
window.open(`${AppSubUrl}${repolink}/datasets/attachments/upload?datasetId=${datsetId}`,'_blank')
},
gotoDataset(datsetUrl){
location.href = datsetUrl
@@ -3961,6 +4052,9 @@ function initVueDataset() {
gotoAnnotate(repolink,uuid,type){
location.href = `${AppSubUrl}${repolink}/datasets/label/${uuid}?type=${type}`
},
setcluster(val){
this.clusterFlag = val
},
uploadGpu(){
this.type=0
},
@@ -4380,19 +4474,6 @@ function initVueDataAnalysis() {
render: h => h(DataAnalysis)
});
}
// newly added
function initObsUploader() {
const el = document.getElementById('obsUploader');
if (!el) {
return;
}

new Vue({
el: '#obsUploader',
components: {ObsUploader},
template: '<ObsUploader />'
});
}
function initVueWxAutorize() {
const el = document.getElementById('WxAutorize');
if (!el) {

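Both the `#dataset_check` checkbox handler and `handleCheckedChange` above rebuild the URL around a `recommend=true` query flag. A hedged helper showing the shared idea (it keeps the current path instead of hard-coding `/admin/datasets` or `/explore/datasets`, so it is a simplification of the code above):

```javascript
// Toggles the recommend=true query flag and reloads, preserving other params.
// AppSubUrl comes from window.config, as elsewhere in index.js.
const { AppSubUrl } = window.config;

function toggleRecommendParam(checked) {
  const params = new URLSearchParams(location.search);
  params.delete('recommend'); // drop any existing flag first
  if (checked) params.set('recommend', 'true');
  const query = params.toString();
  location.href = AppSubUrl + location.pathname + (query ? `?${query}` : '');
}
```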

+ 16
- 1
web_src/less/openi.less

@@ -248,7 +248,22 @@ footer .column{margin-bottom:0!important; padding-bottom:0!important;}
.icon-bind{background-position: -550px -52px;}
.icon-unbind{background-position: -568px -52px;}
.CREATING, .STOPPING, .DELETING, .STARTING, i.WAITING ,.INIT,.KILLING{display:inline-block;background-image:url('/img/loading.gif');background-repeat:no-repeat;width:16px;height:16px;background-size:16px 16px;margin-right:5px;}

.icon-to-top{
background:url("/img/icons.svg");
background-position: -540px -208px;
width: 30px;
height: 30px;
display: inline-block;
cursor: pointer;
}
.icon-to-bottom{
background:url("/img/icons.svg");
background-position: -574px -208px;
width: 30px;
height: 30px;
display: inline-block;
cursor: pointer;
}
i.COMPLETED,i.SUCCEEDED{display:inline-block;width:18px;height:18px;background:url("/img/icons.svg");background-position: -496px -52px;background-position: -441px -52px;}
.text_over{
overflow: hidden;

