diff --git a/models/ai_model_manage.go b/models/ai_model_manage.go index 24ee0c78f7..7eb21684b9 100644 --- a/models/ai_model_manage.go +++ b/models/ai_model_manage.go @@ -6,6 +6,7 @@ import ( "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/storage" "code.gitea.io/gitea/modules/timeutil" "xorm.io/builder" "xorm.io/xorm" @@ -25,6 +26,7 @@ type AiModelManage struct { Path string `xorm:"varchar(400) NOT NULL" json:"path"` DownloadCount int `xorm:"NOT NULL DEFAULT 0" json:"downloadCount"` Engine int64 `xorm:"NOT NULL DEFAULT 0" json:"engine"` + ComputeResource string `json:"computeResource"` Status int `xorm:"NOT NULL DEFAULT 0" json:"status"` StatusDesc string `xorm:"varchar(500)" json:"statusDesc"` Accuracy string `xorm:"varchar(1000)" json:"accuracy"` @@ -32,16 +34,42 @@ type AiModelManage struct { RepoId int64 `xorm:"INDEX NULL" json:"repoId"` CodeBranch string `xorm:"varchar(400) NULL" json:"codeBranch"` CodeCommitID string `xorm:"NULL" json:"codeCommitID"` + Recommend int `xorm:"NOT NULL DEFAULT 0" json:"recommend"` UserId int64 `xorm:"NOT NULL" json:"userId"` IsPrivate bool `xorm:"DEFAULT true" json:"isPrivate"` - UserName string `json:"userName"` - UserRelAvatarLink string `json:"userRelAvatarLink"` + UserName string `xorm:"-" json:"userName"` + UserRelAvatarLink string `xorm:"-" json:"userRelAvatarLink"` TrainTaskInfo string `xorm:"text NULL" json:"trainTaskInfo"` CreatedUnix timeutil.TimeStamp `xorm:"created" json:"createdUnix"` UpdatedUnix timeutil.TimeStamp `xorm:"updated" json:"updatedUnix"` - IsCanOper bool `json:"isCanOper"` - IsCanDelete bool `json:"isCanDelete"` - IsCanDownload bool `json:"isCanDownload"` + IsCanOper bool `xorm:"-" json:"isCanOper"` + IsCanDelete bool `xorm:"-" json:"isCanDelete"` + IsCanDownload bool `xorm:"-" json:"isCanDownload"` + IsCollected bool `xorm:"-" json:"isCollected"` + RepoName string `xorm:"-" json:"repoName"` + RepoDisplayName string `xorm:"-" json:"repoDisplayName"` 
+ RepoOwnerName string `xorm:"-" json:"repoOwnerName"` + ReferenceCount int `xorm:"NOT NULL DEFAULT 0" json:"referenceCount"` + CollectedCount int `xorm:"NOT NULL DEFAULT 0" json:"collectedCount"` + ModelFileList []storage.FileInfo `xorm:"-" json:"modelFileList"` +} + +type AiModelFile struct { + ID int64 `xorm:"pk autoincr"` + ModelID string `xorm:"UNIQUE(s)"` + Name string `xorm:"varchar(400) UNIQUE(s)"` + Path string `xorm:"varchar(400) NULL"` + Description string `xorm:"varchar(400) NULL"` + DownloadCount int64 `xorm:"DEFAULT 0"` + Size int64 `xorm:"DEFAULT 0"` + CreatedUnix timeutil.TimeStamp `xorm:"created"` +} + +type AiModelCollect struct { + ID int64 `xorm:"pk autoincr"` + ModelID string `xorm:"UNIQUE(s)"` + UserId int64 `xorm:"UNIQUE(s)"` + CreatedUnix timeutil.TimeStamp `xorm:"created"` } type AiModelConvert struct { @@ -72,10 +100,10 @@ type AiModelConvert struct { UpdatedUnix timeutil.TimeStamp `xorm:"updated" json:"updatedUnix"` StartTime timeutil.TimeStamp `json:"startTime"` EndTime timeutil.TimeStamp `json:"endTime"` - UserName string `json:"userName"` - UserRelAvatarLink string `json:"userRelAvatarLink"` - IsCanOper bool `json:"isCanOper"` - IsCanDelete bool `json:"isCanDelete"` + UserName string `xorm:"-" json:"userName"` + UserRelAvatarLink string `xorm:"-" json:"userRelAvatarLink"` + IsCanOper bool `xorm:"-" json:"isCanOper"` + IsCanDelete bool `xorm:"-" json:"isCanDelete"` } type AiModelQueryOptions struct { @@ -86,10 +114,18 @@ type AiModelQueryOptions struct { SortType string New int // JobStatus CloudbrainStatus - Type int - Status int - IsOnlyThisRepo bool - IsQueryPrivate bool + Type int + Status int + IsOnlyThisRepo bool + IsQueryPrivate bool + IsRecommend bool + IsCollected bool + CollectedUserId int64 + Namelike string + LabelFilter string + FrameFilter int + ComputeResourceFilter string + NotNeedEmpty bool } func (a *AiModelConvert) IsGpuTrainTask() bool { @@ -310,6 +346,34 @@ func ModifyModelPrivate(id string, isPrivate bool) error { 
return nil } +func ModifyModelRecommend(id string, recommend int) error { + var sess *xorm.Session + sess = x.ID(id) + defer sess.Close() + re, err := sess.Cols("recommend").Update(&AiModelManage{ + Recommend: recommend, + }) + if err != nil { + return err + } + log.Info("success to update recommend from db.re=" + fmt.Sprint((re))) + return nil +} + +func ModifyModelCollectedNum(id string, collectedNum int) error { + var sess *xorm.Session + sess = x.ID(id) + defer sess.Close() + re, err := sess.Cols("collected_count").Update(&AiModelManage{ + CollectedCount: collectedNum, + }) + if err != nil { + return err + } + log.Info("success to update collectedNum from db.re=" + fmt.Sprint((re))) + return nil +} + func ModifyLocalModel(id string, name, label, description string, engine int, isPrivate bool) error { var sess *xorm.Session sess = x.ID(id) @@ -394,6 +458,29 @@ func QueryModelByName(name string, repoId int64) []*AiModelManage { return aiModelManageList } +func QueryModelByRepoId(repoId int64) []*AiModelManage { + sess := x.NewSession() + defer sess.Close() + sess.Select("*").Table("ai_model_manage"). 
+ Where("repo_id=?", repoId) + aiModelManageList := make([]*AiModelManage, 0) + sess.Find(&aiModelManageList) + return aiModelManageList +} + +func DeleteModelByRepoId(repoId int64) error { + sess := x.NewSession() + defer sess.Close() + re, err := sess.Delete(&AiModelManage{ + RepoId: repoId, + }) + if err != nil { + return err + } + log.Info("success to delete DeleteModelByRepoId from db.re=" + fmt.Sprint((re))) + return nil +} + func QueryModelByPath(path string) (*AiModelManage, error) { modelManage := new(AiModelManage) has, err := x.Where("path=?", path).Get(modelManage) @@ -409,51 +496,74 @@ func QueryModelByPath(path string) (*AiModelManage, error) { func QueryModel(opts *AiModelQueryOptions) ([]*AiModelManage, int64, error) { sess := x.NewSession() defer sess.Close() - - var cond = builder.NewCond() + var where string + where += " ai_model_manage.user_id > 0 " if opts.RepoID > 0 { - cond = cond.And( - builder.Eq{"ai_model_manage.repo_id": opts.RepoID}, - ) + where += " and ai_model_manage.repo_id= " + fmt.Sprint(opts.RepoID) } if opts.UserID > 0 { - cond = cond.And( - builder.Eq{"ai_model_manage.user_id": opts.UserID}, - ) + where += " and ai_model_manage.user_id=" + fmt.Sprint(opts.UserID) } if opts.New >= 0 { - cond = cond.And( - builder.Eq{"ai_model_manage.new": opts.New}, - ) + where += " and ai_model_manage.new=" + fmt.Sprint(opts.New) } if len(opts.ModelID) > 0 { - cond = cond.And( - builder.Eq{"ai_model_manage.id": opts.ModelID}, - ) + where += " and ai_model_manage.id='" + fmt.Sprint(opts.ModelID) + "'" } if (opts.Type) >= 0 { - cond = cond.And( - builder.Eq{"ai_model_manage.type": opts.Type}, - ) + where += " and ai_model_manage.type=" + fmt.Sprint(opts.Type) } if (opts.Status) >= 0 { - cond = cond.And( - builder.Eq{"ai_model_manage.status": opts.Status}, - ) + where += " and ai_model_manage.status=" + fmt.Sprint(opts.Status) } if !opts.IsQueryPrivate { - cond = cond.And( - builder.Eq{"ai_model_manage.is_private": false}, - ) + where += " and 
ai_model_manage.is_private=false" } - count, err := sess.Where(cond).Count(new(AiModelManage)) - if err != nil { - return nil, 0, fmt.Errorf("Count: %v", err) + if opts.IsRecommend { + where += " and ai_model_manage.recommend=1" + } + if opts.FrameFilter >= 0 { + if opts.FrameFilter == 2 { + where += " and ai_model_manage.engine in (2,121,122)" + } else { + where += " and ai_model_manage.engine=" + fmt.Sprint(opts.FrameFilter) + } + } + if opts.LabelFilter != "" { + where += " and ai_model_manage.label ILIKE '%" + opts.LabelFilter + "%'" + } + if opts.ComputeResourceFilter != "" { + where += " and ai_model_manage.compute_resource ILIKE '%" + opts.ComputeResourceFilter + "%'" + } + if opts.Namelike != "" { + where += " and ( ai_model_manage.name ILIKE '%" + opts.Namelike + "%'" + where += " or ai_model_manage.description ILIKE '%" + opts.Namelike + "%'" + where += " or ai_model_manage.label ILIKE '%" + opts.Namelike + "%')" + } + if opts.NotNeedEmpty { + where += " and ai_model_manage.size > 0 " + } + var count int64 + var err error + if opts.IsCollected { + where += " and ai_model_collect.user_id=" + fmt.Sprint(opts.CollectedUserId) + + count, err = sess.Join("INNER", "ai_model_collect", "ai_model_manage.id = ai_model_collect.model_id").Where(where).Count(new(AiModelManage)) + if err != nil { + log.Info("error=" + err.Error()) + return nil, 0, fmt.Errorf("Count: %v", err) + } + } else { + count, err = sess.Where(where).Count(new(AiModelManage)) + if err != nil { + log.Info("error=" + err.Error()) + return nil, 0, fmt.Errorf("Count: %v", err) + } } if opts.Page >= 0 && opts.PageSize > 0 { @@ -465,11 +575,18 @@ func QueryModel(opts *AiModelQueryOptions) ([]*AiModelManage, int64, error) { } sess.Limit(opts.PageSize, start) } - - sess.OrderBy("ai_model_manage.created_unix DESC") + if opts.IsCollected { + sess.Join("INNER", "ai_model_collect", "ai_model_manage.id = ai_model_collect.model_id") + } + orderby := "ai_model_manage.created_unix desc" + if opts.SortType != "" 
{ + orderby = opts.SortType + } + sess.OrderBy(orderby) aiModelManages := make([]*AiModelManage, 0, setting.UI.IssuePagingNum) - if err := sess.Table("ai_model_manage").Where(cond). + if err := sess.Table("ai_model_manage").Where(where). Find(&aiModelManages); err != nil { + log.Info("error=" + err.Error()) return nil, 0, fmt.Errorf("Find: %v", err) } @@ -551,3 +668,109 @@ func QueryModelConvert(opts *AiModelQueryOptions) ([]*AiModelConvert, int64, err return aiModelManageConvert, count, nil } + +func SaveModelCollect(modelCollect *AiModelCollect) error { + sess := x.NewSession() + defer sess.Close() + re, err := sess.Insert(modelCollect) + if err != nil { + log.Info("insert AiModelCollect error." + err.Error()) + return err + } + log.Info("success to save AiModelCollect db.re=" + fmt.Sprint((re))) + return nil +} + +func DeleteModelCollect(modelCollect *AiModelCollect) error { + sess := x.NewSession() + defer sess.Close() + re, err := sess.Delete(modelCollect) + if err != nil { + log.Info("delete AiModelCollect error." + err.Error()) + return err + } + log.Info("success to delete AiModelCollect db.re=" + fmt.Sprint((re))) + return nil +} + +func QueryModelCollectNum(modelId string) int { + sess := x.NewSession() + defer sess.Close() + modelCollects := make([]*AiModelCollect, 0) + err := sess.Table(new(AiModelCollect)).Where("model_id=?", modelId).Find(&modelCollects) + if err == nil { + return len(modelCollects) + } + return 0 +} +func QueryModelCollectByUserId(modelId string, userId int64) []*AiModelCollect { + sess := x.NewSession() + defer sess.Close() + modelCollects := make([]*AiModelCollect, 0) + err := sess.Table(new(AiModelCollect)).Where("model_id=? 
and user_id=?", modelId, userId).Find(&modelCollects) + if err == nil { + return modelCollects + } + return nil +} + +func QueryModelCollectedStatus(modelIds []string, userId int64) map[string]*AiModelCollect { + sess := x.NewSession() + defer sess.Close() + modelCollects := make([]*AiModelCollect, 0) + var cond = builder.NewCond() + cond = cond.And( + builder.In("model_id", modelIds), + ) + cond = cond.And( + builder.Eq{"user_id": userId}, + ) + result := make(map[string]*AiModelCollect, 0) + err := sess.Table(new(AiModelCollect)).Where(cond).Find(&modelCollects) + if err == nil { + for _, v := range modelCollects { + result[v.ModelID] = v + } + } + return result +} + +func SaveModelFile(modelFile *AiModelFile) error { + sess := x.NewSession() + defer sess.Close() + re, err := sess.Insert(modelFile) + if err != nil { + log.Info("insert modelFile error." + err.Error()) + return err + } + log.Info("success to save modelFile db.re=" + fmt.Sprint((re))) + return nil +} + +func DeleteModelFile(modelFile *AiModelFile) error { + sess := x.NewSession() + defer sess.Close() + re, err := sess.Delete(modelFile) + if err != nil { + log.Info("delete modelFile error." 
+ err.Error()) + return err + } + log.Info("success to delete modelFile db.re=" + fmt.Sprint((re))) + return nil +} + +func QueryModelFileByModelId(modelId string) []*AiModelFile { + sess := x.NewSession() + defer sess.Close() + modelFileList := make([]*AiModelFile, 0) + var cond = builder.NewCond() + cond = cond.And( + builder.Eq{"model_id": modelId}, + ) + + err := sess.Table(new(AiModelFile)).Where(cond).Find(&modelFileList) + if err != nil { + log.Info("query AiModelFile failed, err=" + err.Error()) + } + return modelFileList +} diff --git a/models/cloudbrain.go b/models/cloudbrain.go index 0591ab568e..a66c47786c 100755 --- a/models/cloudbrain.go +++ b/models/cloudbrain.go @@ -205,6 +205,9 @@ type Cloudbrain struct { ModelName string //模型名称 ModelVersion string //模型版本 CkptName string //权重文件名称 + ModelId string //模型ID + ModelRepoName string `xorm:"-"` + ModelRepoOwnerName string `xorm:"-"` PreTrainModelUrl string //预训练模型地址 ResultUrl string //推理结果的obs路径 ResultJson string `xorm:"varchar(4000)"` @@ -2071,12 +2074,28 @@ func CreateCloudbrain(cloudbrain *Cloudbrain) (err error) { } } session.Commit() - + increaseModelReference(session, cloudbrain.ModelId) go IncreaseDatasetUseCount(cloudbrain.Uuid) go OperateRepoAITaskNum(cloudbrain.RepoID, 1) + //go IncreaseModelRefernceCount(cloudbrain) return nil } +func increaseModelReference(session *xorm.Session, modelId string) { + if modelId != "" { + log.Info("increase model count.") + if _, err := session.Exec("UPDATE `ai_model_manage` SET reference_count = reference_count + 1 WHERE id = ?", modelId); err != nil { + log.Info("err=" + err.Error()) + } + } +} + +// func IncreaseModelRefernceCount(cloudbrain *Cloudbrain) { +// if cloudbrain.ModelId != "" { +// AddModelInferenceCount(cloudbrain.ModelId) +// } +// } + func getRepoCloudBrain(cb *Cloudbrain) (*Cloudbrain, error) { has, err := x.Get(cb) if err != nil { @@ -2490,7 +2509,7 @@ func RestartCloudbrain(old *Cloudbrain, new *Cloudbrain) (err 
error) { if err = sess.Commit(); err != nil { return err } - + increaseModelReference(sess, new.ModelId) go IncreaseDatasetUseCount(new.Uuid) return nil } @@ -2628,7 +2647,6 @@ func CloudbrainAll(opts *CloudbrainsOptions) ([]*CloudbrainInfo, int64, error) { Join("left", "`user`", condition). Join("left", "cloudbrain_spec", "cloudbrain.id = cloudbrain_spec.cloudbrain_id"). Count(new(CloudbrainInfo)) - } if err != nil { @@ -2982,3 +3000,15 @@ func LoadSpecs4CloudbrainInfo(tasks []*CloudbrainInfo) error { } return nil } + +func GetCloudBrainByModelId(modelId string) ([]*Cloudbrain, error) { + cloudBrains := make([]*Cloudbrain, 0) + err := x.AllCols().Where("model_id=?", modelId).OrderBy("created_unix asc").Find(&cloudBrains) + return cloudBrains, err +} + +func GetCloudBrainByRepoIdAndModelName(repoId int64, modelName string) ([]*Cloudbrain, error) { + cloudBrains := make([]*Cloudbrain, 0) + err := x.AllCols().Where("model_name=? and repo_id=?", modelName, repoId).OrderBy("created_unix asc").Find(&cloudBrains) + return cloudBrains, err +} diff --git a/models/models.go b/models/models.go index eea3bd1833..cbb377e8a9 100755 --- a/models/models.go +++ b/models/models.go @@ -170,6 +170,8 @@ func init() { new(TechConvergeBaseInfo), new(RepoConvergeInfo), new(UserRole), + new(AiModelCollect), + new(AiModelFile), new(ModelMigrateRecord), ) diff --git a/models/repo.go b/models/repo.go index 960c02ee1e..1371ccecd3 100755 --- a/models/repo.go +++ b/models/repo.go @@ -1706,7 +1706,9 @@ func updateRepository(e Engine, repo *Repository, visibilityChanged bool) (err e if err != nil { return err } - + _, err = e.Where("repo_id = ?", repo.ID).Cols("is_private").Update(&AiModelManage{ + IsPrivate: true, + }) } else { //If repo has become public, we need set dataset to public _, err = e.Where("repo_id = ? 
and status <> 2", repo.ID).Cols("status").Update(&Dataset{ @@ -1870,6 +1872,9 @@ func DeleteRepository(doer *User, uid, repoID int64) error { // Delete dataset attachment record and remove related files deleteDatasetAttachmentByRepoId(sess, repoID) + + deleteModelByRepoId(repoID) + if err = deleteBeans(sess, &Access{RepoID: repo.ID}, &Action{RepoID: repo.ID}, @@ -2055,6 +2060,22 @@ func DeleteRepository(doer *User, uid, repoID int64) error { return nil } +func deleteModelByRepoId(repoId int64) { + models := QueryModelByRepoId(repoId) + if models != nil { + for _, model := range models { + log.Info("bucket=" + setting.Bucket + " path=" + model.Path) + if len(model.Path) > (len(setting.Bucket) + 1) { + err := storage.ObsRemoveObject(setting.Bucket, model.Path[len(setting.Bucket)+1:]) + if err != nil { + log.Info("Failed to delete model. id=" + model.ID) + } + } + } + } + DeleteModelByRepoId(repoId) +} + func deleteDatasetAttachmentByRepoId(sess *xorm.Session, repoId int64) error { attachments := make([]*Attachment, 0) if err := sess.Join("INNER", "dataset", "dataset.id = attachment.dataset_id"). 
diff --git a/modules/auth/cloudbrain.go b/modules/auth/cloudbrain.go index 7c3267e5ca..97b654d166 100755 --- a/modules/auth/cloudbrain.go +++ b/modules/auth/cloudbrain.go @@ -26,6 +26,7 @@ type CreateCloudBrainForm struct { ModelName string `form:"model_name"` ModelVersion string `form:"model_version"` CkptName string `form:"ckpt_name"` + ModelId string `form:"model_id"` LabelName string `form:"label_names"` PreTrainModelUrl string `form:"pre_train_model_url"` DatasetName string `form:"dataset_name"` @@ -68,7 +69,7 @@ type CreateCloudBrainInferencForm struct { JobType string `form:"job_type" binding:"Required"` BenchmarkCategory string `form:"get_benchmark_category"` GpuType string `form:"gpu_type"` - TrainUrl string `form:"train_url"` + PreTrainModelUrl string `form:"pre_train_model_url"` TestUrl string `form:"test_url"` Description string `form:"description"` ResourceSpecId int `form:"resource_spec_id" binding:"Required"` @@ -79,6 +80,7 @@ type CreateCloudBrainInferencForm struct { ModelVersion string `form:"model_version" binding:"Required"` CkptName string `form:"ckpt_name" binding:"Required"` LabelName string `form:"label_names" binding:"Required"` + ModelId string `form:"model_id" binding:"Required"` DatasetName string `form:"dataset_name"` SpecId int64 `form:"spec_id"` } diff --git a/modules/auth/grampus.go b/modules/auth/grampus.go index 59b75e1cb7..3ded77f763 100755 --- a/modules/auth/grampus.go +++ b/modules/auth/grampus.go @@ -21,6 +21,7 @@ type CreateGrampusTrainJobForm struct { ModelName string `form:"model_name"` ModelVersion string `form:"model_version"` CkptName string `form:"ckpt_name"` + ModelId string `form:"model_id"` LabelName string `form:"label_names"` PreTrainModelUrl string `form:"pre_train_model_url"` SpecId int64 `form:"spec_id"` @@ -44,6 +45,7 @@ type CreateGrampusNotebookForm struct { ModelName string `form:"model_name"` ModelVersion string `form:"model_version"` CkptName string `form:"ckpt_name"` + ModelId string `form:"model_id"` 
LabelName string `form:"label_names"` PreTrainModelUrl string `form:"pre_train_model_url"` SpecId int64 `form:"spec_id" binding:"Required"` diff --git a/modules/auth/modelarts.go b/modules/auth/modelarts.go index a450a565c1..0b078df447 100755 --- a/modules/auth/modelarts.go +++ b/modules/auth/modelarts.go @@ -25,6 +25,7 @@ type CreateModelArtsNotebookForm struct { ModelName string `form:"model_name"` ModelVersion string `form:"model_version"` CkptName string `form:"ckpt_name"` + ModelId string `form:"model_id"` LabelName string `form:"label_names"` PreTrainModelUrl string `form:"pre_train_model_url"` SpecId int64 `form:"spec_id" binding:"Required"` @@ -56,6 +57,7 @@ type CreateModelArtsTrainJobForm struct { EngineName string `form:"engine_names" binding:"Required"` SpecId int64 `form:"spec_id" binding:"Required"` ModelName string `form:"model_name"` + ModelId string `form:"model_id"` ModelVersion string `form:"model_version"` CkptName string `form:"ckpt_name"` LabelName string `form:"label_names"` @@ -79,10 +81,11 @@ type CreateModelArtsInferenceJobForm struct { FlavorName string `form:"flaver_names" binding:"Required"` EngineName string `form:"engine_names" binding:"Required"` LabelName string `form:"label_names" binding:"Required"` - TrainUrl string `form:"train_url" binding:"Required"` + PreTrainModelUrl string `form:"pre_train_model_url" binding:"Required"` ModelName string `form:"model_name" binding:"Required"` ModelVersion string `form:"model_version" binding:"Required"` CkptName string `form:"ckpt_name" binding:"Required"` + ModelId string `form:"model_id"` SpecId int64 `form:"spec_id" binding:"Required"` } diff --git a/modules/cloudbrain/cloudbrain.go b/modules/cloudbrain/cloudbrain.go index 9833327b1a..e2802a7961 100755 --- a/modules/cloudbrain/cloudbrain.go +++ b/modules/cloudbrain/cloudbrain.go @@ -79,6 +79,7 @@ type GenerateCloudBrainTaskReq struct { ModelName string ModelVersion string CkptName string + ModelId string LabelName string PreTrainModelPath 
string PreTrainModelUrl string @@ -358,6 +359,7 @@ func GenerateTask(req GenerateCloudBrainTaskReq) (string, error) { ModelName: req.ModelName, ModelVersion: req.ModelVersion, CkptName: req.CkptName, + ModelId: req.ModelId, ResultUrl: req.ResultPath, LabelName: req.LabelName, PreTrainModelUrl: req.PreTrainModelUrl, @@ -475,18 +477,13 @@ func RestartTask(ctx *context.Context, task *models.Cloudbrain, newID *string) e } if task.PreTrainModelUrl != "" { //预训练 - _, err := models.QueryModelByPath(task.PreTrainModelUrl) - if err != nil { - log.Warn("The model may be deleted", err) - } else { - volumes = append(volumes, models.Volume{ - HostPath: models.StHostPath{ - Path: setting.Attachment.Minio.RealPath + task.PreTrainModelUrl, - MountPath: PretrainModelMountPath, - ReadOnly: true, - }, - }) - } + volumes = append(volumes, models.Volume{ + HostPath: models.StHostPath{ + Path: setting.Attachment.Minio.RealPath + task.PreTrainModelUrl, + MountPath: PretrainModelMountPath, + ReadOnly: true, + }, + }) } createTime := timeutil.TimeStampNow() @@ -549,6 +546,7 @@ func RestartTask(ctx *context.Context, task *models.Cloudbrain, newID *string) e LabelName: task.LabelName, PreTrainModelUrl: task.PreTrainModelUrl, CkptName: task.CkptName, + ModelId: task.ModelId, } err = models.RestartCloudbrain(task, newTask) @@ -698,6 +696,7 @@ type GenerateModelArtsNotebookReq struct { ModelName string LabelName string CkptName string + ModelId string ModelVersion string PreTrainModelUrl string } diff --git a/modules/convert/cloudbrain.go b/modules/convert/cloudbrain.go index 73e37b1eac..99587402d9 100644 --- a/modules/convert/cloudbrain.go +++ b/modules/convert/cloudbrain.go @@ -31,10 +31,10 @@ func ToCloudBrain(task *models.Cloudbrain) *api.Cloudbrain { VersionName: task.VersionName, ModelVersion: task.ModelVersion, CkptName: task.CkptName, - - StartTime: int64(task.StartTime), - EndTime: int64(task.EndTime), - Spec: ToSpecification(task.Spec), + ModelId: task.ModelId, + StartTime: 
int64(task.StartTime), + EndTime: int64(task.EndTime), + Spec: ToSpecification(task.Spec), } } func ToAttachment(attachment *models.Attachment) *api.AttachmentShow { diff --git a/modules/grampus/grampus.go b/modules/grampus/grampus.go index d0da396608..21146b2b4c 100755 --- a/modules/grampus/grampus.go +++ b/modules/grampus/grampus.go @@ -1,6 +1,7 @@ package grampus import ( + "encoding/json" "fmt" "strconv" "strings" @@ -78,6 +79,7 @@ type GenerateTrainJobReq struct { ModelName string LabelName string CkptName string + ModelId string ModelVersion string PreTrainModelPath string PreTrainModelUrl string @@ -103,6 +105,7 @@ type GenerateNotebookJobReq struct { ModelName string LabelName string CkptName string + ModelId string ModelVersion string PreTrainModelPath string PreTrainModelUrl string @@ -227,7 +230,7 @@ func GenerateNotebookJob(ctx *context.Context, req *GenerateNotebookJobReq) (job EndPoint: getEndPoint(), ReadOnly: true, ObjectKey: req.PreTrainModelPath, - ContainerPath: cloudbrain.PretrainModelMountPath, + ContainerPath: cloudbrain.PretrainModelMountPath + "/" + req.CkptName, }) } @@ -248,7 +251,8 @@ func GenerateNotebookJob(ctx *context.Context, req *GenerateNotebookJobReq) (job log.Info("debug command:" + req.Command) } - + datasetGrampusJson, _ := json.Marshal(datasetGrampus) + log.Info("datasetGrampusJson=" + string(datasetGrampusJson)) jobResult, err := createNotebookJob(models.CreateGrampusNotebookRequest{ Name: req.JobName, Tasks: []models.GrampusNotebookTask{ @@ -299,6 +303,7 @@ func GenerateNotebookJob(ctx *context.Context, req *GenerateNotebookJobReq) (job LabelName: req.LabelName, PreTrainModelUrl: req.PreTrainModelUrl, CkptName: req.CkptName, + ModelId: req.ModelId, }) if err != nil { @@ -355,13 +360,13 @@ func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (jobId str } else if ProcessorTypeGPU == req.ProcessType { datasetGrampus = getDatasetGPUGrampus(req.DatasetInfos, "/tmp/dataset") if len(req.ModelName) != 0 { - 
modelGrampus = []models.GrampusDataset{ + modelGrampus = []models.GrampusDataset{ //model save as obs { Name: req.ModelName, - Bucket: setting.Attachment.Minio.Bucket, - EndPoint: setting.Attachment.Minio.Endpoint, - ObjectKey: req.PreTrainModelPath, + Bucket: setting.Bucket, + EndPoint: getEndPoint(), ReadOnly: true, + ObjectKey: req.PreTrainModelPath, ContainerPath: "/tmp/pretrainmodel/" + req.CkptName, }, } @@ -382,13 +387,13 @@ func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (jobId str } else if ProcessorTypeGCU == req.ProcessType { datasetGrampus = getDatasetGCUGrampus(req.DatasetInfos, "/tmp/dataset") if len(req.ModelName) != 0 { - modelGrampus = []models.GrampusDataset{ + modelGrampus = []models.GrampusDataset{ //model save as obs { Name: req.ModelName, - Bucket: setting.Attachment.Minio.Bucket, - EndPoint: setting.Attachment.Minio.Endpoint, - ObjectKey: req.PreTrainModelPath, + Bucket: setting.Bucket, + EndPoint: getEndPoint(), ReadOnly: true, + ObjectKey: req.PreTrainModelPath, ContainerPath: "/tmp/pretrainmodel/" + req.CkptName, }, } @@ -407,6 +412,9 @@ func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (jobId str } } + modelGrampusJson, _ := json.Marshal(modelGrampus) + log.Info("train job modelGrampus=" + string(modelGrampusJson)) + jobResult, err := createJob(models.CreateGrampusJobRequest{ Name: req.JobName, Tasks: []models.GrampusTasks{ @@ -465,6 +473,7 @@ func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (jobId str LabelName: req.LabelName, PreTrainModelUrl: req.PreTrainModelUrl, CkptName: req.CkptName, + ModelId: req.ModelId, }) if err != nil { diff --git a/modules/modelarts/modelarts.go b/modules/modelarts/modelarts.go index 81f7eebb5c..2cf6330f00 100755 --- a/modules/modelarts/modelarts.go +++ b/modules/modelarts/modelarts.go @@ -91,6 +91,7 @@ type GenerateTrainJobReq struct { ModelName string LabelName string CkptName string + ModelId string ModelVersion string PreTrainModelUrl string 
} @@ -122,6 +123,7 @@ type GenerateInferenceJobReq struct { ModelName string ModelVersion string CkptName string + ModelId string ResultUrl string Spec *models.Specification DatasetName string @@ -244,6 +246,7 @@ func GenerateNotebook2(ctx *context.Context, req cloudbrain.GenerateModelArtsNot LabelName: req.LabelName, PreTrainModelUrl: req.PreTrainModelUrl, CkptName: req.CkptName, + ModelId: req.ModelId, } err = models.CreateCloudbrain(task) @@ -366,6 +369,7 @@ func GenerateTrainJob(ctx *context.Context, req *GenerateTrainJobReq) (jobId str LabelName: req.LabelName, PreTrainModelUrl: req.PreTrainModelUrl, CkptName: req.CkptName, + ModelId: req.ModelId, }) if createErr != nil { @@ -526,6 +530,7 @@ func GenerateTrainJobVersion(ctx *context.Context, req *GenerateTrainJobReq, job LabelName: req.LabelName, PreTrainModelUrl: req.PreTrainModelUrl, CkptName: req.CkptName, + ModelId: req.ModelId, }) if createErr != nil { log.Error("CreateCloudbrain(%s) failed:%v", req.JobName, createErr.Error()) @@ -706,6 +711,7 @@ func GenerateInferenceJob(ctx *context.Context, req *GenerateInferenceJobReq) (j ModelName: req.ModelName, ModelVersion: req.ModelVersion, CkptName: req.CkptName, + ModelId: req.ModelId, ResultUrl: req.ResultUrl, CreatedUnix: createTime, UpdatedUnix: createTime, diff --git a/modules/modelarts_cd/modelarts.go b/modules/modelarts_cd/modelarts.go index 9d74c0919a..ace088f876 100755 --- a/modules/modelarts_cd/modelarts.go +++ b/modules/modelarts_cd/modelarts.go @@ -154,6 +154,7 @@ func GenerateNotebook(ctx *context.Context, req cloudbrain.GenerateModelArtsNote LabelName: req.LabelName, PreTrainModelUrl: req.PreTrainModelUrl, CkptName: req.CkptName, + ModelId: req.ModelId, } err = models.CreateCloudbrain(task) diff --git a/modules/setting/setting.go b/modules/setting/setting.go index 99029884f8..b67f0713b8 100755 --- a/modules/setting/setting.go +++ b/modules/setting/setting.go @@ -1757,7 +1757,7 @@ func getModelConvertConfig() { ModelConvert.GPU_PYTORCH_IMAGE = 
sec.Key("GPU_PYTORCH_IMAGE").MustString("dockerhub.pcl.ac.cn:5000/user-images/openi:tensorRT_7_zouap") ModelConvert.GpuQueue = sec.Key("GpuQueue").MustString("openidgx") ModelConvert.GPU_TENSORFLOW_IMAGE = sec.Key("GPU_TENSORFLOW_IMAGE").MustString("dockerhub.pcl.ac.cn:5000/user-images/openi:tf2onnx") - ModelConvert.NPU_MINDSPORE_16_IMAGE = sec.Key("NPU_MINDSPORE_16_IMAGE").MustString("swr.cn-south-222.ai.pcl.cn/openi/mindspore1.6.1_train_v1_openi:v3_ascend") + ModelConvert.NPU_MINDSPORE_16_IMAGE = sec.Key("NPU_MINDSPORE_16_IMAGE").MustString("swr.cn-south-222.ai.pcl.cn/openi/mindspore1.8.1_train_openi_new:v1") ModelConvert.PytorchOnnxBootFile = sec.Key("PytorchOnnxBootFile").MustString("convert_pytorch.py") ModelConvert.PytorchTrTBootFile = sec.Key("PytorchTrTBootFile").MustString("convert_pytorch_tensorrt.py") ModelConvert.MindsporeBootFile = sec.Key("MindsporeBootFile").MustString("convert_mindspore.py") @@ -1767,8 +1767,8 @@ func getModelConvertConfig() { ModelConvert.GPU_Resource_Specs_ID = sec.Key("GPU_Resource_Specs_ID").MustInt(1) ModelConvert.NPU_FlavorCode = sec.Key("NPU_FlavorCode").MustString("modelarts.bm.910.arm.public.1") ModelConvert.NPU_PoolID = sec.Key("NPU_PoolID").MustString("pool7908321a") - ModelConvert.NPU_MINDSPORE_IMAGE_ID = sec.Key("NPU_MINDSPORE_IMAGE_ID").MustInt(121) - ModelConvert.NPU_TENSORFLOW_IMAGE_ID = sec.Key("NPU_TENSORFLOW_IMAGE_ID").MustInt(35) + ModelConvert.NPU_MINDSPORE_IMAGE_ID = sec.Key("NPU_MINDSPORE_IMAGE_ID").MustInt(37) + ModelConvert.NPU_TENSORFLOW_IMAGE_ID = sec.Key("NPU_TENSORFLOW_IMAGE_ID").MustInt(38) ModelConvert.GPU_PADDLE_IMAGE = sec.Key("GPU_PADDLE_IMAGE").MustString("dockerhub.pcl.ac.cn:5000/user-images/openi:paddle2.3.0_gpu_cuda11.2_cudnn8") ModelConvert.GPU_MXNET_IMAGE = sec.Key("GPU_MXNET_IMAGE").MustString("dockerhub.pcl.ac.cn:5000/user-images/openi:mxnet191cu_cuda102_py37") ModelConvert.PaddleOnnxBootFile = sec.Key("PaddleOnnxBootFile").MustString("convert_paddle.py") diff --git 
a/modules/storage/local.go b/modules/storage/local.go index 4f431f11a2..4a53cf7ead 100755 --- a/modules/storage/local.go +++ b/modules/storage/local.go @@ -89,6 +89,10 @@ func (l *LocalStorage) UploadObject(fileName, filePath string) error { return nil } +func (l *LocalStorage) UploadContent(bucketName string, path string, r io.Reader) (int64, error) { + return int64(0), nil +} + func (l *LocalStorage) DeleteDir(dir string) error { return nil } diff --git a/modules/storage/minio.go b/modules/storage/minio.go index a1a6e131a3..6249cd82c3 100755 --- a/modules/storage/minio.go +++ b/modules/storage/minio.go @@ -163,6 +163,10 @@ func (m *MinioStorage) UploadObject(fileName, filePath string) error { return err } +func (m *MinioStorage) UploadContent(bucketName string, path string, r io.Reader) (int64, error) { + return m.client.PutObject(bucketName, path, r, -1, minio.PutObjectOptions{ContentType: "application/octet-stream"}) +} + func GetMinioPath(jobName, suffixPath string) string { return setting.Attachment.Minio.RealPath + setting.Attachment.Minio.Bucket + "/" + setting.CBCodePathPrefix + jobName + suffixPath } diff --git a/modules/storage/obs.go b/modules/storage/obs.go index f5764cd777..00731dda3f 100755 --- a/modules/storage/obs.go +++ b/modules/storage/obs.go @@ -371,11 +371,13 @@ func GetOneLevelAllObjectUnderDir(bucket string, prefixRootPath string, relative for { output, err := ObsCli.ListObjects(input) if err == nil { - log.Info("Page:%d\n", index) + log.Info("Page:%d\n input.Prefix=%v", index, input.Prefix) + log.Info("input.Prefix=" + input.Prefix) index++ for _, val := range output.Contents { var isDir bool var fileName string + log.Info("val.key=" + val.Key) if val.Key == input.Prefix { continue } @@ -707,3 +709,33 @@ func IsObjectExist4Obs(bucket, key string) (bool, error) { } return true, nil } + +func PutStringToObs(bucket, key string, fileContent string) error { + log.Info("PutStringToObs bucket=" + bucket + " key=" + key) + input := 
&obs.PutObjectInput{} + input.Bucket = bucket + input.Key = key + input.Body = strings.NewReader(fileContent) + _, err := ObsCli.PutObject(input) + if err != nil { + if obsError, ok := err.(obs.ObsError); ok { + log.Info("Message:%s\n", obsError.Message) + } + } + return err +} + +func PutReaderToObs(bucket, key string, reader io.Reader) error { + log.Info("PutReaderToObs bucket=" + bucket + " key=" + key) + input := &obs.PutObjectInput{} + input.Bucket = bucket + input.Key = key + input.Body = reader + _, err := ObsCli.PutObject(input) + if err != nil { + if obsError, ok := err.(obs.ObsError); ok { + log.Info("Message:%s\n", obsError.Message) + } + } + return err +} diff --git a/modules/storage/storage.go b/modules/storage/storage.go index 7b46f94b20..b08969f1be 100755 --- a/modules/storage/storage.go +++ b/modules/storage/storage.go @@ -5,12 +5,13 @@ package storage import ( + "fmt" + "io" + "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/obs" "code.gitea.io/gitea/modules/setting" - "fmt" "github.com/minio/minio-go" - "io" ) const ( @@ -29,6 +30,7 @@ type ObjectStorage interface { PresignedPutURL(path string) (string, error) HasObject(path string) (bool, error) UploadObject(fileName, filePath string) error + UploadContent(bucketName string, path string, r io.Reader) (int64, error) } // Copy copys a file from source ObjectStorage to dest ObjectStorage diff --git a/modules/structs/cloudbrain.go b/modules/structs/cloudbrain.go index 4e3b77eb32..6fd162dd86 100644 --- a/modules/structs/cloudbrain.go +++ b/modules/structs/cloudbrain.go @@ -16,6 +16,7 @@ type CreateGrampusTrainJobOption struct { ModelName string `json:"model_name"` ModelVersion string `json:"model_version"` CkptName string `json:"ckpt_name"` + ModelId string `json:"model_id"` LabelName string `json:"label_names"` PreTrainModelUrl string `json:"pre_train_model_url"` SpecId int64 `json:"spec_id" binding:"Required"` @@ -36,6 +37,7 @@ type CreateTrainJobOption struct { ModelName string 
`json:"model_name"` ModelVersion string `json:"model_version"` CkptName string `json:"ckpt_name"` + ModelId string `json:"model_id"` LabelName string `json:"label_names"` PreTrainModelUrl string `json:"pre_train_model_url"` SpecId int64 `json:"spec_id" binding:"Required"` @@ -52,6 +54,7 @@ type CreateNotebookOption struct { ModelName string `json:"model_name"` ModelVersion string `json:"model_version"` CkptName string `json:"ckpt_name"` + ModelId string `json:"model_id"` LabelName string `json:"label_names"` PreTrainModelUrl string `json:"pre_train_model_url"` SpecId int64 `json:"spec_id" binding:"Required"` @@ -91,6 +94,7 @@ type Cloudbrain struct { ModelName string `json:"model_name"` //模型名称 ModelVersion string `json:"model_version"` //模型版本 CkptName string `json:"ckpt_name"` //权重文件名称 + ModelId string `json:"model_id"` //权重文件名称 StartTime int64 `json:"start_time"` EndTime int64 `json:"end_time"` VersionName string `json:"version_name"` diff --git a/options/locale/locale_en-US.ini b/options/locale/locale_en-US.ini index fee7dcfd63..3591ea72fb 100755 --- a/options/locale/locale_en-US.ini +++ b/options/locale/locale_en-US.ini @@ -1059,6 +1059,7 @@ cloudbrain.time.starttime=Start run time cloudbrain.time.endtime=End run time cloudbrain.datasetdownload=Dataset download url model_manager = Model +model_square = Model Square model_experience = Model Experience model_noright=You have no right to do the operation. model_rename=Duplicate model name, please modify model name. @@ -1263,6 +1264,7 @@ modelarts.infer_job.boot_file_helper=The startup file is the entry file for your modelarts.infer_job.continue_helper=Check Reuse to copy the output result file of the last training task modelarts.train_job.resource_helper=The "resource specification" is the hardware you use to run the task. In order for more people to use the resources of this platform, please select according to your actual needs modelarts.infer_job.tooltip = The model has been deleted and cannot be viewed. 
+modelarts.infer_job.model_cant_see = You are currently unable to view the model, possibly due to permission restrictions or the model has been deleted. modelarts.download_log=Download log file modelarts.log_file = Log file modelarts.fullscreen_log_file = View in full screen @@ -1325,7 +1327,7 @@ model.manage.engine=Model engine model.manage.select.engine=Select model engine model.manage.modelfile=Model file model.manage.modellabel=Model label -model.manage.modeldesc=Model description +model.manage.modeldesc=Model brief introduction model.manage.modelaccess=Model Access model.manage.modelaccess.public=Public model.manage.modelaccess.private=Private @@ -2139,7 +2141,7 @@ settings.wiki_deletion_success = The repository wiki data has been deleted. settings.delete = Delete This Repository settings.delete_desc = Deleting a repository is permanent and cannot be undone. settings.delete_notices_1 = - This operation CANNOT be undone. -settings.delete_notices_2 = - This operation will permanently delete the %s repository including code, issues, comments, wiki data and collaborator settings. +settings.delete_notices_2 = - This operation will permanently delete the %s repository including the code, dataset, model, cloudbrain tasks, tasks, merge requests, and other contents. settings.delete_notices_fork_1 = - Forks of this repository will become independent after deletion. settings.deletion_success = The repository has been deleted. settings.update_settings_success = The repository settings have been updated. 
@@ -2619,6 +2621,7 @@ dashboard = Dashboard users = User Accounts organizations = Organizations datasets= Dataset +models=Model repositories = Repositories hooks = Default Webhooks systemhooks = System Webhooks @@ -3161,7 +3164,7 @@ task_c2ent_gcutrainjob=`created GCU type train task %s` task_inferencejob=`created reasoning task %s` task_benchmark=`created profiling task %s` -task_createmodel=`created new model %s` +task_createmodel=`created new model %s` task_gputrainjob=`created CPU/GPU training task %s` task_c2netnputrainjob=`created NPU training task %s` task_c2netgputrainjob=`created CPU/GPU training task %s` diff --git a/options/locale/locale_zh-CN.ini b/options/locale/locale_zh-CN.ini index 6b1353ebf4..1d3464cd59 100755 --- a/options/locale/locale_zh-CN.ini +++ b/options/locale/locale_zh-CN.ini @@ -1058,6 +1058,7 @@ datasets.desc=数据集功能 cloudbrain_helper=使用GPU/NPU资源,开启Notebook、模型训练任务等 model_manager = 模型 +model_square = 模型广场 model_experience = 模型体验 model_noright=您没有操作权限。 model_rename=模型名称重复,请修改模型名称 @@ -1275,6 +1276,7 @@ modelarts.infer_job.boot_file_helper=启动文件是您程序执行的入口文 modelarts.infer_job.continue_helper=勾选复用将拷贝上次训练任务输出结果文件 modelarts.train_job.resource_helper=「资源规格」是您运行该任务使用的硬件,为了更多人能够使用本平台的资源,请按照您的实际需求进行选择。 modelarts.infer_job.tooltip = 该模型已删除,无法查看。 +modelarts.infer_job.model_cant_see = 您暂时无法查看该模型,可能因为权限限制或模型已被删除。 modelarts.download_log=下载日志文件 modelarts.log_file=日志文件 modelarts.fullscreen_log_file=全屏查看 @@ -1338,7 +1340,7 @@ model.manage.engine=模型框架 model.manage.select.engine=选择模型框架 model.manage.modelfile=模型文件 model.manage.modellabel=模型标签 -model.manage.modeldesc=模型描述 +model.manage.modeldesc=模型简介 model.manage.modelaccess=模型权限 model.manage.modelaccess.public=公开 model.manage.modelaccess.private=私有 @@ -2155,7 +2157,7 @@ settings.wiki_deletion_success=项目百科数据删除成功! 
settings.delete=删除本项目 settings.delete_desc=删除项目是永久性的, 无法撤消。 settings.delete_notices_1=- 此操作 不可以 被回滚。 -settings.delete_notices_2=- 此操作将永久删除项目 %s,包括 Git 数据、 任务、评论、百科和协作者的操作权限。 +settings.delete_notices_2=- 此操作将永久删除项目 %s,包括该项目中的代码、数据集、模型、云脑任务、任务、合并请求等内容。 settings.delete_notices_fork_1=- 在此项目删除后,它的派生项目将变成独立项目。 settings.deletion_success=项目已被删除。 settings.deletion_notice_cloudbrain=请先停止项目内正在运行的云脑任务,然后再删除项目。 @@ -2638,6 +2640,7 @@ dashboard=管理面板 users=帐户管理 organizations=组织管理 datasets=数据集 +models=模型 repositories=项目管理 hooks=默认Web钩子 systemhooks=系统 Web 钩子 @@ -3179,7 +3182,7 @@ task_c2ent_gcutrainjob=`创建了GCU类型训练任务 %s` task_inferencejob=`创建了推理任务 %s` task_benchmark=`创建了评测任务 %s` -task_createmodel=`导入了新模型 %s` +task_createmodel=`导入了新模型 %s` task_gputrainjob=`创建了CPU/GPU类型训练任务 %s` task_c2netnputrainjob=`创建了NPU类型训练任务 %s` task_c2netgputrainjob=`创建了CPU/GPU类型训练任务 %s` diff --git a/public/home/home.js b/public/home/home.js index 13915c3d46..908589f0eb 100755 --- a/public/home/home.js +++ b/public/home/home.js @@ -290,7 +290,7 @@ function getTaskLink(record){ }else if(record.OpType == 29){ re = re + "/cloudbrain/benchmark/" + record.Content; }else if(record.OpType == 30){ - re = re + "/modelmanage/show_model_info?name=" + record.RefName; + re = re + "/modelmanage/model_readme_tmpl?name=" + record.RefName; }else if(record.OpType == 31){ re = re + "/cloudbrain/train-job/" + record.Content; }else if(record.OpType == 32 || record.OpType == 33 || record.OpType == 42){ diff --git a/public/img/empty-box.svg b/public/img/empty-box.svg new file mode 100644 index 0000000000..6b5f68af80 --- /dev/null +++ b/public/img/empty-box.svg @@ -0,0 +1,57 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/routers/admin/dataset.go b/routers/admin/dataset.go index 49900b8fe9..ac24549249 100644 --- a/routers/admin/dataset.go +++ b/routers/admin/dataset.go @@ -1,11 +1,12 @@ package admin import ( - "code.gitea.io/gitea/modules/notification" "net/http" "strconv" "strings" 
+ "code.gitea.io/gitea/modules/notification" + "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/context" @@ -14,7 +15,8 @@ import ( ) const ( - tplDatasets base.TplName = "admin/dataset/list" + tplDatasets base.TplName = "admin/dataset/list" + tplAdminModelManage base.TplName = "admin/model/list" ) func Datasets(ctx *context.Context) { diff --git a/routers/admin/modelmanage.go b/routers/admin/modelmanage.go new file mode 100644 index 0000000000..7316348c30 --- /dev/null +++ b/routers/admin/modelmanage.go @@ -0,0 +1,189 @@ +package admin + +import ( + "fmt" + "strings" + + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/setting" +) + +func AdminModelManage(ctx *context.Context) { + ctx.Data["Title"] = ctx.Tr("admin.models") + ctx.Data["PageIsAdmin"] = true + ctx.Data["PageIsAdminModels"] = true + + page := ctx.QueryInt("page") + if page <= 0 { + page = 1 + } + pageSize := ctx.QueryInt("pageSize") + if pageSize <= 0 { + pageSize = setting.UI.IssuePagingNum + } + + var ( + count int64 + err error + orderBy string + ) + + ctx.Data["SortType"] = ctx.Query("sort") + switch ctx.Query("sort") { + case "newest": + orderBy = "created_unix DESC" + case "oldest": + orderBy = "created_unix ASC" + case "recentupdate": + orderBy = "updated_unix DESC" + case "leastupdate": + orderBy = "updated_unix ASC" + case "reversealphabetically": + orderBy = "name DESC" + case "alphabetically": + orderBy = "name ASC" + case "reversesize": + orderBy = "size DESC" + case "size": + orderBy = "size ASC" + case "downloadtimes": + orderBy = "download_count DESC" + case "mostusecount": + orderBy = "reference_count DESC" + case "fewestusecount": + orderBy = "reference_count ASC" + default: + ctx.Data["SortType"] = "recentupdate" + orderBy = "created_unix DESC" + } + + keyword := strings.Trim(ctx.Query("q"), " ") + + modelResult, count, err := 
models.QueryModel(&models.AiModelQueryOptions{ + ListOptions: models.ListOptions{ + Page: page, + PageSize: setting.UI.ExplorePagingNum, + }, + Type: -1, + New: -1, + Status: -1, + IsQueryPrivate: true, + IsRecommend: ctx.QueryBool("recommend"), + UserID: -1, + IsCollected: false, + CollectedUserId: -1, + LabelFilter: "", + FrameFilter: -1, + ComputeResourceFilter: "", + Namelike: keyword, + SortType: orderBy, + RepoID: -1, + }) + if err != nil { + ctx.ServerError("Cloudbrain", err) + return + } + userIds := make([]int64, len(modelResult)) + modelIds := make([]string, len(modelResult)) + repoIds := make([]int64, len(modelResult)) + for i, model := range modelResult { + userIds[i] = model.UserId + modelIds[i] = model.ID + repoIds[i] = model.RepoId + } + repoInfo, err := queryRepoInfoByIds(repoIds) + userNameMap := queryUserName(userIds) + + for _, model := range modelResult { + //removeIpInfo(model) + model.TrainTaskInfo = "" + value := userNameMap[model.UserId] + if value != nil { + model.UserName = value.Name + model.UserRelAvatarLink = value.RelAvatarLink() + } + if repoInfo != nil { + repo := repoInfo[model.RepoId] + if repo != nil { + model.RepoName = repo.Name + model.RepoOwnerName = repo.OwnerName + model.RepoDisplayName = repo.DisplayName() + } + } + } + + ctx.Data["Keyword"] = keyword + ctx.Data["Total"] = count + ctx.Data["models"] = modelResult + ctx.Data["Recommend"] = ctx.QueryBool("recommend") + pager := context.NewPagination(int(count), setting.UI.ExplorePagingNum, page, 5) + pager.SetDefaultParams(ctx) + ctx.Data["Page"] = pager + + ctx.HTML(200, tplAdminModelManage) +} + +func ModifyModelRecommend(ctx *context.Context) { + id := ctx.Query("id") + isRecommend := ctx.QueryInt("recommend") + re := map[string]string{ + "code": "-1", + } + task, err := models.QueryModelById(id) + if err != nil || task == nil { + re["msg"] = fmt.Sprintf("no such model. id=%s err=%v", id, err) + log.Error("no such model! id=%s err=%v", id, err) + ctx.JSON(200, re) + return + } + if ctx.User == nil || !ctx.User.IsAdmin { 
+ re["msg"] = "No right to operation." + ctx.JSON(200, re) + return + } + + err = models.ModifyModelRecommend(id, isRecommend) + if err == nil { + re["code"] = "0" + ctx.JSON(200, re) + log.Info("modify success.") + } else { + re["msg"] = err.Error() + ctx.JSON(200, re) + log.Info("Failed to modify.id=" + id + " isprivate=" + fmt.Sprint(isRecommend) + " error:" + err.Error()) + } +} + +func queryUserName(intSlice []int64) map[int64]*models.User { + keys := make(map[int64]string) + uniqueElements := []int64{} + for _, entry := range intSlice { + if _, value := keys[entry]; !value { + keys[entry] = "" + uniqueElements = append(uniqueElements, entry) + } + } + result := make(map[int64]*models.User) + userLists, err := models.GetUsersByIDs(uniqueElements) + if err == nil { + for _, user := range userLists { + result[user.ID] = user + } + } + return result +} + +func queryRepoInfoByIds(intSlice []int64) (map[int64]*models.Repository, error) { + keys := make(map[int64]string) + uniqueElements := []int64{} + for _, entry := range intSlice { + if _, value := keys[entry]; !value { + keys[entry] = "" + uniqueElements = append(uniqueElements, entry) + } + } + re, err := models.GetRepositoriesMapByIDs(uniqueElements) + return re, err +} diff --git a/routers/repo/ai_model_convert.go b/routers/repo/ai_model_convert.go index 36664458ea..7affbf5f10 100644 --- a/routers/repo/ai_model_convert.go +++ b/routers/repo/ai_model_convert.go @@ -57,8 +57,8 @@ const ( NetOutputFormat_FP32 = 0 NetOutputFormat_FP16 = 1 - NPU_MINDSPORE_IMAGE_ID = 35 - NPU_TENSORFLOW_IMAGE_ID = 121 + //NPU_MINDSPORE_IMAGE_ID = 37 + //NPU_TENSORFLOW_IMAGE_ID = 38 //GPU_Resource_Specs_ID = 1 //cpu 1, gpu 1 @@ -219,10 +219,10 @@ func createNpuTrainJob(modelConvert *models.AiModelConvert, ctx *context.Context } var engineId int64 - engineId = int64(NPU_MINDSPORE_IMAGE_ID) + engineId = int64(setting.ModelConvert.NPU_MINDSPORE_IMAGE_ID) bootfile := setting.ModelConvert.MindsporeBootFile if modelConvert.SrcEngine == 
TENSORFLOW_ENGINE { - engineId = int64(NPU_TENSORFLOW_IMAGE_ID) + engineId = int64(setting.ModelConvert.NPU_TENSORFLOW_IMAGE_ID) bootfile = setting.ModelConvert.TensorFlowNpuBootFile } userCommand := "/bin/bash /home/work/run_train.sh 's3://" + codeObsPath + "' 'code/" + bootfile + "' '/tmp/log/train.log' --'data_url'='s3://" + dataPath + "' --'train_url'='s3://" + outputObsPath + "'" @@ -373,6 +373,16 @@ func createGpuTrainJob(modelConvert *models.AiModelConvert, ctx *context.Context command := "" IMAGE_URL := setting.ModelConvert.GPU_PYTORCH_IMAGE dataActualPath := setting.Attachment.Minio.RealPath + modelRelativePath + if model.Type == models.TypeCloudBrainTwo { + //如果模型在OBS上,需要下载到本地,并上传到minio中 + relatetiveModelPath := setting.JobPath + modelConvert.ID + "/dataset" + log.Info("local dataset path:" + relatetiveModelPath) + downloadFromObsToLocal(model, relatetiveModelPath) + uploadCodeToMinio(relatetiveModelPath+"/", modelConvert.ID, "/dataset/") + deleteLocalDir(relatetiveModelPath) + dataActualPath = setting.Attachment.Minio.RealPath + setting.Attachment.Minio.Bucket + "/" + setting.CBCodePathPrefix + modelConvert.ID + "/dataset" + } + log.Info("dataActualPath=" + dataActualPath) if modelConvert.SrcEngine == PYTORCH_ENGINE { if modelConvert.DestFormat == CONVERT_FORMAT_ONNX { @@ -389,15 +399,6 @@ func createGpuTrainJob(modelConvert *models.AiModelConvert, ctx *context.Context } else { return errors.New("Not support the format.") } - //如果模型在OBS上,需要下载到本地,并上传到minio中 - if model.Type == models.TypeCloudBrainTwo { - relatetiveModelPath := setting.JobPath + modelConvert.ID + "/dataset" - log.Info("local dataset path:" + relatetiveModelPath) - downloadFromObsToLocal(model, relatetiveModelPath) - uploadCodeToMinio(relatetiveModelPath+"/", modelConvert.ID, "/dataset/") - deleteLocalDir(relatetiveModelPath) - dataActualPath = setting.Attachment.Minio.RealPath + setting.Attachment.Minio.Bucket + "/" + setting.CBCodePathPrefix + modelConvert.ID + "/dataset" - } } else if 
modelConvert.SrcEngine == PADDLE_ENGINE { IMAGE_URL = setting.ModelConvert.GPU_PADDLE_IMAGE if modelConvert.DestFormat == CONVERT_FORMAT_ONNX { @@ -413,7 +414,6 @@ func createGpuTrainJob(modelConvert *models.AiModelConvert, ctx *context.Context return errors.New("Not support the format.") } } - log.Info("dataActualPath=" + dataActualPath) log.Info("command=" + command) diff --git a/routers/repo/ai_model_manage.go b/routers/repo/ai_model_manage.go index e8738fe5c4..036fc437e8 100644 --- a/routers/repo/ai_model_manage.go +++ b/routers/repo/ai_model_manage.go @@ -31,8 +31,8 @@ const ( tplModelManageIndex = "repo/modelmanage/index" tplModelManageDownload = "repo/modelmanage/download" tplModelInfo = "repo/modelmanage/showinfo" - tplCreateLocalModelInfo = "repo/modelmanage/create_local_1" - tplCreateLocalForUploadModelInfo = "repo/modelmanage/create_local_2" + tplCreateLocalModelInfo = "repo/modelmanage/create_local" + tplCreateLocalForUploadModelInfo = "repo/modelmanage/fileupload" tplCreateOnlineModelInfo = "repo/modelmanage/create_online" MODEL_LATEST = 1 @@ -75,14 +75,12 @@ func saveModelByParameters(jobId string, versionName string, name string, versio } } } - cloudType := aiTask.Type + modelSelectedFile := ctx.Query("modelSelectedFile") //download model zip //train type - if aiTask.ComputeResource == models.NPUResource || aiTask.ComputeResource == models.GCUResource { - cloudType = models.TypeCloudBrainTwo - } else if aiTask.ComputeResource == models.GPUResource { - cloudType = models.TypeCloudBrainOne - } + + cloudType := models.TypeCloudBrainTwo + spec, err := resource.GetCloudbrainSpec(aiTask.ID) if err == nil { specJson, _ := json.Marshal(spec) @@ -99,26 +97,27 @@ func saveModelByParameters(jobId string, versionName string, name string, versio aiTaskJson, _ := json.Marshal(aiTask) isPrivate := ctx.QueryBool("isPrivate") model := &models.AiModelManage{ - ID: id, - Version: version, - VersionCount: len(aimodels) + 1, - Label: label, - Name: name, - Description: 
description, - New: MODEL_LATEST, - Type: cloudType, - Path: modelPath, - Size: modelSize, - AttachmentId: aiTask.Uuid, - RepoId: aiTask.RepoID, - UserId: ctx.User.ID, - CodeBranch: aiTask.BranchName, - CodeCommitID: aiTask.CommitID, - Engine: int64(engine), - TrainTaskInfo: string(aiTaskJson), - Accuracy: string(accuracyJson), - Status: STATUS_COPY_MODEL, - IsPrivate: isPrivate, + ID: id, + Version: version, + VersionCount: len(aimodels) + 1, + Label: label, + Name: name, + Description: description, + New: MODEL_LATEST, + Type: cloudType, + Path: modelPath, + Size: modelSize, + AttachmentId: aiTask.Uuid, + RepoId: aiTask.RepoID, + UserId: ctx.User.ID, + CodeBranch: aiTask.BranchName, + CodeCommitID: aiTask.CommitID, + Engine: int64(engine), + TrainTaskInfo: string(aiTaskJson), + Accuracy: string(accuracyJson), + Status: STATUS_COPY_MODEL, + IsPrivate: isPrivate, + ComputeResource: aiTask.ComputeResource, } err = models.SaveModelToDb(model) @@ -145,7 +144,9 @@ func saveModelByParameters(jobId string, versionName string, name string, versio go asyncToCopyModel(aiTask, id, modelSelectedFile) log.Info("save model end.") - notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, id, name, models.ActionCreateNewModelTask) + if !model.IsPrivate { + notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, id, name, models.ActionCreateNewModelTask) + } return id, nil } @@ -157,6 +158,7 @@ func asyncToCopyModel(aiTask *models.Cloudbrain, id string, modelSelectedFile st log.Info("download model from CloudBrainTwo faild." + err.Error()) } else { updateStatus(id, modelSize, STATUS_FINISHED, modelPath, "") + insertModelFile(id) } } else if aiTask.ComputeResource == models.GPUResource { @@ -166,7 +168,24 @@ func asyncToCopyModel(aiTask *models.Cloudbrain, id string, modelSelectedFile st log.Info("download model from CloudBrainOne faild." 
+ err.Error()) } else { updateStatus(id, modelSize, STATUS_FINISHED, modelPath, "") + insertModelFile(id) + } + } +} + +func insertModelFile(id string) { + model, _ := models.QueryModelById(id) + files, err := storage.GetAllObjectByBucketAndPrefix(setting.Bucket, model.Path[len(setting.Bucket)+1:]) + if err != nil { + log.Info("Failed to query model size from obs. id=" + id) + } + for _, file := range files { + modelFile := &models.AiModelFile{ + ModelID: id, + Name: file.FileName, + Size: file.Size, } + models.SaveModelFile(modelFile) } } @@ -229,13 +248,23 @@ func SaveLocalModel(ctx *context.Context) { engine := ctx.QueryInt("engine") taskType := ctx.QueryInt("type") isPrivate := ctx.QueryBool("isPrivate") + if ctx.Repo.Repository.IsPrivate { + if !isPrivate { + re["msg"] = "Private repo cannot create public model." + ctx.JSON(200, re) + return + } + } modelActualPath := "" + computeResource := "" if taskType == models.TypeCloudBrainOne { destKeyNamePrefix := Model_prefix + models.AttachmentRelativePath(id) + "/" modelActualPath = setting.Attachment.Minio.Bucket + "/" + destKeyNamePrefix + computeResource = models.GPUResource } else if taskType == models.TypeCloudBrainTwo { destKeyNamePrefix := Model_prefix + models.AttachmentRelativePath(id) + "/" modelActualPath = setting.Bucket + "/" + destKeyNamePrefix + computeResource = models.NPUResource } else { re["msg"] = "type is error." 
ctx.JSON(200, re) @@ -257,25 +286,26 @@ func SaveLocalModel(ctx *context.Context) { } } model := &models.AiModelManage{ - ID: id, - Version: version, - ModelType: MODEL_LOCAL_TYPE, - VersionCount: len(aimodels) + 1, - Label: label, - Name: name, - Description: description, - New: MODEL_LATEST, - Type: taskType, - Path: modelActualPath, - Size: 0, - AttachmentId: "", - RepoId: repoId, - UserId: ctx.User.ID, - Engine: int64(engine), - TrainTaskInfo: "", - Accuracy: "", - Status: STATUS_FINISHED, - IsPrivate: isPrivate, + ID: id, + Version: version, + ModelType: MODEL_LOCAL_TYPE, + VersionCount: len(aimodels) + 1, + Label: label, + Name: name, + Description: description, + New: MODEL_LATEST, + Type: taskType, + Path: modelActualPath, + Size: 0, + AttachmentId: "", + RepoId: repoId, + UserId: ctx.User.ID, + Engine: int64(engine), + TrainTaskInfo: "", + Accuracy: "", + Status: STATUS_FINISHED, + IsPrivate: isPrivate, + ComputeResource: computeResource, } err := models.SaveModelToDb(model) @@ -302,7 +332,9 @@ func SaveLocalModel(ctx *context.Context) { models.UpdateRepositoryUnits(ctx.Repo.Repository, units, deleteUnitTypes) log.Info("save model end.") - notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, id, name, models.ActionCreateNewModelTask) + if !model.IsPrivate { + notification.NotifyOtherTask(ctx.User, ctx.Repo.Repository, id, name, models.ActionCreateNewModelTask) + } re["code"] = "0" re["id"] = id ctx.JSON(200, re) @@ -316,27 +348,33 @@ func getSize(files []storage.FileInfo) int64 { return size } -func UpdateModelSize(modeluuid string) { +func UpdateModelSize(modeluuid string, objectName string) { model, err := models.QueryModelById(modeluuid) if err == nil { var size int64 - if model.Type == models.TypeCloudBrainOne { - if strings.HasPrefix(model.Path, setting.Attachment.Minio.Bucket+"/"+Model_prefix) { - files, err := storage.GetAllObjectByBucketAndPrefixMinio(setting.Attachment.Minio.Bucket, model.Path[len(setting.Attachment.Minio.Bucket)+1:]) - if 
err != nil { - log.Info("Failed to query model size from minio. id=" + modeluuid) - } - size = getSize(files) - models.ModifyModelSize(modeluuid, size) + if strings.HasPrefix(model.Path, setting.Bucket+"/"+Model_prefix) { + files, err := storage.GetAllObjectByBucketAndPrefix(setting.Bucket, model.Path[len(setting.Bucket)+1:]) + if err != nil { + log.Info("Failed to query model size from obs. id=" + modeluuid) } - } else if model.Type == models.TypeCloudBrainTwo { - if strings.HasPrefix(model.Path, setting.Bucket+"/"+Model_prefix) { - files, err := storage.GetAllObjectByBucketAndPrefix(setting.Bucket, model.Path[len(setting.Bucket)+1:]) - if err != nil { - log.Info("Failed to query model size from obs. id=" + modeluuid) + size = getSize(files) + models.ModifyModelSize(modeluuid, size) + modelFileName := objectName + index := strings.LastIndex(objectName, "/") + if index > 0 { + modelFileName = objectName[index+1:] + } + log.Info("modelFileName=" + modelFileName) + for _, file := range files { + log.Info("fileName=" + file.FileName) + if file.FileName == modelFileName { + modelFile := &models.AiModelFile{ + ModelID: modeluuid, + Name: file.FileName, + Size: file.Size, + } + models.SaveModelFile(modelFile) } - size = getSize(files) - models.ModifyModelSize(modeluuid, size) } } if model.Size == 0 && size > 0 { @@ -365,6 +403,14 @@ func SaveModel(ctx *context.Context) { re := map[string]string{ "code": "-1", } + isPrivate := ctx.QueryBool("isPrivate") + if ctx.Repo.Repository.IsPrivate { + if !isPrivate { + re["msg"] = "Private repo cannot create public model." + ctx.JSON(200, re) + return + } + } if JobId == "" || VersionName == "" { re["msg"] = "JobId or VersionName is null." 
ctx.JSON(200, re) @@ -427,10 +473,10 @@ func downloadModelFromCloudBrainTwo(modelUUID string, jobName string, parentDir } func downloadModelFromCloudBrainOne(modelUUID string, jobName string, parentDir string, trainUrl string, modelSelectedFile string) (string, int64, error) { - modelActualPath := storage.GetMinioPath(jobName, "/model/") - log.Info("modelActualPath=" + modelActualPath) - modelSrcPrefix := setting.CBCodePathPrefix + jobName + "/model/" destKeyNamePrefix := Model_prefix + models.AttachmentRelativePath(modelUUID) + "/" + + modelSrcPrefix := setting.CBCodePathPrefix + jobName + "/model/" + //destKeyNamePrefix := Model_prefix + models.AttachmentRelativePath(modelUUID) + "/" bucketName := setting.Attachment.Minio.Bucket log.Info("destKeyNamePrefix=" + destKeyNamePrefix + " modelSrcPrefix=" + modelSrcPrefix + " bucket=" + bucketName) filterFiles := strings.Split(modelSelectedFile, ";") @@ -442,13 +488,25 @@ func downloadModelFromCloudBrainOne(modelUUID string, jobName string, parentDir if float64(totalSize) > setting.MaxModelSize*MODEL_MAX_SIZE { return "", 0, errors.New("Cannot create model, as model is exceed " + fmt.Sprint(setting.MaxModelSize) + "G.") } - size, err := storage.MinioCopyFiles(bucketName, modelSrcPrefix, destKeyNamePrefix, filterFiles) - if err == nil { - dataActualPath := bucketName + "/" + destKeyNamePrefix - return dataActualPath, size, nil - } else { - return "", 0, nil + + for i, modelFile := range Files { + reader, err := storage.Attachments.DownloadAFile(bucketName, modelFile) + if err == nil { + defer reader.Close() + log.Info("upload to bucket=" + setting.Bucket + " objectKey=" + destKeyNamePrefix + filterFiles[i]) + obsErr := storage.PutReaderToObs(setting.Bucket, destKeyNamePrefix+filterFiles[i], reader) + if obsErr != nil { + log.Info("upload to obs failed.err=" + obsErr.Error()) + return "", 0, obsErr + } + } + } + + //size, err := storage.MinioCopyFiles(bucketName, modelSrcPrefix, destKeyNamePrefix, filterFiles) + dataActualPath 
:= setting.Bucket + "/" + destKeyNamePrefix + //dataActualPath := bucketName + "/" + destKeyNamePrefix + return dataActualPath, totalSize, nil + } func DeleteModelFile(ctx *context.Context) { log.Info("delete model start.") @@ -458,47 +516,32 @@ func DeleteModelFile(ctx *context.Context) { if err == nil { var totalSize int64 if model.ModelType == MODEL_LOCAL_TYPE { - if model.Type == models.TypeCloudBrainOne { - bucketName := setting.Attachment.Minio.Bucket - objectName := model.Path[len(bucketName)+1:] + fileName - log.Info("delete bucket=" + bucketName + " path=" + objectName) - if strings.HasPrefix(model.Path, bucketName+"/"+Model_prefix) { - totalSize = storage.MinioGetFilesSize(bucketName, []string{objectName}) - err := storage.Attachments.DeleteDir(objectName) - if err != nil { - log.Info("Failed to delete model. id=" + id) - re := map[string]string{ - "code": "-1", - } - re["msg"] = err.Error() - ctx.JSON(200, re) - return - } else { - log.Info("delete minio file size is:" + fmt.Sprint(totalSize)) - models.ModifyModelSize(id, model.Size-totalSize) + + bucketName := setting.Bucket + objectName := model.Path[len(setting.Bucket)+1:] + fileName + log.Info("delete bucket=" + setting.Bucket + " path=" + objectName) + if strings.HasPrefix(model.Path, bucketName+"/"+Model_prefix) { + totalSize = storage.ObsGetFilesSize(bucketName, []string{objectName}) + err := storage.ObsRemoveObject(bucketName, objectName) + if err != nil { + log.Info("Failed to delete model. 
id=" + id) + re := map[string]string{ + "code": "-1", } - } - } else if model.Type == models.TypeCloudBrainTwo { - bucketName := setting.Bucket - objectName := model.Path[len(setting.Bucket)+1:] + fileName - log.Info("delete bucket=" + setting.Bucket + " path=" + objectName) - if strings.HasPrefix(model.Path, bucketName+"/"+Model_prefix) { - totalSize = storage.ObsGetFilesSize(bucketName, []string{objectName}) - err := storage.ObsRemoveObject(bucketName, objectName) - if err != nil { - log.Info("Failed to delete model. id=" + id) - re := map[string]string{ - "code": "-1", - } - re["msg"] = err.Error() - ctx.JSON(200, re) - return - } else { - log.Info("delete obs file size is:" + fmt.Sprint(totalSize)) - models.ModifyModelSize(id, model.Size-totalSize) + re["msg"] = err.Error() + ctx.JSON(200, re) + return + } else { + log.Info("delete obs file size is:" + fmt.Sprint(totalSize)) + models.ModifyModelSize(id, model.Size-totalSize) + modelFile := &models.AiModelFile{ + Name: fileName, + ModelID: id, } + models.DeleteModelFile(modelFile) } } + } if (model.Size - totalSize) <= 0 { go repository.ResetRepoModelNum(model.RepoId) @@ -534,24 +577,12 @@ func deleteModelByID(ctx *context.Context, id string) error { } if err == nil { - if model.Type == models.TypeCloudBrainOne { - bucketName := setting.Attachment.Minio.Bucket - log.Info("bucket=" + bucketName + " path=" + model.Path) - if strings.HasPrefix(model.Path, bucketName+"/"+Model_prefix) { - err := storage.Attachments.DeleteDir(model.Path[len(bucketName)+1:]) - if err != nil { - log.Info("Failed to delete model. id=" + id) - return err - } - } - } else if model.Type == models.TypeCloudBrainTwo { - log.Info("bucket=" + setting.Bucket + " path=" + model.Path) - if strings.HasPrefix(model.Path, setting.Bucket+"/"+Model_prefix) { - err := storage.ObsRemoveObject(setting.Bucket, model.Path[len(setting.Bucket)+1:]) - if err != nil { - log.Info("Failed to delete model. 
id=" + id) - return err - } + log.Info("bucket=" + setting.Bucket + " path=" + model.Path) + if strings.HasPrefix(model.Path, setting.Bucket+"/"+Model_prefix) { + err := storage.ObsRemoveObject(setting.Bucket, model.Path[len(setting.Bucket)+1:]) + if err != nil { + log.Info("Failed to delete model. id=" + id) + return err } } @@ -908,20 +939,37 @@ func QueryModelById(ctx *context.Context) { func ShowSingleModel(ctx *context.Context) { name := ctx.Query("name") - log.Info("Show single ModelInfo start.name=" + name) - models := models.QueryModelByName(name, ctx.Repo.Repository.ID) - - userIds := make([]int64, len(models)) - for i, model := range models { + modelArrays := models.QueryModelByName(name, ctx.Repo.Repository.ID) + modelResult := make([]*models.AiModelManage, 0) + isCanReadPrivateModel := isQueryPrivateModel(ctx) + userIds := make([]int64, len(modelArrays)) + for i, model := range modelArrays { model.IsCanOper = isOperModifyOrDelete(ctx, model.UserId) model.IsCanDownload = isCanDownload(ctx, model) model.IsCanDelete = isCanDelete(ctx, model.UserId) + + model.RepoName = ctx.Repo.Repository.Name + model.RepoOwnerName = ctx.Repo.Repository.OwnerName + model.RepoDisplayName = ctx.Repo.Repository.DisplayName() + userIds[i] = model.UserId + if ctx.User != nil { + re := models.QueryModelCollectByUserId(model.ID, ctx.User.ID) + if re != nil && len(re) > 0 { + model.IsCollected = true + } + } + if model.IsPrivate { + if !isCanReadPrivateModel { + continue + } + } + modelResult = append(modelResult, model) } userNameMap := queryUserName(userIds) - for _, model := range models { + for _, model := range modelResult { removeIpInfo(model) value := userNameMap[model.UserId] if value != nil { @@ -929,7 +977,7 @@ func ShowSingleModel(ctx *context.Context) { model.UserRelAvatarLink = value.RelAvatarLink() } } - ctx.JSON(http.StatusOK, models) + ctx.JSON(http.StatusOK, modelResult) } func removeIpInfo(model *models.AiModelManage) { @@ -1002,6 +1050,7 @@ func 
SetModelCount(ctx *context.Context) { New: MODEL_LATEST, IsOnlyThisRepo: true, Status: -1, + FrameFilter: -1, IsQueryPrivate: isQueryPrivate, }) ctx.Data["MODEL_COUNT"] = count @@ -1140,6 +1189,7 @@ func ShowModelPageInfo(ctx *context.Context) { IsOnlyThisRepo: true, Status: -1, IsQueryPrivate: isQueryPrivate, + FrameFilter: -1, }) if err != nil { ctx.ServerError("Cloudbrain", err) @@ -1171,16 +1221,6 @@ func ShowModelPageInfo(ctx *context.Context) { ctx.JSON(http.StatusOK, mapInterface) } -func ModifyModel(id string, description string) error { - err := models.ModifyModelDescription(id, description) - if err == nil { - log.Info("modify success.") - } else { - log.Info("Failed to modify.id=" + id + " desc=" + description + " error:" + err.Error()) - } - return err -} - func ModifyModelPrivate(ctx *context.Context) { id := ctx.Query("id") isPrivate := ctx.QueryBool("isPrivate") @@ -1230,34 +1270,34 @@ func ModifyModelInfo(ctx *context.Context) { ctx.JSON(200, re) return } - if task.ModelType == MODEL_LOCAL_TYPE { - name := ctx.Query("name") - label := ctx.Query("label") - description := ctx.Query("description") - engine := ctx.QueryInt("engine") - isPrivate := ctx.QueryBool("isPrivate") - aimodels := models.QueryModelByName(name, task.RepoId) - if aimodels != nil && len(aimodels) > 0 { - if len(aimodels) == 1 { - if aimodels[0].ID != task.ID { - re["msg"] = ctx.Tr("repo.model.manage.create_error") - ctx.JSON(200, re) - return - } - } else { + + name := ctx.Query("name") + label := ctx.Query("label") + description := ctx.Query("description") + engine := ctx.QueryInt("engine") + isPrivate := ctx.QueryBool("isPrivate") + aimodels := models.QueryModelByName(name, task.RepoId) + if aimodels != nil && len(aimodels) > 0 { + if len(aimodels) == 1 { + if aimodels[0].ID != task.ID { re["msg"] = ctx.Tr("repo.model.manage.create_error") ctx.JSON(200, re) return } + } else { + re["msg"] = ctx.Tr("repo.model.manage.create_error") + ctx.JSON(200, re) + return + } + } + err = 
models.ModifyLocalModel(id, name, label, description, engine, isPrivate) + if task.Name != name { + aimodels = models.QueryModelByName(task.Name, task.RepoId) + if aimodels != nil && len(aimodels) > 0 { + for _, model := range aimodels { + models.ModifyLocalModel(model.ID, name, model.Label, model.Description, int(model.Engine), model.IsPrivate) + } } - err = models.ModifyLocalModel(id, name, label, description, engine, isPrivate) - - } else { - label := ctx.Query("label") - description := ctx.Query("description") - engine := task.Engine - name := task.Name - err = models.ModifyLocalModel(id, name, label, description, int(engine), task.IsPrivate) } if err != nil { @@ -1288,11 +1328,12 @@ func QueryModelListForPredict(ctx *context.Context) { PageSize: pageSize, }, RepoID: repoId, - Type: ctx.QueryInt("type"), + Type: -1, New: -1, Status: 0, IsOnlyThisRepo: true, IsQueryPrivate: isQueryPrivate, + FrameFilter: -1, }) if err != nil { ctx.ServerError("Cloudbrain", err) @@ -1349,26 +1390,27 @@ func QueryOneLevelModelFile(ctx *context.Context) { model, err := models.QueryModelById(id) if err != nil { log.Error("no such model!", err.Error()) - ctx.ServerError("no such model:", err) + ctx.JSON(http.StatusOK, nil) return } + ctx.JSON(http.StatusOK, queryOneLevelModelFile(model, parentDir)) +} + +func queryOneLevelModelFile(model *models.AiModelManage, parentDir string) []storage.FileInfo { + fileinfos := make([]storage.FileInfo, 0) if model.Type == models.TypeCloudBrainTwo { log.Info("TypeCloudBrainTwo list model file.") - prefix := model.Path[len(setting.Bucket)+1:] - fileinfos, _ := storage.GetOneLevelAllObjectUnderDir(setting.Bucket, prefix, parentDir) - if fileinfos == nil { - fileinfos = make([]storage.FileInfo, 0) - } - ctx.JSON(http.StatusOK, fileinfos) + prefix := model.Path[len(setting.Bucket)+1:] + parentDir + fileinfos, _ = storage.GetOneLevelAllObjectUnderDir(setting.Bucket, prefix, "") } else if model.Type == models.TypeCloudBrainOne { 
log.Info("TypeCloudBrainOne list model file.") - prefix := model.Path[len(setting.Attachment.Minio.Bucket)+1:] - fileinfos, _ := storage.GetOneLevelAllObjectUnderDirMinio(setting.Attachment.Minio.Bucket, prefix, parentDir) - if fileinfos == nil { - fileinfos = make([]storage.FileInfo, 0) - } - ctx.JSON(http.StatusOK, fileinfos) + prefix := model.Path[len(setting.Attachment.Minio.Bucket)+1:] + parentDir + fileinfos, _ = storage.GetOneLevelAllObjectUnderDirMinio(setting.Attachment.Minio.Bucket, prefix, "") + } + if fileinfos == nil { + fileinfos = make([]storage.FileInfo, 0) } + return fileinfos } func CreateLocalModel(ctx *context.Context) { @@ -1392,3 +1434,60 @@ func CreateOnlineModel(ctx *context.Context) { ctx.HTML(200, tplCreateOnlineModelInfo) } + +func QueryModelCollectNum(ctx *context.Context) { + id := ctx.Query("id") + record := models.QueryModelCollectNum(id) + ctx.JSON(200, record) +} + +func ModelCollect(ctx *context.Context) { + id := ctx.Query("id") + isCollected := ctx.QueryBool("collected") + re := map[string]string{ + "code": "-1", + } + task, err := models.QueryModelById(id) + if err != nil || task == nil { + re["msg"] = err.Error() + log.Error("no such model!", err.Error()) + ctx.JSON(200, re) + return + } + if ctx.User == nil { + re["msg"] = "user not login." 
+ re["code"] = "401" + ctx.JSON(200, re) + return + } + record := models.QueryModelCollectByUserId(id, ctx.User.ID) + if isCollected { + if record == nil || len(record) == 0 { + log.Info("user collect the model.user id=" + fmt.Sprint(ctx.User.ID) + " model id=" + id) + err := models.SaveModelCollect(&models.AiModelCollect{ + ModelID: id, + UserId: ctx.User.ID, + }) + if err == nil { + re["code"] = "0" + } else { + re["msg"] = err.Error() + } + } + } else { + if record != nil && len(record) > 0 { + log.Info("user delete collect the model.user id=" + fmt.Sprint(ctx.User.ID) + " model id=" + id) + err := models.DeleteModelCollect(&models.AiModelCollect{ + ID: record[0].ID, + }) + if err == nil { + re["code"] = "0" + } else { + re["msg"] = err.Error() + } + } + } + num := models.QueryModelCollectNum(id) + models.ModifyModelCollectedNum(id, num) + ctx.JSON(200, re) +} diff --git a/routers/repo/ai_model_square.go b/routers/repo/ai_model_square.go new file mode 100644 index 0000000000..5cf89a2e99 --- /dev/null +++ b/routers/repo/ai_model_square.go @@ -0,0 +1,515 @@ +package repo + +import ( + "encoding/json" + "fmt" + "io/ioutil" + "net/http" + "strconv" + "strings" + + "code.gitea.io/gitea/models" + "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/log" + "code.gitea.io/gitea/modules/markup/markdown" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/storage" + "code.gitea.io/gitea/services/cloudbrain/modelmanage" + "code.gitea.io/gitea/services/repository" +) + +const ( + tplModelSquareIndex = "model/square/index" + tplModelSquareReadMe = "repo/modelmanage/readme" + tplModelFileList = "repo/modelmanage/filelist" + tplModelSetting = "repo/modelmanage/setting" + tplModelEvolutionMap = "repo/modelmanage/evolution_map" + README_FILE_NAME = "README.md" +) + +type ModelMap struct { + Type int //0:repo; 1:model + IsParent bool + IsCurrent bool + RepoName string + RepoOwnerName string + RepoDisplayName string + RepoId int64 + Model 
*models.AiModelManage + Next []*ModelMap +} + +func ModelSquareTmpl(ctx *context.Context) { + ctx.HTML(200, tplModelSquareIndex) +} + +func ModelSquareData(ctx *context.Context) { + log.Info("ShowModel Square Info start.") + page := ctx.QueryInt("page") + if page <= 0 { + page = 1 + } + pageSize := ctx.QueryInt("pageSize") + if pageSize <= 0 { + pageSize = setting.UI.IssuePagingNum + } + isRecommend := ctx.QueryBool("recommend") + queryType := ctx.QueryInt("queryType") + labelFilter := ctx.Query("label") + frameFilterStr := ctx.Query("frame") + orderBy := ctx.Query("orderBy") + Namelike := ctx.Query("q") + TypeStr := ctx.Query("type") + needModelFile := ctx.QueryBool("needModelFile") + frameFilterInt := -1 + if frameFilterStr != "" { + frameFilterInt, _ = strconv.Atoi(frameFilterStr) + } + notNeedEmpty := ctx.QueryBool("notNeedEmpty") + computeResourceFilter := ctx.Query("compute_resource") + var IsQueryPrivate bool + var user_id int64 + var IsQueryCollect bool + var collected_user_id int64 + var repo_id int64 + var typeInt int + typeInt = -1 + if TypeStr != "" { + //typeInt, _ = strconv.Atoi(TypeStr) + } + if queryType == 1 { + IsQueryPrivate = false + user_id = 0 + } else if queryType == 2 { + IsQueryPrivate = true + if ctx.User == nil { + log.Info("the user not login.") + ctx.JSON(http.StatusOK, nil) + return + } + user_id = ctx.User.ID + } else if queryType == 3 { + IsQueryCollect = true + IsQueryPrivate = true + user_id = 0 + if ctx.User == nil { + log.Info("the user not login.") + ctx.JSON(http.StatusOK, nil) + return + } + collected_user_id = ctx.User.ID + } else if queryType == 4 { + if ctx.User == nil { + log.Info("the user not login.") + ctx.JSON(http.StatusOK, nil) + return + } + IsQueryPrivate = true + repoName := ctx.Query("repoName") + repoOwnerName := ctx.Query("repoOwnerName") + repo, err := models.GetRepositoryByOwnerAndName(repoOwnerName, repoName) + if err == nil { + repo_id = repo.ID + } else { + log.Info("the repo is not exist.repoName=" + 
repoName + " repoOwnerName=" + repoOwnerName) + ctx.JSON(http.StatusOK, nil) + return + } + } else { + log.Info("not support") + ctx.JSON(http.StatusOK, nil) + return + } + SortType := "ai_model_manage.recommend desc,ai_model_manage.collected_count desc,ai_model_manage.download_count desc,ai_model_manage.reference_count desc" + if orderBy != "" { + SortType = "ai_model_manage." + orderBy + " DESC" + } + modelResult, count, err := models.QueryModel(&models.AiModelQueryOptions{ + ListOptions: models.ListOptions{ + Page: page, + PageSize: pageSize, + }, + Type: typeInt, + New: -1, + Status: -1, + IsQueryPrivate: IsQueryPrivate, + IsRecommend: isRecommend, + UserID: user_id, + IsCollected: IsQueryCollect, + CollectedUserId: collected_user_id, + LabelFilter: labelFilter, + FrameFilter: frameFilterInt, + ComputeResourceFilter: computeResourceFilter, + Namelike: Namelike, + SortType: SortType, + RepoID: repo_id, + NotNeedEmpty: notNeedEmpty, + }) + if err != nil { + ctx.ServerError("Cloudbrain", err) + return + } + userIds := make([]int64, len(modelResult)) + modelIds := make([]string, len(modelResult)) + repoIds := make([]int64, len(modelResult)) + for i, model := range modelResult { + userIds[i] = model.UserId + modelIds[i] = model.ID + repoIds[i] = model.RepoId + } + repoInfo, err := queryRepoInfoByIds(repoIds) + userNameMap := queryUserName(userIds) + var modelCollect map[string]*models.AiModelCollect + if ctx.User != nil && queryType != 4 { + modelCollect = models.QueryModelCollectedStatus(modelIds, ctx.User.ID) + } + + for _, model := range modelResult { + //removeIpInfo(model) + model.TrainTaskInfo = "" + value := userNameMap[model.UserId] + if value != nil { + model.UserName = value.Name + model.UserRelAvatarLink = value.RelAvatarLink() + } + if repoInfo != nil { + repo := repoInfo[model.RepoId] + if repo != nil { + model.RepoName = repo.Name + model.RepoOwnerName = repo.OwnerName + model.RepoDisplayName = repo.DisplayName() + } + } + if ctx.User != nil && 
modelCollect != nil { + value := modelCollect[model.ID] + if value != nil { + model.IsCollected = true + } + } else { + model.IsCollected = false + } + if needModelFile && len(model.Path) > 0 { + //查询模型文件列表 + model.ModelFileList = modelmanage.QueryModelFileByModel(model) + } + } + mapInterface := make(map[string]interface{}) + mapInterface["data"] = modelResult + mapInterface["count"] = count + ctx.JSON(http.StatusOK, mapInterface) +} + +func queryRepoInfoByIds(intSlice []int64) (map[int64]*models.Repository, error) { + keys := make(map[int64]string) + uniqueElements := []int64{} + for _, entry := range intSlice { + if _, value := keys[entry]; !value { + keys[entry] = "" + uniqueElements = append(uniqueElements, entry) + } + } + re, err := models.GetRepositoriesMapByIDs(uniqueElements) + return re, err +} + +func ModelReadMeTmpl(ctx *context.Context) { + ctx.HTML(200, tplModelSquareReadMe) +} + +func ModelFileListTmpl(ctx *context.Context) { + ctx.HTML(200, tplModelFileList) +} + +func ModelFileSettingTmpl(ctx *context.Context) { + ctx.HTML(200, tplModelSetting) +} + +func ModelEvolutionMapTmpl(ctx *context.Context) { + ctx.HTML(200, tplModelEvolutionMap) +} + +func setModelUser(model *models.AiModelManage) { + user, err := models.GetUserByID(model.UserId) + if err == nil { + model.UserName = user.Name + model.UserRelAvatarLink = user.RelAvatarLink() + } +} + +func setModelRepo(model *models.AiModelManage) { + repo, err := models.GetRepositoryByID(model.RepoId) + if err == nil { + model.RepoName = repo.Name + model.RepoOwnerName = repo.OwnerName + model.RepoDisplayName = repo.DisplayName() + } +} + +func ModelEvolutionMapData(ctx *context.Context) { + id := ctx.Query("id") + model, err := models.QueryModelById(id) + re := map[string]interface{}{ + "code": "-1", + } + if err == nil { + removeIpInfo(model) + repo, err := models.GetRepositoryByID(model.RepoId) + model.RepoName = repo.Name + model.RepoOwnerName = repo.OwnerName + model.RepoDisplayName = 
repo.DisplayName() + if err == nil { + setModelUser(model) + currentNode := &ModelMap{ + Type: 1, + IsCurrent: true, + RepoName: repo.Name, + RepoOwnerName: repo.OwnerName, + RepoDisplayName: repo.DisplayName(), + RepoId: repo.ID, + Model: model, + } + ParentNode := findParent(model) + if ParentNode != nil { + nexts := make([]*ModelMap, 0) + nexts = append(nexts, currentNode) + ParentNode.Next = nexts + } else { + ParentNode = currentNode + } + findChild(currentNode) + re["code"] = "0" + re["node"] = ParentNode + ctx.JSON(200, ParentNode) + } + } else { + re["msg"] = "No such model." + ctx.JSON(200, re) + } +} + +func findParent(model *models.AiModelManage) *ModelMap { + if model.TrainTaskInfo != "" { + var task models.Cloudbrain + err := json.Unmarshal([]byte(model.TrainTaskInfo), &task) + if err != nil { + log.Info("error=" + err.Error()) + } else { + log.Info("find parent model name." + task.ModelName) + if task.ModelName != "" { + if task.ModelId != "" { + parentModel, err := models.QueryModelById(task.ModelId) + setModelRepo(parentModel) + setModelUser(parentModel) + if err == nil { + re := &ModelMap{ + Type: 1, + IsParent: true, + Model: parentModel, + } + return re + } + } else { + modelList := models.QueryModelByName(task.ModelName, task.RepoID) + if modelList != nil && len(modelList) > 0 { + for _, parentModel := range modelList { + setModelUser(parentModel) + setModelRepo(parentModel) + if parentModel.Version == task.ModelVersion { + re := &ModelMap{ + Type: 1, + IsParent: true, + Model: parentModel, + } + return re + } + } + } + } + } + } + } + return nil +} + +func findChild(currentNode *ModelMap) { + log.Info("find child start.") + if currentNode.Model != nil { + currentModel := currentNode.Model + re, err := models.GetCloudBrainByModelId(currentModel.ID) + if err != nil || len(re) == 0 { + re, err = models.GetCloudBrainByRepoIdAndModelName(currentModel.RepoId, currentModel.Name) + } + log.Info("start to load child model.") + if err == nil && len(re) > 
0 { + repoNodes := getRepoNodes(re) + currentNode.Next = repoNodes + for _, node := range repoNodes { + childModels := models.QueryModelByRepoId(node.RepoId) + childNodes := make([]*ModelMap, 0) + if childModels != nil && len(childModels) > 0 { + for _, childModel := range childModels { + if childModel.TrainTaskInfo != "" { + childModel.RepoName = node.RepoName + childModel.RepoOwnerName = node.RepoOwnerName + childModel.RepoDisplayName = node.RepoDisplayName + log.Info("childModel.RepoName=" + childModel.RepoName) + log.Info("childModel.RepoOwnerName=" + childModel.RepoOwnerName) + var task models.Cloudbrain + err := json.Unmarshal([]byte(childModel.TrainTaskInfo), &task) + if err != nil { + log.Info("error=" + err.Error()) + } else { + log.Info("task.ModelId=%v,currentModel.ID=%v", task.ModelId, currentModel.ID) + if task.ModelId != "" && task.ModelId == currentModel.ID { + setModelUser(childModel) + modelMap := &ModelMap{ + Type: 1, + Model: childModel, + } + childNodes = append(childNodes, modelMap) + } else { + log.Info("task.ModelName=%v,currentModel.Name=%v", task.ModelName, currentModel.Name) + log.Info("task.ModelVersion=%v,currentModel.Version=%v", task.ModelVersion, currentModel.Version) + if task.ModelName == currentModel.Name && task.ModelVersion == currentModel.Version { + setModelUser(childModel) + modelMap := &ModelMap{ + Type: 1, + Model: childModel, + } + childNodes = append(childNodes, modelMap) + } + } + } + } + } + + } + node.Next = childNodes + for _, child := range childNodes { + findChild(child) + } + } + } + } else { + log.Info("the current model is nil.") + } + +} + +func getRepoNodes(re []*models.Cloudbrain) []*ModelMap { + result := make([]*ModelMap, 0) + repoMap := make(map[int64]string, 0) + for _, task := range re { + repo, err := models.GetRepositoryByID(task.RepoID) + if err == nil { + if _, ok := repoMap[repo.ID]; !ok { + modelMap := &ModelMap{ + Type: 0, + RepoName: repo.Name, + RepoOwnerName: repo.OwnerName, + RepoDisplayName: 
repo.DisplayName(), + RepoId: repo.ID, + } + result = append(result, modelMap) + } + repoMap[repo.ID] = "true" + } + } + return result +} + +func ModifyModelReadMe(ctx *context.Context) { + id := ctx.Query("id") + model, err := models.QueryModelById(id) + re := map[string]string{ + "code": "-1", + } + if err == nil { + content := ctx.Query("content") + path := Model_prefix + models.AttachmentRelativePath(id) + "/" + if model.Type == models.TypeCloudBrainTwo { + err = storage.PutStringToObs(setting.Bucket, path+README_FILE_NAME, content) + if err != nil { + re["msg"] = "Failed to created readme file." + log.Info("Failed to created readme file. as:" + err.Error()) + } else { + re["code"] = "0" + } + } else { + re["msg"] = "Cannot support the model type=" + fmt.Sprint(model.Type) + } + ctx.JSON(200, re) + } else { + re["msg"] = "No such model." + ctx.JSON(200, re) + } +} + +func QueryModelReadMe(ctx *context.Context) { + id := ctx.Query("id") + model, err := models.QueryModelById(id) + re := map[string]string{ + "code": "-1", + } + if err == nil { + files := queryOneLevelModelFile(model, "") + find := false + var content []byte + for _, file := range files { + if strings.ToLower(file.FileName) == strings.ToLower(README_FILE_NAME) { + find = true + path := Model_prefix + models.AttachmentRelativePath(id) + "/" + body, err := storage.ObsDownloadAFile(setting.Bucket, path+file.FileName) + if err != nil { + log.Info("download file failed: %s\n", err.Error()) + break + } else { + defer body.Close() + content, err = ioutil.ReadAll(body) + } + } + } + if find { + re["isExistMDFile"] = "true" + re["fileName"] = README_FILE_NAME + strc := string(content) + re["content"] = strc + re["htmlcontent"] = string(markdown.RenderRaw([]byte(strc), "", false)) + } else { + re["isExistMDFile"] = "false" + re["fileName"] = README_FILE_NAME + url := setting.RecommentRepoAddr + "model/" + README_FILE_NAME + result, err := repository.RecommendContentFromPromote(url) + if err == nil { + 
re["content"] = result + re["htmlcontent"] = string(markdown.RenderRaw([]byte(result), "", false)) + } + } + re["code"] = "0" + ctx.JSON(200, re) + } else { + re["msg"] = "No such model." + ctx.JSON(200, re) + } + +} + +func QueryModelLabel(ctx *context.Context) { + url := setting.RecommentRepoAddr + "model/label.json" + result, err := repository.RecommendContentFromPromote(url) + log.Info("label result=" + result) + remap := make([]map[string]string, 0) + if err == nil { + err = json.Unmarshal([]byte(result), &remap) + if err != nil { + log.Info("error=" + err.Error()) + } + } else { + log.Info("error=" + err.Error()) + } + if err == nil { + ctx.JSON(200, remap) + } else { + ctx.JSON(200, "") + } +} diff --git a/routers/repo/aisafety.go b/routers/repo/aisafety.go index bd323a1a5f..1e61b1f266 100644 --- a/routers/repo/aisafety.go +++ b/routers/repo/aisafety.go @@ -669,7 +669,7 @@ func createForNPU(ctx *context.Context, jobName string) error { log.Info("engine_id=" + fmt.Sprint(engineID)) poolID := ctx.Query("pool_id") repo := ctx.Repo.Repository - + modelId := ctx.Query("model_id") trainUrl := ctx.Query("pre_train_model_url") modelName := ctx.Query("model_name") modelVersion := ctx.Query("model_version") @@ -814,6 +814,7 @@ func createForNPU(ctx *context.Context, jobName string) error { ModelName: modelName, ModelVersion: modelVersion, CkptName: ckptName, + ModelId: modelId, ResultUrl: resultObsPath, Spec: spec, DatasetName: datasetNames, @@ -839,13 +840,14 @@ func createForGPU(ctx *context.Context, jobName string) error { evaluationIndex := ctx.Query("evaluation_index") Params := ctx.Query("run_para_list") specId := ctx.QueryInt64("spec_id") - TrainUrl := ctx.Query("pre_train_model_url") + //TrainUrl := ctx.Query("pre_train_model_url") CkptName := ctx.Query("ckpt_name") modelName := ctx.Query("model_name") + modelId := ctx.Query("model_id") modelVersion := ctx.Query("model_version") - ckptUrl := setting.Attachment.Minio.RealPath + TrainUrl + CkptName - 
log.Info("ckpt url:" + ckptUrl) + //ckptUrl := setting.Attachment.Minio.RealPath + TrainUrl + CkptName + //log.Info("ckpt url:" + ckptUrl) spec, err := resource.GetAndCheckSpec(ctx.User.ID, specId, models.FindSpecsOptions{ JobType: models.JobTypeBenchmark, ComputeResource: models.GPU, @@ -891,7 +893,11 @@ func createForGPU(ctx *context.Context, jobName string) error { return errors.New(ctx.Tr("cloudbrain.error.dataset_select")) } log.Info("Command=" + command) - + minioPreModelURL, err := dealModelInfo(modelId, jobName, CkptName) + if err != nil { + log.Error("Can not find model", err) + return errors.New(ctx.Tr("repo.modelconvert.manage.model_not_exist")) + } req := cloudbrain.GenerateCloudBrainTaskReq{ Ctx: ctx, DisplayJobName: displayJobName, @@ -902,7 +908,7 @@ func createForGPU(ctx *context.Context, jobName string) error { DatasetNames: datasetNames, DatasetInfos: datasetInfos, CodePath: storage.GetMinioPath(jobName, cloudbrain.CodeMountPath+"/"), - ModelPath: setting.Attachment.Minio.RealPath + TrainUrl, + ModelPath: setting.Attachment.Minio.RealPath + minioPreModelURL, BenchmarkPath: storage.GetMinioPath(jobName, cloudbrain.BenchMarkMountPath+"/"), Snn4ImageNetPath: storage.GetMinioPath(jobName, cloudbrain.Snn4imagenetMountPath+"/"), BrainScorePath: storage.GetMinioPath(jobName, cloudbrain.BrainScoreMountPath+"/"), @@ -915,6 +921,7 @@ func createForGPU(ctx *context.Context, jobName string) error { ModelName: modelName, ModelVersion: modelVersion, CkptName: CkptName, + ModelId: modelId, ResultPath: storage.GetMinioPath(jobName, cloudbrain.ResultPath+"/"), Spec: spec, LabelName: evaluationIndex, diff --git a/routers/repo/attachment_model.go b/routers/repo/attachment_model.go index efc7cbe087..f952b25f0c 100644 --- a/routers/repo/attachment_model.go +++ b/routers/repo/attachment_model.go @@ -314,7 +314,7 @@ func CompleteModelMultipart(ctx *context.Context) { return } //更新模型大小信息 - UpdateModelSize(modeluuid) + UpdateModelSize(modeluuid, fileChunk.ObjectName) 
ctx.JSON(200, map[string]string{ "result_code": "0", diff --git a/routers/repo/cloudbrain.go b/routers/repo/cloudbrain.go index 71e249bf7f..230129a574 100755 --- a/routers/repo/cloudbrain.go +++ b/routers/repo/cloudbrain.go @@ -15,8 +15,6 @@ import ( "time" "unicode/utf8" - "code.gitea.io/gitea/services/cloudbrain/modelmanage" - "code.gitea.io/gitea/services/lock" cloudbrainService "code.gitea.io/gitea/services/cloudbrain" @@ -152,6 +150,7 @@ func cloudBrainNewDataPrepare(ctx *context.Context, jobType string) error { ctx.Data["model_name"] = ctx.Cloudbrain.ModelName ctx.Data["label_name"] = ctx.Cloudbrain.LabelName ctx.Data["ckpt_name"] = ctx.Cloudbrain.CkptName + ctx.Data["model_id"] = ctx.Cloudbrain.ModelId ctx.Data["model_version"] = ctx.Cloudbrain.ModelVersion ctx.Data["pre_train_model_url"] = ctx.Cloudbrain.PreTrainModelUrl ctx.Data["compute_resource"] = ctx.Cloudbrain.ComputeResource @@ -390,19 +389,21 @@ func cloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) { } if form.ModelName != "" { //使用预训练模型训练 - _, err := models.QueryModelByPath(form.PreTrainModelUrl) + + req.ModelName = form.ModelName + req.LabelName = form.LabelName + req.CkptName = form.CkptName + req.ModelId = form.ModelId + req.ModelVersion = form.ModelVersion + minioPreModelURL, err := dealModelInfo(form.ModelId, jobName, form.CkptName) if err != nil { log.Error("Can not find model", err) cloudBrainNewDataPrepare(ctx, jobType) ctx.RenderWithErr(ctx.Tr("repo.modelconvert.manage.model_not_exist"), tpl, &form) return } - req.ModelName = form.ModelName - req.LabelName = form.LabelName - req.CkptName = form.CkptName - req.ModelVersion = form.ModelVersion - req.PreTrainModelPath = setting.Attachment.Minio.RealPath + form.PreTrainModelUrl - req.PreTrainModelUrl = form.PreTrainModelUrl + req.PreTrainModelPath = setting.Attachment.Minio.RealPath + minioPreModelURL + req.PreTrainModelUrl = minioPreModelURL } if form.IsContinue { // qizhi GPU 继续训练,将旧任务输出文件拷贝至新任务输出路径 @@ -430,6 +431,43 @@ 
func cloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) { } } +func dealModelInfo(modelId string, jobName string, ckptName string) (string, error) { + preModel, err := models.QueryModelById(modelId) + if err != nil { + log.Error("Can not find model", err) + return "", err + } + minioPreModelURL, err := downloadModelFromObs(preModel, jobName, cloudbrain.PretrainModelMountPath, ckptName) + if err != nil { + log.Error("Can not find model", err) + + return "", err + } + return minioPreModelURL, nil +} + +func downloadModelFromObs(preModel *models.AiModelManage, jobName, suffixPath string, ckptFileName string) (string, error) { + destPath := setting.CBCodePathPrefix + jobName + suffixPath + "/" + destFile := destPath + ckptFileName + returnStr := setting.Attachment.Minio.Bucket + "/" + destPath + srcUrl := preModel.Path[len(setting.Bucket)+1:] + ckptFileName + log.Info("dest model Path=" + returnStr + " src path=" + preModel.Path + ckptFileName) + body, err := storage.ObsDownloadAFile(setting.Bucket, srcUrl) + if err == nil { + defer body.Close() + _, err = storage.Attachments.UploadContent(setting.Attachment.Minio.Bucket, destFile, body) + if err != nil { + log.Error("UploadObject(%s) failed: %s", preModel.Path+ckptFileName, err.Error()) + return "", err + } + } else { + log.Info("download model failed. 
as " + err.Error()) + return "", err + } + log.Info("download model from obs succeed") + return returnStr, nil +} + func MinioCopyResults(srcPath string, destPath string) error { log.Info("prev task obs path:", setting.Attachment.Minio.Bucket+srcPath) log.Info("current task obs path:", setting.Attachment.Minio.Bucket+destPath) @@ -531,8 +569,8 @@ func CloudBrainInferenceJobCreate(ctx *context.Context, form auth.CreateCloudBra return } - ckptUrl := setting.Attachment.Minio.RealPath + form.TrainUrl + form.CkptName - log.Info("ckpt url:" + ckptUrl) + //ckptUrl := setting.Attachment.Minio.RealPath + form.TrainUrl + form.CkptName + //log.Info("ckpt url:" + ckptUrl) command, err := getInferenceJobCommand(form) if err != nil { log.Error("getTrainJobCommand failed: %v", err) @@ -607,6 +645,14 @@ func CloudBrainInferenceJobCreate(ctx *context.Context, form auth.CreateCloudBra return } + minioPreModelURL, err := dealModelInfo(form.ModelId, jobName, form.CkptName) + if err != nil { + log.Error("Can not find model", err) + cloudBrainNewDataPrepare(ctx, jobType) + ctx.RenderWithErr(ctx.Tr("repo.modelconvert.manage.model_not_exist"), tpl, &form) + return + } + req := cloudbrain.GenerateCloudBrainTaskReq{ Ctx: ctx, DisplayJobName: displayJobName, @@ -617,7 +663,7 @@ func CloudBrainInferenceJobCreate(ctx *context.Context, form auth.CreateCloudBra DatasetNames: datasetNames, DatasetInfos: datasetInfos, CodePath: storage.GetMinioPath(jobName, cloudbrain.CodeMountPath+"/"), - ModelPath: setting.Attachment.Minio.RealPath + form.TrainUrl, + ModelPath: setting.Attachment.Minio.RealPath + minioPreModelURL, BenchmarkPath: storage.GetMinioPath(jobName, cloudbrain.BenchMarkMountPath+"/"), Snn4ImageNetPath: storage.GetMinioPath(jobName, cloudbrain.Snn4imagenetMountPath+"/"), BrainScorePath: storage.GetMinioPath(jobName, cloudbrain.BrainScoreMountPath+"/"), @@ -631,7 +677,8 @@ func CloudBrainInferenceJobCreate(ctx *context.Context, form auth.CreateCloudBra ModelName: form.ModelName, 
ModelVersion: form.ModelVersion, CkptName: form.CkptName, - TrainUrl: form.TrainUrl, + ModelId: form.ModelId, + TrainUrl: form.PreTrainModelUrl, LabelName: labelName, Spec: spec, } @@ -740,11 +787,11 @@ func CloudBrainRestart(ctx *context.Context) { break } } - if !modelmanage.HasModelFile(task) { - resultCode = "-1" - errorMsg = ctx.Tr("repo.debug.manage.model_not_exist") - break - } + // if !modelmanage.HasModelFile(task) { + // resultCode = "-1" + // errorMsg = ctx.Tr("repo.debug.manage.model_not_exist") + // break + // } if hasDatasetDeleted(task) { resultCode = "-1" @@ -2734,8 +2781,18 @@ func ModelBenchmarkCreate(ctx *context.Context, form auth.CreateCloudBrainForm) req.LabelName = form.LabelName req.CkptName = form.CkptName req.ModelVersion = form.ModelVersion - req.PreTrainModelPath = setting.Attachment.Minio.RealPath + form.PreTrainModelUrl - req.PreTrainModelUrl = form.PreTrainModelUrl + req.ModelId = form.ModelId + + minioPreModelURL, err := dealModelInfo(form.ModelId, jobName, form.CkptName) + if err != nil { + log.Error("Can not find model", err) + cloudBrainNewDataPrepare(ctx, jobType) + ctx.RenderWithErr(ctx.Tr("repo.modelconvert.manage.model_not_exist"), tpl, &form) + return + } + + req.PreTrainModelPath = setting.Attachment.Minio.RealPath + minioPreModelURL + req.PreTrainModelUrl = minioPreModelURL } _, err = cloudbrain.GenerateTask(req) diff --git a/routers/repo/grampus.go b/routers/repo/grampus.go index b92dc1a1e6..b5920eafbe 100755 --- a/routers/repo/grampus.go +++ b/routers/repo/grampus.go @@ -303,7 +303,7 @@ func GrampusNotebookCreate(ctx *context.Context, form auth.CreateGrampusNotebook if form.ModelName != "" { //使用预训练模型训练 - m, err := models.QueryModelByPath(form.PreTrainModelUrl) + m, err := models.QueryModelById(form.ModelId) if err != nil { log.Error("Can not find model", err) grampusNotebookNewDataPrepare(ctx, processType) @@ -319,7 +319,9 @@ func GrampusNotebookCreate(ctx *context.Context, form auth.CreateGrampusNotebook req.ModelName = 
form.ModelName req.LabelName = form.LabelName req.CkptName = form.CkptName + req.ModelId = form.ModelId req.ModelVersion = form.ModelVersion + req.PreTrainModelUrl = form.PreTrainModelUrl req.PreTrainModelPath = getPreTrainModelPath(form.PreTrainModelUrl, form.CkptName) req.ModelStorageType = m.Type @@ -464,6 +466,7 @@ func grampusTrainJobNewDataPrepare(ctx *context.Context, processType string) err ctx.Data["model_version"] = ctx.Cloudbrain.ModelVersion ctx.Data["ckpt_name"] = ctx.Cloudbrain.CkptName + ctx.Data["model_id"] = ctx.Cloudbrain.ModelId ctx.Data["label_names"] = ctx.Cloudbrain.LabelName ctx.Data["pre_train_model_url"] = ctx.Cloudbrain.PreTrainModelUrl spec, _ := resource.GetCloudbrainSpec(ctx.Cloudbrain.ID) @@ -751,9 +754,9 @@ func grampusTrainJobGpuCreate(ctx *context.Context, form auth.CreateGrampusTrain req.ModelName = form.ModelName req.LabelName = form.LabelName req.CkptName = form.CkptName + req.ModelId = form.ModelId req.ModelVersion = form.ModelVersion req.PreTrainModelUrl = form.PreTrainModelUrl - } _, err = grampus.GenerateTrainJob(ctx, req) @@ -980,7 +983,7 @@ func grampusTrainJobGcuCreate(ctx *context.Context, form auth.CreateGrampusTrain req.CkptName = form.CkptName req.ModelVersion = form.ModelVersion req.PreTrainModelUrl = form.PreTrainModelUrl - + req.ModelId = form.ModelId } _, err = grampus.GenerateTrainJob(ctx, req) @@ -1241,6 +1244,7 @@ func grampusTrainJobNpuCreate(ctx *context.Context, form auth.CreateGrampusTrain req.ModelName = form.ModelName req.LabelName = form.LabelName req.CkptName = form.CkptName + req.ModelId = form.ModelId req.ModelVersion = form.ModelVersion req.PreTrainModelUrl = form.PreTrainModelUrl req.PreTrainModelPath = preTrainModelPath diff --git a/routers/repo/modelarts.go b/routers/repo/modelarts.go index 50dfd7ebe5..9f714bc8ab 100755 --- a/routers/repo/modelarts.go +++ b/routers/repo/modelarts.go @@ -282,6 +282,7 @@ func Notebook2Create(ctx *context.Context, form auth.CreateModelArtsNotebookForm req.ModelName = 
form.ModelName req.LabelName = form.LabelName req.CkptName = form.CkptName + req.ModelId = form.ModelId req.ModelVersion = form.ModelVersion req.PreTrainModelUrl = form.PreTrainModelUrl @@ -630,6 +631,7 @@ func NotebookRestart(ctx *context.Context) { LabelName: task.LabelName, PreTrainModelUrl: task.PreTrainModelUrl, CkptName: task.CkptName, + ModelId: task.ModelId, } err = models.RestartCloudbrain(task, newTask) @@ -1052,6 +1054,7 @@ func trainJobNewVersionDataPrepare(ctx *context.Context) error { ctx.Data["model_name"] = task.ModelName ctx.Data["model_version"] = task.ModelVersion ctx.Data["ckpt_name"] = task.CkptName + ctx.Data["model_id"] = ctx.Cloudbrain.ModelId ctx.Data["label_names"] = task.LabelName ctx.Data["pre_train_model_url"] = task.PreTrainModelUrl @@ -1351,6 +1354,7 @@ func TrainJobCreate(ctx *context.Context, form auth.CreateModelArtsTrainJobForm) req.ModelName = form.ModelName req.LabelName = form.LabelName req.CkptName = form.CkptName + req.ModelId = form.ModelId req.ModelVersion = form.ModelVersion req.PreTrainModelUrl = form.PreTrainModelUrl @@ -1746,6 +1750,7 @@ func TrainJobCreateVersion(ctx *context.Context, form auth.CreateModelArtsTrainJ req.ModelName = form.ModelName req.LabelName = form.LabelName req.CkptName = form.CkptName + req.ModelId = form.ModelId req.ModelVersion = form.ModelVersion req.PreTrainModelUrl = form.PreTrainModelUrl @@ -2157,11 +2162,11 @@ func InferenceJobCreate(ctx *context.Context, form auth.CreateModelArtsInference LabelName := form.LabelName isLatestVersion := modelarts.IsLatestVersion VersionCount := modelarts.VersionCountOne - trainUrl := form.TrainUrl + trainUrl := form.PreTrainModelUrl modelName := form.ModelName modelVersion := form.ModelVersion ckptName := form.CkptName - ckptUrl := "/" + form.TrainUrl + form.CkptName + ckptUrl := "/" + form.PreTrainModelUrl + form.CkptName log.Info("ckpt url:" + ckptUrl) errStr := checkInferenceJobMultiNode(ctx.User.ID, form.WorkServerNumber) @@ -2376,6 +2381,7 @@ func 
InferenceJobCreate(ctx *context.Context, form auth.CreateModelArtsInference ModelName: modelName, ModelVersion: modelVersion, CkptName: ckptName, + ModelId: form.ModelId, ResultUrl: resultObsPath, Spec: spec, DatasetName: datasetNames, @@ -2493,6 +2499,18 @@ func InferenceJobIndex(ctx *context.Context) { if tasks[i].ComputeResource == "" { tasks[i].ComputeResource = models.NPUResource } + if tasks[i].ModelId != "" { + model, err := models.QueryModelById(tasks[i].ModelId) + if err == nil && model != nil { + if model.RepoId != tasks[i].RepoID { + repo, err := models.GetRepositoryByID(model.RepoId) + if err == nil && repo != nil { + tasks[i].ModelRepoName = repo.Name + tasks[i].ModelRepoOwnerName = repo.OwnerName + } + } + } + } } isQueryPrivate := isQueryPrivateModel(ctx) repoId := ctx.Repo.Repository.ID @@ -2671,7 +2689,8 @@ func inferenceJobErrorNewDataPrepare(ctx *context.Context, form auth.CreateModel ctx.Data["model_name"] = form.ModelName ctx.Data["model_version"] = form.ModelVersion ctx.Data["ckpt_name"] = form.CkptName - ctx.Data["train_url"] = form.TrainUrl + ctx.Data["model_id"] = form.ModelId + ctx.Data["pre_train_model_url"] = form.PreTrainModelUrl ctx.Data["datasetType"] = models.TypeCloudBrainTwo waitCount := cloudbrain.GetWaitingCloudbrainCount(models.TypeCloudBrainTwo, "") ctx.Data["WaitCount"] = waitCount diff --git a/routers/repo/user_data_analysis.go b/routers/repo/user_data_analysis.go index 5ee0f64fc0..573b65b8d7 100755 --- a/routers/repo/user_data_analysis.go +++ b/routers/repo/user_data_analysis.go @@ -721,10 +721,10 @@ func TimingCountDataByDateAndReCount(date string, isReCount bool) { log.Info("endTime time:" + endTime.Format("2006-01-02 15:04:05")) warnEmailMessage := "用户统计信息入库失败,请尽快定位。" - startYear := time.Date(USER_YEAR, 1, 1, 0, 0, 0, 1, t.Location()) - endYear := startYear.AddDate(1, 0, 0) + //startYear := time.Date(USER_YEAR, 1, 1, 0, 0, 0, 1, t.Location()) + //endYear := startYear.AddDate(1, 0, 0) - 
models.RefreshUserYearTable(startYear, endYear) + //models.RefreshUserYearTable(startYear, endYear) //query wiki data log.Info("start to time count data") diff --git a/routers/routes/routes.go b/routers/routes/routes.go index d87bf33340..b63ea57a96 100755 --- a/routers/routes/routes.go +++ b/routers/routes/routes.go @@ -366,6 +366,13 @@ func RegisterRoutes(m *macaron.Macaron) { m.Get("/home/notice", routers.HomeNoticeTmpl) m.Get("/home/privacy", routers.HomePrivacy) + m.Group("/modelsquare", func() { + m.Get("/main", repo.ModelSquareTmpl) + m.Get("/main_query_data", repo.ModelSquareData) + m.Put("/modify_model_collect", repo.ModelCollect) + m.Get("/main_query_label", repo.QueryModelLabel) + }) + m.Group("/extension", func() { // m.Get("", modelapp.ModelMainPage) m.Get("/tuomin/upload", modelapp.ProcessImageUI) @@ -610,6 +617,10 @@ func RegisterRoutes(m *macaron.Macaron) { m.Put("/:id/action/:action", admin.DatasetAction) // m.Post("/delete", admin.DeleteDataset) }) + m.Group("/model", func() { + m.Get("", admin.AdminModelManage) + m.Put("/action", admin.ModifyModelRecommend) + }) m.Group("/cloudbrains", func() { m.Get("", admin.CloudBrains) m.Get("/download", admin.DownloadCloudBrains) @@ -1309,20 +1320,29 @@ func RegisterRoutes(m *macaron.Macaron) { }) }, context.RepoRef()) m.Group("/modelmanage", func() { - m.Get("/create_local_model_1", repo.CreateLocalModel) - m.Get("/create_local_model_2", repo.CreateLocalModelForUpload) + m.Get("/create_local_model", repo.CreateLocalModel) m.Get("/create_online_model", repo.CreateOnlineModel) m.Post("/create_local_model", repo.SaveLocalModel) m.Delete("/delete_model_file", repo.DeleteModelFile) + m.Get("/model_readme_tmpl", repo.ModelReadMeTmpl) + m.Get("/model_readme_data", repo.QueryModelReadMe) + m.Post("/model_readme_data", repo.ModifyModelReadMe) + m.Get("/model_filelist_tmpl", repo.ModelFileListTmpl) + m.Get("/model_fileupload_tmpl", repo.CreateLocalModelForUpload) + m.Get("/model_setting", repo.ModelFileSettingTmpl) + 
m.Get("/model_evolution_map", repo.ModelEvolutionMapTmpl) + m.Get("/model_evolution_map_data", repo.ModelEvolutionMapData) + m.Post("/create_model", repo.SaveModel) m.Post("/create_model_convert", reqWechatBind, reqRepoModelManageWriter, repo.SaveModelConvert) m.Post("/create_new_model", repo.SaveNewNameModel) m.Delete("/delete_model", repo.DeleteModel) m.Post("/delete_model_convert/:id", repo.DeleteModelConvert) m.Post("/convert_stop/:id", repo.StopModelConvert) - m.Put("/modify_model", repo.ModifyModelInfo) - m.Put("/modify_model_status", repo.ModifyModelPrivate) + m.Put("/modify_model", reqRepoModelManageWriter, repo.ModifyModelInfo) + m.Put("/modify_model_status", reqRepoModelManageWriter, repo.ModifyModelPrivate) + m.Get("/show_model_collect_num", reqRepoModelManageReader, repo.QueryModelCollectNum) m.Get("/show_model", reqRepoModelManageReader, repo.ShowModelTemplate) m.Get("/convert_model", reqRepoModelManageReader, repo.ConvertModelTemplate) m.Get("/show_model_info", repo.ShowModelInfo) diff --git a/services/cloudbrain/cloudbrainTask/inference.go b/services/cloudbrain/cloudbrainTask/inference.go index a531cb43da..5ecad1bad9 100644 --- a/services/cloudbrain/cloudbrainTask/inference.go +++ b/services/cloudbrain/cloudbrainTask/inference.go @@ -2,8 +2,6 @@ package cloudbrainTask import ( "bufio" - cloudbrainService "code.gitea.io/gitea/services/cloudbrain" - "code.gitea.io/gitea/services/lock" "encoding/json" "errors" "io" @@ -15,6 +13,9 @@ import ( "strings" "unicode/utf8" + cloudbrainService "code.gitea.io/gitea/services/cloudbrain" + "code.gitea.io/gitea/services/lock" + "code.gitea.io/gitea/modules/modelarts" "code.gitea.io/gitea/modules/git" @@ -147,6 +148,14 @@ func CloudBrainInferenceJobCreate(ctx *context.Context, option api.CreateTrainJo return } + minioPreModelURL, err := dealModelInfo(option.ModelId, jobName, option.CkptName) + if err != nil { + log.Error("Can not find model", err) + ctx.JSON(http.StatusOK, 
models.BaseErrorMessageApi(ctx.Tr("repo.modelconvert.manage.model_not_exist"))) + //ctx.RenderWithErr(ctx.Tr("repo.modelconvert.manage.model_not_exist"), tpl, &form) + return + } + req := cloudbrain.GenerateCloudBrainTaskReq{ Ctx: ctx, DisplayJobName: displayJobName, @@ -157,7 +166,7 @@ func CloudBrainInferenceJobCreate(ctx *context.Context, option api.CreateTrainJo DatasetNames: datasetNames, DatasetInfos: datasetInfos, CodePath: storage.GetMinioPath(jobName, cloudbrain.CodeMountPath+"/"), - ModelPath: setting.Attachment.Minio.RealPath + option.PreTrainModelUrl, + ModelPath: setting.Attachment.Minio.RealPath + minioPreModelURL, BenchmarkPath: storage.GetMinioPath(jobName, cloudbrain.BenchMarkMountPath+"/"), Snn4ImageNetPath: storage.GetMinioPath(jobName, cloudbrain.Snn4imagenetMountPath+"/"), BrainScorePath: storage.GetMinioPath(jobName, cloudbrain.BrainScoreMountPath+"/"), @@ -171,6 +180,7 @@ func CloudBrainInferenceJobCreate(ctx *context.Context, option api.CreateTrainJo ModelName: option.ModelName, ModelVersion: option.ModelVersion, CkptName: option.CkptName, + ModelId: option.ModelId, TrainUrl: option.PreTrainModelUrl, LabelName: labelName, Spec: spec, @@ -185,6 +195,43 @@ func CloudBrainInferenceJobCreate(ctx *context.Context, option api.CreateTrainJo ctx.JSON(http.StatusOK, models.BaseMessageApi{Code: 0, Message: jobId}) } +func dealModelInfo(modelId string, jobName string, ckptName string) (string, error) { + preModel, err := models.QueryModelById(modelId) + if err != nil { + log.Error("Can not find model", err) + return "", err + } + minioPreModelURL, err := downloadModelFromObs(preModel, jobName, cloudbrain.PretrainModelMountPath, ckptName) + if err != nil { + log.Error("Can not find model", err) + + return "", err + } + return minioPreModelURL, nil +} + +func downloadModelFromObs(preModel *models.AiModelManage, jobName, suffixPath string, ckptFileName string) (string, error) { + destPath := setting.CBCodePathPrefix + jobName + suffixPath + "/" + destFile 
:= destPath + ckptFileName + returnStr := setting.Attachment.Minio.Bucket + "/" + destPath + srcUrl := preModel.Path[len(setting.Bucket)+1:] + ckptFileName + log.Info("dest model Path=" + returnStr + " src path=" + preModel.Path + ckptFileName) + body, err := storage.ObsDownloadAFile(setting.Bucket, srcUrl) + if err == nil { + defer body.Close() + _, err = storage.Attachments.UploadContent(setting.Attachment.Minio.Bucket, destFile, body) + if err != nil { + log.Error("UploadObject(%s) failed: %s", preModel.Path+ckptFileName, err.Error()) + return "", err + } + } else { + log.Info("download model failed. as " + err.Error()) + return "", err + } + log.Info("download model from obs succeed") + return returnStr, nil +} + func ModelArtsInferenceJobCreate(ctx *context.Context, option api.CreateTrainJobOption) { ctx.Data["PageIsTrainJob"] = true VersionOutputPath := modelarts.GetOutputPathByCount(modelarts.TotalVersionCount) @@ -418,6 +465,7 @@ func ModelArtsInferenceJobCreate(ctx *context.Context, option api.CreateTrainJob ModelName: modelName, ModelVersion: modelVersion, CkptName: ckptName, + ModelId: option.ModelId, ResultUrl: resultObsPath, Spec: spec, DatasetName: datasetNames, diff --git a/services/cloudbrain/cloudbrainTask/notebook.go b/services/cloudbrain/cloudbrainTask/notebook.go index d1586f00b0..b53618125e 100644 --- a/services/cloudbrain/cloudbrainTask/notebook.go +++ b/services/cloudbrain/cloudbrainTask/notebook.go @@ -10,11 +10,9 @@ import ( "strconv" "strings" - "code.gitea.io/gitea/services/cloudbrain/modelmanage" - "code.gitea.io/gitea/modules/notification" "code.gitea.io/gitea/modules/timeutil" - + "code.gitea.io/gitea/services/cloudbrain/modelmanage" "code.gitea.io/gitea/modules/grampus" "code.gitea.io/gitea/services/lock" @@ -227,7 +225,8 @@ func GrampusNotebookCreate(ctx *context.Context, option api.CreateNotebookOption if option.ModelName != "" { //使用预训练模型 - m, err := models.QueryModelByPath(option.PreTrainModelUrl) + m, err := 
models.QueryModelById(option.ModelId) + //(option.PreTrainModelUrl) if err != nil { log.Error("Can not find model", err) @@ -242,6 +241,7 @@ func GrampusNotebookCreate(ctx *context.Context, option api.CreateNotebookOption req.ModelName = option.ModelName req.LabelName = option.LabelName req.CkptName = option.CkptName + req.ModelId = option.ModelId req.ModelVersion = option.ModelVersion req.PreTrainModelUrl = option.PreTrainModelUrl req.PreTrainModelPath = getPreTrainModelPath(option.PreTrainModelUrl, option.CkptName) @@ -1086,6 +1086,7 @@ func GrampusNotebookRestart(ctx *context.Context) { LabelName: task.LabelName, PreTrainModelUrl: task.PreTrainModelUrl, CkptName: task.CkptName, + ModelId: task.ModelId, WorkServerNumber: 1, } diff --git a/services/cloudbrain/cloudbrainTask/train.go b/services/cloudbrain/cloudbrainTask/train.go index 90bb9a9571..5c504a42fd 100644 --- a/services/cloudbrain/cloudbrainTask/train.go +++ b/services/cloudbrain/cloudbrainTask/train.go @@ -110,9 +110,21 @@ func CloudbrainOneTrainJobCreate(ctx *context.Context, option api.CreateTrainJob req.ModelName = option.ModelName req.LabelName = option.LabelName req.CkptName = option.CkptName + req.ModelId = option.ModelId req.ModelVersion = option.ModelVersion - req.PreTrainModelPath = setting.Attachment.Minio.RealPath + option.PreTrainModelUrl - req.PreTrainModelUrl = option.PreTrainModelUrl + minioPreModelURL, err := dealModelInfo(option.ModelId, jobName, option.CkptName) + if err != nil { + log.Error("Can not find model", err) + //cloudBrainNewDataPrepare(ctx, jobType) + //ctx.RenderWithErr(ctx.Tr("repo.modelconvert.manage.model_not_exist"), tpl, &form) + ctx.JSON(http.StatusOK, models.BaseErrorMessageApi(ctx.Tr("repo.modelconvert.manage.model_not_exist"))) + return + } + req.PreTrainModelPath = setting.Attachment.Minio.RealPath + minioPreModelURL + req.PreTrainModelUrl = minioPreModelURL + + //req.PreTrainModelPath = setting.Attachment.Minio.RealPath + option.PreTrainModelUrl + 
//req.PreTrainModelUrl = option.PreTrainModelUrl } @@ -282,6 +294,7 @@ func ModelArtsTrainJobNpuCreate(ctx *context.Context, option api.CreateTrainJobO req.ModelName = option.ModelName req.LabelName = option.LabelName req.CkptName = option.CkptName + req.ModelId = option.ModelId req.ModelVersion = option.ModelVersion req.PreTrainModelUrl = option.PreTrainModelUrl @@ -414,9 +427,11 @@ func GrampusTrainJobGpuCreate(ctx *context.Context, option api.CreateTrainJobOpt req.ModelName = option.ModelName req.LabelName = option.LabelName req.CkptName = option.CkptName + req.ModelId = option.ModelId req.ModelVersion = option.ModelVersion req.PreTrainModelUrl = option.PreTrainModelUrl - + preTrainModelPath := getPreTrainModelPath(option.PreTrainModelUrl, option.CkptName) + req.PreTrainModelPath = preTrainModelPath } jobId, err := grampus.GenerateTrainJob(ctx, req) @@ -641,6 +656,7 @@ func GrampusTrainJobNpuCreate(ctx *context.Context, option api.CreateTrainJobOpt req.ModelName = option.ModelName req.LabelName = option.LabelName req.CkptName = option.CkptName + req.ModelId = option.ModelId req.ModelVersion = option.ModelVersion req.PreTrainModelUrl = option.PreTrainModelUrl req.PreTrainModelPath = preTrainModelPath diff --git a/services/cloudbrain/modelmanage/model_manage.go b/services/cloudbrain/modelmanage/model_manage.go index 4e2bf74fe4..26378cc7f8 100644 --- a/services/cloudbrain/modelmanage/model_manage.go +++ b/services/cloudbrain/modelmanage/model_manage.go @@ -23,11 +23,11 @@ func QueryModelFileByModel(model *models.AiModelManage) []storage.FileInfo { } func HasModelFile(task *models.Cloudbrain) bool { - if task.PreTrainModelUrl == "" { + if task.ModelId == "" { return true } - model, err := models.QueryModelByPath(task.PreTrainModelUrl) + model, err := models.QueryModelById(task.ModelId) if err != nil { log.Error("Can not find model", err) return false diff --git a/templates/admin/model/list.tmpl b/templates/admin/model/list.tmpl new file mode 100644 index 
0000000000..2e2375dfb8 --- /dev/null +++ b/templates/admin/model/list.tmpl @@ -0,0 +1,97 @@ +{{template "base/head" .}} +
+ {{template "admin/navbar" .}} +
+ {{template "base/alert" .}} + +

+ {{.i18n.Tr "repo.model.manage.model_manage"}} ({{.i18n.Tr "admin.total" .Total}}) +

+
+ {{template "admin/model/search" .}} +
+
+
+
+ + +
+
+
+ +
+ + + + + + + + + + + + + + + {{range .models}} + + + + + + + + + + + {{end}} + +
ID{{.i18n.Tr "admin.datasets.name"}}{{.i18n.Tr "admin.repos.size"}}{{.i18n.Tr "repo.issues.filter_sort.downloadtimes"}}{{.i18n.Tr "repo.issues.filter_sort.citations"}}{{.i18n.Tr "admin.datasets.private"}}{{.i18n.Tr "admin.users.created"}}{{.i18n.Tr "admin.notices.op"}}
{{.ID}}{{.Name}}{{if .Recommend}}{{end}}{{SizeFmt .Size}}{{.DownloadCount}}{{.ReferenceCount}}{{.CreatedUnix.FormatShort}}{{if .Recommend}}{{$.i18n.Tr "admin.datasets.unrecommend"}}{{else}}{{$.i18n.Tr "admin.datasets.recommend"}}{{end}}
+
+ {{template "base/paginate" .}} +
+
+{{template "base/footer" .}} + \ No newline at end of file diff --git a/templates/admin/model/search.tmpl b/templates/admin/model/search.tmpl new file mode 100644 index 0000000000..f0555c6d4f --- /dev/null +++ b/templates/admin/model/search.tmpl @@ -0,0 +1,34 @@ + +
+
+ + +
+
\ No newline at end of file diff --git a/templates/admin/navbar.tmpl b/templates/admin/navbar.tmpl index 5dd4304663..d234826ae6 100644 --- a/templates/admin/navbar.tmpl +++ b/templates/admin/navbar.tmpl @@ -21,6 +21,9 @@ {{.i18n.Tr "admin.datasets"}} + + {{.i18n.Tr "admin.models"}} + {{.i18n.Tr "repo.cloudbrain.task"}} diff --git a/templates/base/footer_content.tmpl b/templates/base/footer_content.tmpl index 8e81a7a547..30caa22ac7 100755 --- a/templates/base/footer_content.tmpl +++ b/templates/base/footer_content.tmpl @@ -65,7 +65,7 @@ {{if .IsSigned}} {{.i18n.Tr "custom.foot.advice_feedback"}} {{else}} - {{.i18n.Tr "custom.foot.advice_feedback"}} + {{.i18n.Tr "custom.foot.advice_feedback"}} {{end}} {{.i18n.Tr "custom.resource_description"}} diff --git a/templates/base/head_navbar.tmpl b/templates/base/head_navbar.tmpl index e8479fcc22..ad0a32ce7b 100755 --- a/templates/base/head_navbar.tmpl +++ b/templates/base/head_navbar.tmpl @@ -42,6 +42,7 @@ {{.i18n.Tr "repo.model_manager"}} @@ -84,6 +85,7 @@ {{.i18n.Tr "repo.model_manager"}} diff --git a/templates/base/head_navbar_fluid.tmpl b/templates/base/head_navbar_fluid.tmpl index 6e7e9c2c4f..11b9a58eab 100644 --- a/templates/base/head_navbar_fluid.tmpl +++ b/templates/base/head_navbar_fluid.tmpl @@ -39,6 +39,7 @@ {{.i18n.Tr "repo.model_manager"}} @@ -80,6 +81,7 @@ {{.i18n.Tr "repo.model_manager"}} diff --git a/templates/base/head_navbar_home.tmpl b/templates/base/head_navbar_home.tmpl index 09ec1ffe1e..551d53573f 100644 --- a/templates/base/head_navbar_home.tmpl +++ b/templates/base/head_navbar_home.tmpl @@ -31,6 +31,7 @@ {{.i18n.Tr "repo.model_manager"}} @@ -73,6 +74,7 @@ {{.i18n.Tr "repo.model_manager"}} diff --git a/templates/base/head_navbar_pro.tmpl b/templates/base/head_navbar_pro.tmpl index 77058115cd..16f649f5e4 100644 --- a/templates/base/head_navbar_pro.tmpl +++ b/templates/base/head_navbar_pro.tmpl @@ -41,6 +41,7 @@ {{.i18n.Tr "repo.model_manager"}} @@ -83,6 +84,7 @@ {{.i18n.Tr "repo.model_manager"}} diff 
--git a/templates/model/square/index.tmpl b/templates/model/square/index.tmpl new file mode 100644 index 0000000000..ffe6c916dd --- /dev/null +++ b/templates/model/square/index.tmpl @@ -0,0 +1,5 @@ +{{template "base/head_home" .}} + +
+ +{{template "base/footer" .}} diff --git a/templates/repo/cloudbrain/benchmark/new.tmpl b/templates/repo/cloudbrain/benchmark/new.tmpl index f0626a23af..83ca5c9e07 100755 --- a/templates/repo/cloudbrain/benchmark/new.tmpl +++ b/templates/repo/cloudbrain/benchmark/new.tmpl @@ -88,7 +88,12 @@ {{.i18n.Tr "cloudbrain.view_sample"}} - {{template "custom/select_model_required" .}} + +
+
+
+
diff --git a/templates/repo/cloudbrain/inference/new.tmpl b/templates/repo/cloudbrain/inference/new.tmpl index 785be538a2..9eef6e1d88 100644 --- a/templates/repo/cloudbrain/inference/new.tmpl +++ b/templates/repo/cloudbrain/inference/new.tmpl @@ -50,7 +50,6 @@ {{.CsrfTokenHtml}} - @@ -106,8 +105,10 @@
- +

{{.i18n.Tr "repo.modelarts.train_job.parameter_setting"}}:

+ + -
- + --> +
+
+
+ +
@@ -293,6 +298,7 @@ let nameMap,nameList // 获取模型列表和模型名称对应的模型版本 $(document).ready(function(){ + return; // 用模型选择组件了 modelVersion() modelCkpt() $.get(`${RepoLink}/modelmanage/query_model_for_predict?type=0`, (data) => { diff --git a/templates/repo/cloudbrain/new.tmpl b/templates/repo/cloudbrain/new.tmpl index 2c9638ab1a..d5bc9bf01f 100755 --- a/templates/repo/cloudbrain/new.tmpl +++ b/templates/repo/cloudbrain/new.tmpl @@ -145,36 +145,15 @@ {{end}}
- - {{template "custom/select_model" .}} -
- -
-
- + +
+
+
+
+
- -
-
-
- - -
-
- - -
- + +
+ + +
+
+ + +
+
+ + +
+
diff --git a/templates/repo/cloudbrain/trainjob/new.tmpl b/templates/repo/cloudbrain/trainjob/new.tmpl index 55041e77e5..7aa842e301 100755 --- a/templates/repo/cloudbrain/trainjob/new.tmpl +++ b/templates/repo/cloudbrain/trainjob/new.tmpl @@ -156,7 +156,12 @@ {{end}}
- {{template "custom/select_model" .}} + +
+
+
+
- {{template "custom/select_model" .}} + +
+
+
+
diff --git a/templates/repo/grampus/notebook/npu/new.tmpl b/templates/repo/grampus/notebook/npu/new.tmpl index 5046226977..24daecc03f 100644 --- a/templates/repo/grampus/notebook/npu/new.tmpl +++ b/templates/repo/grampus/notebook/npu/new.tmpl @@ -95,7 +95,12 @@ {{end}}
- {{template "custom/select_model" .}} + +
+
+
+
- {{template "custom/select_model" .}} + +
+
+
+
+
+
diff --git a/templates/repo/grampus/trainjob/npu/new.tmpl b/templates/repo/grampus/trainjob/npu/new.tmpl index c4238dbd39..6755473b3f 100755 --- a/templates/repo/grampus/trainjob/npu/new.tmpl +++ b/templates/repo/grampus/trainjob/npu/new.tmpl @@ -143,10 +143,15 @@ {{end}}
- {{template "custom/select_model" .}} + +
+
+
+
- {{if .image_id}} {{range .images}} {{if eq $.image_id .ID}} diff --git a/templates/repo/grampus/trainjob/show.tmpl b/templates/repo/grampus/trainjob/show.tmpl index 6fc02c798f..121cee9341 100755 --- a/templates/repo/grampus/trainjob/show.tmpl +++ b/templates/repo/grampus/trainjob/show.tmpl @@ -783,6 +783,7 @@ type: 'POST', data: data, success: function (res) { + const modelName = $('#formId #name').val(); $('input[name="engine_name"]').val(""); $('input[name="engine"]').val(""); $('input[name="jobId"]').val(""); @@ -791,7 +792,7 @@ var cityObj = $("#modelSelectedFile"); cityObj.attr("value", ""); document.getElementById("formId").reset(); - location.href = `/${userName}/${repoPath}/modelmanage/show_model` + location.href = `/${userName}/${repoPath}/modelmanage/model_readme_tmpl?name=${encodeURIComponent(modelName)}` $('.ui.modal.second').modal('hide') }, error: function (xhr) { diff --git a/templates/repo/modelarts/inferencejob/index.tmpl b/templates/repo/modelarts/inferencejob/index.tmpl index c31500089b..d462c0acbe 100644 --- a/templates/repo/modelarts/inferencejob/index.tmpl +++ b/templates/repo/modelarts/inferencejob/index.tmpl @@ -119,9 +119,21 @@
- +
- {{.ModelName}}  {{.ModelVersion}} + + {{.ModelName}}  {{.ModelVersion}}
diff --git a/templates/repo/modelarts/inferencejob/new.tmpl b/templates/repo/modelarts/inferencejob/new.tmpl index 364dae4ffe..19ad10a755 100644 --- a/templates/repo/modelarts/inferencejob/new.tmpl +++ b/templates/repo/modelarts/inferencejob/new.tmpl @@ -49,7 +49,6 @@ - @@ -105,8 +104,9 @@ {{end}}
-

{{.i18n.Tr "repo.modelarts.train_job.parameter_setting"}}:

+ + +
+
+
+
@@ -323,6 +329,7 @@ // 获取模型列表和模型名称对应的模型版本 $(document).ready(function(){ + return; // 用模型选择组件了 modelVersion() modelCkpt() $.get(`${RepoLink}/modelmanage/query_model_for_predict?type=1`, (data) => { diff --git a/templates/repo/modelarts/notebook/new.tmpl b/templates/repo/modelarts/notebook/new.tmpl index 98bcf714ee..0d96955abd 100755 --- a/templates/repo/modelarts/notebook/new.tmpl +++ b/templates/repo/modelarts/notebook/new.tmpl @@ -73,7 +73,12 @@

{{.i18n.Tr "repo.modelarts.train_job.parameter_setting"}}:

- {{template "custom/select_model" .}} + +
+
+
+
- {{template "custom/select_model" .}} + +
+
+
+
diff --git a/templates/repo/modelarts/trainjob/show.tmpl b/templates/repo/modelarts/trainjob/show.tmpl index 0bdd7f61e5..7f52b1a079 100755 --- a/templates/repo/modelarts/trainjob/show.tmpl +++ b/templates/repo/modelarts/trainjob/show.tmpl @@ -788,6 +788,7 @@ type: 'POST', data: data, success: function (res) { + const modelName = $('#formId #name').val(); $('input[name="engine_name"]').val(""); $('input[name="engine"]').val(""); $('input[name="jobId"]').val(""); @@ -796,7 +797,7 @@ var cityObj = $("#modelSelectedFile"); cityObj.attr("value", ""); document.getElementById("formId").reset(); - location.href = `/${userName}/${repoPath}/modelmanage/show_model` + location.href = `/${userName}/${repoPath}/modelmanage/model_readme_tmpl?name=${encodeURIComponent(modelName)}` $('.ui.modal.second').modal('hide') }, error: function (xhr) { diff --git a/templates/repo/modelarts/trainjob/version_new.tmpl b/templates/repo/modelarts/trainjob/version_new.tmpl index 30a10ac556..07fae09d59 100644 --- a/templates/repo/modelarts/trainjob/version_new.tmpl +++ b/templates/repo/modelarts/trainjob/version_new.tmpl @@ -125,7 +125,12 @@
- {{template "custom/select_model" .}} + +
+
+
+
diff --git a/templates/repo/modelmanage/create_local_1.tmpl b/templates/repo/modelmanage/create_local.tmpl similarity index 72% rename from templates/repo/modelmanage/create_local_1.tmpl rename to templates/repo/modelmanage/create_local.tmpl index ed47a907ea..7bf3d6aaf1 100644 --- a/templates/repo/modelmanage/create_local_1.tmpl +++ b/templates/repo/modelmanage/create_local.tmpl @@ -1,5 +1,5 @@ {{template "base/head" .}} - +
{{template "repo/header" .}} @@ -7,5 +7,5 @@
- + {{template "base/footer" .}} diff --git a/templates/repo/modelmanage/create_local_2.tmpl b/templates/repo/modelmanage/create_local_2.tmpl deleted file mode 100644 index 5780c6194c..0000000000 --- a/templates/repo/modelmanage/create_local_2.tmpl +++ /dev/null @@ -1,11 +0,0 @@ -{{template "base/head" .}} - -
- {{template "repo/header" .}} - -
-
-
-
- -{{template "base/footer" .}} diff --git a/templates/repo/modelmanage/create_online.tmpl b/templates/repo/modelmanage/create_online.tmpl index e11ceb47e0..1b5adbc17f 100644 --- a/templates/repo/modelmanage/create_online.tmpl +++ b/templates/repo/modelmanage/create_online.tmpl @@ -579,8 +579,8 @@ url: url_href, type: "POST", data: data, - success: function (res) { - backToModelListPage(); + success: function (res) { + goDetailModelPage(cName); }, error: function (xhr) { // 隐藏 loading @@ -598,7 +598,12 @@ let url_href = location.href.split("create_online_model")[0] + 'show_model'; window.location.href = url_href; } + function goDetailModelPage(name) { + let url_href = location.href.split("create_online_model")[0] + 'model_readme_tmpl?name=' + encodeURIComponent(name); + window.location.href = url_href; + } window.submitSaveModel = submitSaveModel; window.backToModelListPage = backToModelListPage; + window.goDetailModelPage = goDetailModelPage; })(); diff --git a/templates/repo/modelmanage/evolution_map.tmpl b/templates/repo/modelmanage/evolution_map.tmpl new file mode 100644 index 0000000000..65b5bd1e62 --- /dev/null +++ b/templates/repo/modelmanage/evolution_map.tmpl @@ -0,0 +1,5 @@ +{{template "base/head_home" .}} + +
+ +{{template "base/footer" .}} diff --git a/templates/repo/modelmanage/filelist.tmpl b/templates/repo/modelmanage/filelist.tmpl new file mode 100644 index 0000000000..6f111cbe14 --- /dev/null +++ b/templates/repo/modelmanage/filelist.tmpl @@ -0,0 +1,5 @@ +{{template "base/head_home" .}} + +
+ +{{template "base/footer" .}} diff --git a/templates/repo/modelmanage/fileupload.tmpl b/templates/repo/modelmanage/fileupload.tmpl new file mode 100644 index 0000000000..177f3c6ed3 --- /dev/null +++ b/templates/repo/modelmanage/fileupload.tmpl @@ -0,0 +1,6 @@ +{{template "base/head" .}} + + +
+ +{{template "base/footer" .}} diff --git a/templates/repo/modelmanage/index.tmpl b/templates/repo/modelmanage/index.tmpl index 3f3d3e217b..4e3b6658cb 100644 --- a/templates/repo/modelmanage/index.tmpl +++ b/templates/repo/modelmanage/index.tmpl @@ -76,7 +76,7 @@
{{$.i18n.Tr "repo.model.manage.import_local_model"}} + href="{{.RepoLink}}/modelmanage/create_local_model">{{$.i18n.Tr "repo.model.manage.import_local_model"}} {{$.i18n.Tr "repo.model.manage.import_online_model"}}
diff --git a/templates/repo/modelmanage/readme.tmpl b/templates/repo/modelmanage/readme.tmpl new file mode 100644 index 0000000000..4770ca5f39 --- /dev/null +++ b/templates/repo/modelmanage/readme.tmpl @@ -0,0 +1,5 @@ +{{template "base/head_home" .}} + +
+ +{{template "base/footer" .}} diff --git a/templates/repo/modelmanage/setting.tmpl b/templates/repo/modelmanage/setting.tmpl new file mode 100644 index 0000000000..c67892e3d5 --- /dev/null +++ b/templates/repo/modelmanage/setting.tmpl @@ -0,0 +1,6 @@ +{{template "base/head_home" .}} + + +
+ +{{template "base/footer" .}} diff --git a/templates/repo/modelsafety/new.tmpl b/templates/repo/modelsafety/new.tmpl index c4916f4e75..2e6060f626 100644 --- a/templates/repo/modelsafety/new.tmpl +++ b/templates/repo/modelsafety/new.tmpl @@ -141,7 +141,12 @@ onkeydown="this.value=this.value.substring(0, 255)" onkeyup="this.value=this.value.substring(0, 255)">{{.description}}
- {{template "custom/select_model_required" .}} + +
+
+
+
+
+ + +
- - -
-
+
-
{{ type == '1' ? $t('modelManage.confirm') : - $t('modelManage.createModel') + $t('modelManage.createModel') }} {{ $t('modelManage.cancel') }} @@ -146,18 +105,18 @@ export default { type: '0', // 1-修改,其它-新增 loading: false, state: { - type: 0, + type: 1, name: REPO_NAME + '_model_' + Math.random().toString(36).substr(2, 4), version: '0.0.1', engine: '0', label: '', description: '', - isPrivate : false, + isPrivate: false, }, nameErr: false, isShowVersion: false, engineList: MODEL_ENGINES, - repoIsPrivate: REPOISPRIVATE, + repoIsPrivate: REPOISPRIVATE, }; }, components: {}, @@ -170,7 +129,6 @@ export default { const hasEndSpace = this.state.label[this.state.label.length - 1] == ' '; const list = this.state.label.trim().split(' ').filter(label => label != ''); this.state.label = list.slice(0, MAX_LABEL_COUNT).join(' ') + (hasEndSpace && list.length < MAX_LABEL_COUNT ? ' ' : ''); - }, submit() { if (!this.checkName()) { @@ -181,19 +139,20 @@ export default { return; } var radio = document.getElementsByName("isPrivate"); - if(radio != null && radio.length > 0){ - for (var i=0; i 0) { + for (var i = 0; i < radio.length; i++) { if (radio[i].checked) { - this.state.isPrivate=radio[i].value; + this.state.isPrivate = radio[i].value; } } - }else{ + } else { this.state.isPrivate = true; } const submintApi = this.type == '1' ? 
modifyModel : saveLocalModel; submintApi({ repo: location.pathname.split('/').slice(0, 3).join('/'), ...this.state, + label: this.state.label.split(/\s+/).join(' ').trim(), }).then(res => { res = res.data; if (res && res.code == '0') { @@ -203,7 +162,7 @@ export default { } const list = window.location.href.split('/'); list.pop(); - list.push('create_local_model_2'); + list.push('model_fileupload_tmpl'); window.location.href = list.join('/') + '?type=0&name=' + encodeURIComponent(this.state.name) + '&id=' + res.id; } else if (res && res.code == '-1') { this.$message({ @@ -257,12 +216,12 @@ export default { this.state.label = data.label; this.state.description = data.description; this.state.isPrivate = data.isPrivate; - if(data.isPrivate){ - $('#isPrivate_true').attr("checked",true); - $('#isPrivate_false').attr("checked",false); - }else{ - $('#isPrivate_true').attr("checked",false); - $('#isPrivate_false').attr("checked",true); + if (data.isPrivate) { + $('#isPrivate_true').attr("checked", true); + $('#isPrivate_false').attr("checked", false); + } else { + $('#isPrivate_true').attr("checked", false); + $('#isPrivate_false').attr("checked", true); } } }).catch(err => { @@ -272,8 +231,7 @@ export default { }); } }, - beforeDestroy() { - }, + beforeDestroy() { }, }; diff --git a/web_src/vuepages/pages/modelmanage/local/vp-modelmanage-local-create.js b/web_src/vuepages/pages/modelmanage/local/vp-modelmanage-local-create.js new file mode 100644 index 0000000000..48e5425001 --- /dev/null +++ b/web_src/vuepages/pages/modelmanage/local/vp-modelmanage-local-create.js @@ -0,0 +1,17 @@ +import Vue from 'vue'; +import ElementUI from 'element-ui'; +import 'element-ui/lib/theme-chalk/index.css'; +import localeEn from 'element-ui/lib/locale/lang/en'; +import localeZh from 'element-ui/lib/locale/lang/zh-CN'; +import { i18n, lang } from '~/langs'; +import App from './index.vue'; + +Vue.use(ElementUI, { + locale: lang === 'zh-CN' ? 
localeZh : localeEn, + size: 'small', +}); + +new Vue({ + i18n, + render: (h) => h(App), +}).$mount('#__vue-root'); diff --git a/web_src/vuepages/pages/modelmanage/settings/index.vue b/web_src/vuepages/pages/modelmanage/settings/index.vue new file mode 100644 index 0000000000..1787a36a6e --- /dev/null +++ b/web_src/vuepages/pages/modelmanage/settings/index.vue @@ -0,0 +1,422 @@ + + + + + diff --git a/web_src/vuepages/pages/modelmanage/settings/vp-model-settings.js b/web_src/vuepages/pages/modelmanage/settings/vp-model-settings.js new file mode 100644 index 0000000000..48e5425001 --- /dev/null +++ b/web_src/vuepages/pages/modelmanage/settings/vp-model-settings.js @@ -0,0 +1,17 @@ +import Vue from 'vue'; +import ElementUI from 'element-ui'; +import 'element-ui/lib/theme-chalk/index.css'; +import localeEn from 'element-ui/lib/locale/lang/en'; +import localeZh from 'element-ui/lib/locale/lang/zh-CN'; +import { i18n, lang } from '~/langs'; +import App from './index.vue'; + +Vue.use(ElementUI, { + locale: lang === 'zh-CN' ? 
localeZh : localeEn, + size: 'small', +}); + +new Vue({ + i18n, + render: (h) => h(App), +}).$mount('#__vue-root'); diff --git a/web_src/vuepages/pages/modelsquare/square/components/ModelCondition.vue b/web_src/vuepages/pages/modelsquare/square/components/ModelCondition.vue new file mode 100644 index 0000000000..b13d8aecd8 --- /dev/null +++ b/web_src/vuepages/pages/modelsquare/square/components/ModelCondition.vue @@ -0,0 +1,168 @@ + + + + + diff --git a/web_src/vuepages/pages/modelsquare/square/components/ModelFilters.vue b/web_src/vuepages/pages/modelsquare/square/components/ModelFilters.vue new file mode 100644 index 0000000000..12072e7376 --- /dev/null +++ b/web_src/vuepages/pages/modelsquare/square/components/ModelFilters.vue @@ -0,0 +1,247 @@ + + + + + diff --git a/web_src/vuepages/pages/modelsquare/square/components/ModelItem.vue b/web_src/vuepages/pages/modelsquare/square/components/ModelItem.vue new file mode 100644 index 0000000000..40c1e498f1 --- /dev/null +++ b/web_src/vuepages/pages/modelsquare/square/components/ModelItem.vue @@ -0,0 +1,242 @@ + + + + + diff --git a/web_src/vuepages/pages/modelsquare/square/components/ModelList.vue b/web_src/vuepages/pages/modelsquare/square/components/ModelList.vue new file mode 100644 index 0000000000..9fd320c131 --- /dev/null +++ b/web_src/vuepages/pages/modelsquare/square/components/ModelList.vue @@ -0,0 +1,173 @@ + + + + + diff --git a/web_src/vuepages/pages/modelsquare/square/index.vue b/web_src/vuepages/pages/modelsquare/square/index.vue new file mode 100644 index 0000000000..ec402d7b75 --- /dev/null +++ b/web_src/vuepages/pages/modelsquare/square/index.vue @@ -0,0 +1,107 @@ + + + + + diff --git a/web_src/vuepages/pages/modelsquare/square/vp-model-square.js b/web_src/vuepages/pages/modelsquare/square/vp-model-square.js new file mode 100644 index 0000000000..48e5425001 --- /dev/null +++ b/web_src/vuepages/pages/modelsquare/square/vp-model-square.js @@ -0,0 +1,17 @@ +import Vue from 'vue'; +import ElementUI from 
'element-ui'; +import 'element-ui/lib/theme-chalk/index.css'; +import localeEn from 'element-ui/lib/locale/lang/en'; +import localeZh from 'element-ui/lib/locale/lang/zh-CN'; +import { i18n, lang } from '~/langs'; +import App from './index.vue'; + +Vue.use(ElementUI, { + locale: lang === 'zh-CN' ? localeZh : localeEn, + size: 'small', +}); + +new Vue({ + i18n, + render: (h) => h(App), +}).$mount('#__vue-root'); diff --git a/web_src/vuepages/pages/reward/point/utils.js b/web_src/vuepages/pages/reward/point/utils.js index e2a06f86a6..9341fb2a6d 100644 --- a/web_src/vuepages/pages/reward/point/utils.js +++ b/web_src/vuepages/pages/reward/point/utils.js @@ -125,7 +125,7 @@ export const getRewardPointRecordInfo = (record) => { : `${getPointAction(record.SourceTemplateId)}(${i18n.t('repositoryWasDel')})`; break; case 'CreateNewModelTask': // 导入新模型 - 导入了新模型resnet50_qx7l - out.remark = record.Action ? `${i18n.t('createdNewModel')}${record.Action?.RefName}` + out.remark = record.Action ? `${i18n.t('createdNewModel')}${record.Action?.RefName}` : `${getPointAction(record.SourceTemplateId)}(${i18n.t('repositoryWasDel')})`; break; case 'BindWechat': // 完成微信扫码验证 - 首次绑定微信奖励 diff --git a/web_src/vuepages/utils/index.js b/web_src/vuepages/utils/index.js index ac53a00245..f5904cd815 100644 --- a/web_src/vuepages/utils/index.js +++ b/web_src/vuepages/utils/index.js @@ -127,3 +127,16 @@ export const timeSinceUnix = (then, now) => { const out = computeTimeDiff(diff); return i18n.t(lbl, { msg: out.diffStr }); }; + +export const setWebpackPublicPath = () => { + // This sets up webpack's chunk loading to load resources from the 'public' + // directory. This file must be imported before any lazy-loading is being attempted. 
+ if (document.currentScript && document.currentScript.src) { + const url = new URL(document.currentScript.src); + __webpack_public_path__ = url.pathname.replace(/\/[^/]*?\/[^/]*?$/, '/'); + } else { + // compat: IE11 + const script = document.querySelector('script[src*="/index.js"]'); + __webpack_public_path__ = script.getAttribute('src').replace(/\/[^/]*?\/[^/]*?$/, '/'); + } +}; diff --git a/web_src/vuepages/utils/treelayout/algorithm.js b/web_src/vuepages/utils/treelayout/algorithm.js new file mode 100644 index 0000000000..8286c73883 --- /dev/null +++ b/web_src/vuepages/utils/treelayout/algorithm.js @@ -0,0 +1,208 @@ +class Tree { + constructor(width, height, y, children) { + this.w = width + this.h = height + this.y = y + this.c = children + this.cs = children.length + + this.x = 0 + this.prelim = 0 + this.mod = 0 + this.shift = 0 + this.change = 0 + this.tl = null // Left thread + this.tr = null // Right thread + this.el = null // extreme left nodes + this.er = null // extreme right nodes + //sum of modifiers at the extreme nodes + this.msel = 0 + this.mser = 0 + } +} + +function setExtremes(tree) { + if (tree.cs === 0) { + tree.el = tree + tree.er = tree + tree.msel = tree.mser = 0 + } else { + tree.el = tree.c[0].el + tree.msel = tree.c[0].msel + tree.er = tree.c[tree.cs - 1].er + tree.mser = tree.c[tree.cs - 1].mser + } +} + +function bottom(tree) { + return tree.y + tree.h +} + +/* A linked list of the indexes of left siblings and their lowest vertical coordinate. + */ +class IYL { + constructor(lowY, index, next) { + this.lowY = lowY + this.index = index + this.next = next + } +} + +function updateIYL(minY, i, ih) { + // Remove siblings that are hidden by the new subtree. + while (ih !== null && minY >= ih.lowY) { + // Prepend the new subtree + ih = ih.next + } + return new IYL(minY, i, ih) +} + +function distributeExtra(tree, i, si, distance) { + // Are there intermediate children? 
+ if (si !== i - 1) { + const nr = i - si + tree.c[si + 1].shift += distance / nr + tree.c[i].shift -= distance / nr + tree.c[i].change -= distance - distance / nr + } +} + +function moveSubtree(tree, i, si, distance) { + // Move subtree by changing mod. + tree.c[i].mod += distance + tree.c[i].msel += distance + tree.c[i].mser += distance + distributeExtra(tree, i, si, distance) +} + +function nextLeftContour(tree) { + return tree.cs === 0 ? tree.tl : tree.c[0] +} + +function nextRightContour(tree) { + return tree.cs === 0 ? tree.tr : tree.c[tree.cs - 1] +} + +function setLeftThread(tree, i, cl, modsumcl) { + const li = tree.c[0].el + li.tl = cl + // Change mod so that the sum of modifier after following thread is correct. + const diff = (modsumcl - cl.mod) - tree.c[0].msel + li.mod += diff + // Change preliminary x coordinate so that the node does not move. + li.prelim -= diff + // Update extreme node and its sum of modifiers. + tree.c[0].el = tree.c[i].el + tree.c[0].msel = tree.c[i].msel +} + +// Symmetrical to setLeftThread +function setRightThread(tree, i, sr, modsumsr) { + const ri = tree.c[i].er + ri.tr = sr + const diff = (modsumsr - sr.mod) - tree.c[i].mser + ri.mod += diff + ri.prelim -= diff + tree.c[i].er = tree.c[i - 1].er + tree.c[i].mser = tree.c[i - 1].mser +} + +function seperate(tree, i, ih) { + // Right contour node of left siblings and its sum of modifiers. + let sr = tree.c[i - 1] + let mssr = sr.mod + // Left contour node of right siblings and its sum of modifiers. + let cl = tree.c[i] + let mscl = cl.mod + while (sr !== null && cl !== null) { + if (bottom(sr) > ih.lowY) { + ih = ih.next + } + // How far to the left of the right side of sr is the left side of cl. 
+ const distance = mssr + sr.prelim + sr.w - (mscl + cl.prelim) + if (distance > 0) { + mscl += distance + moveSubtree(tree, i, ih.index, distance) + } + + const sy = bottom(sr) + const cy = bottom(cl) + if (sy <= cy) { + sr = nextRightContour(sr) + if (sr !== null) { + mssr += sr.mod + } + } + if (sy >= cy) { + cl = nextLeftContour(cl) + if (cl !== null) { + mscl += cl.mod + } + } + } + + // Set threads and update extreme nodes. + // In the first case, the current subtree must be taller than the left siblings. + if (sr === null && cl !== null) { + setLeftThread(tree, i, cl, mscl) + } else if (sr !== null && cl === null) { + setRightThread(tree, i, sr, mssr) + } +} + +function positionRoot(tree) { + // Position root between children, taking into account their mod. + tree.prelim = + (tree.c[0].prelim + + tree.c[0].mod + + tree.c[tree.cs - 1].mod + + tree.c[tree.cs - 1].prelim + + tree.c[tree.cs - 1].w) / + 2 - + tree.w / 2 +} + +function firstWalk(tree) { + if (tree.cs === 0) { + setExtremes(tree) + return + } + + firstWalk(tree.c[0]) + let ih = updateIYL(bottom(tree.c[0].el), 0, null) + for (let i = 1; i < tree.cs; i++) { + firstWalk(tree.c[i]) + const minY = bottom(tree.c[i].er) + seperate(tree, i, ih) + ih = updateIYL(minY, i, ih) + } + positionRoot(tree) + setExtremes(tree) +} + +function addChildSpacing(tree) { + let d = 0 + let modsumdelta = 0 + for (let i = 0; i < tree.cs; i++) { + d += tree.c[i].shift + modsumdelta += d + tree.c[i].change + tree.c[i].mod += modsumdelta + } +} + +function secondWalk(tree, modsum) { + modsum += tree.mod + // Set absolute (no-relative) horizontal coordinates. 
+ tree.x = tree.prelim + modsum + addChildSpacing(tree) + for (let i = 0; i < tree.cs; i++) { + secondWalk(tree.c[i], modsum) + } +} + +function layout(tree) { + firstWalk(tree) + secondWalk(tree, 0) +} + +export { Tree, layout } diff --git a/web_src/vuepages/utils/treelayout/helpers.js b/web_src/vuepages/utils/treelayout/helpers.js new file mode 100644 index 0000000000..97156d3b05 --- /dev/null +++ b/web_src/vuepages/utils/treelayout/helpers.js @@ -0,0 +1,128 @@ +import { layout, Tree } from './algorithm' + +class BoundingBox { + /** + * @param {number} gap - the gap between sibling nodes + * @param {number} bottomPadding - the height reserved for connection drawing + */ + constructor(gap, bottomPadding) { + this.gap = gap + this.bottomPadding = bottomPadding + } + + addBoundingBox(width, height) { + return { width: width + this.gap, height: height + this.bottomPadding } + } + + /** + * Return the coordinate without the bounding box for a node + */ + removeBoundingBox(x, y) { + return { x: x + this.gap / 2, y } + } +} + +class Layout { + constructor(boundingBox) { + this.bb = boundingBox + } + + /** + * Layout treeData. + * Return modified treeData and the bounding box encompassing all the nodes. + * + * See getSize() for more explanation. + */ + layout(treeData) { + const tree = this.convert(treeData) + layout(tree) + const { boundingBox, result } = this.assignLayout(tree, treeData) + + return { result, boundingBox } + } + + /** + * Returns Tree to layout, with bounding boxes added to each node. 
+ */ + convert(treeData, y = 0) { + if (treeData === null) return null + + const { width, height } = this.bb.addBoundingBox( + treeData.width, + treeData.height + ) + let children = [] + if (treeData.children && treeData.children.length) { + for (let i = 0; i < treeData.children.length; i++) { + children[i] = this.convert(treeData.children[i], y + height) + } + } + + return new Tree(width, height, y, children) + } + + /** + * Assign layout tree x, y coordinates back to treeData, + * with bounding boxes removed. + */ + assignCoordinates(tree, treeData) { + const { x, y } = this.bb.removeBoundingBox(tree.x, tree.y) + treeData.x = x + treeData.y = y + for (let i = 0; i < tree.c.length; i++) { + this.assignCoordinates(tree.c[i], treeData.children[i]) + } + } + + /** + * Return the bounding box that encompasses all the nodes. + * The result has a structure of + * { left: number, right: number, top: number, bottom: nubmer}. + * This is not the same bounding box concept as the `BoundingBox` class + * used to construct `Layout` class. + */ + getSize(treeData, box = null) { + const { x, y, width, height } = treeData + if (box === null) { + box = { left: x, right: x + width, top: y, bottom: y + height } + } + box.left = Math.min(box.left, x) + box.right = Math.max(box.right, x + width) + box.top = Math.min(box.top, y) + box.bottom = Math.max(box.bottom, y + height) + + if (treeData.children) { + for (const child of treeData.children) { + this.getSize(child, box) + } + } + + return box + } + + /** + * This function does assignCoordinates and getSize in one pass. 
+ */ + assignLayout(tree, treeData, box = null) { + const { x, y } = this.bb.removeBoundingBox(tree.x, tree.y) + treeData.x = x + treeData.y = y + + const { width, height } = treeData + if (box === null) { + box = { left: x, right: x + width, top: y, bottom: y + height } + } + box.left = Math.min(box.left, x) + box.right = Math.max(box.right, x + width) + box.top = Math.min(box.top, y) + box.bottom = Math.max(box.bottom, y + height) + + for (let i = 0; i < tree.c.length; i++) { + this.assignLayout(tree.c[i], treeData.children[i], box) + } + + return { result: treeData, boundingBox: box } + } +} + +export { Layout, BoundingBox } diff --git a/web_src/vuepages/utils/treelayout/index.js b/web_src/vuepages/utils/treelayout/index.js new file mode 100644 index 0000000000..c5f000c6d2 --- /dev/null +++ b/web_src/vuepages/utils/treelayout/index.js @@ -0,0 +1,4 @@ +import { layout, Tree } from './algorithm' +import { BoundingBox, Layout } from './helpers' + +export { layout, Tree, BoundingBox, Layout }