@@ -1,7 +1,14 @@
package repo
import (
"encoding/json"
"fmt"
"net/http"
"regexp"
"sort"
"strconv"
"strings"
"unicode/utf8"
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/auth"
@@ -12,9 +19,14 @@ import (
)
const (
tplIndex base.TplName = "repo/datasets/index"
tplIndex base.TplName = "repo/datasets/index"
tplDatasetCreate base.TplName = "repo/datasets/create"
tplDatasetEdit base.TplName = "repo/datasets/edit"
taskstplIndex base.TplName = "repo/datasets/tasks/index"
)
var titlePattern = regexp.MustCompile(`^[A-Za-z0-9-_\\.]{1,100}$`)
// MustEnableDataset check if repository enable internal dataset
func MustEnableDataset(ctx *context.Context) {
if !ctx.Repo.CanRead(models.UnitTypeDatasets) {
@@ -84,43 +96,34 @@ func QueryDataSet(ctx *context.Context) []*models.Attachment {
attachments := newFilterPrivateAttachments(ctx, dataset.Attachments, repo)
ctx.Data["SortType"] = ctx.Query("sort")
switch ctx.Query("sort") {
case "newest":
sort.Slice(attachments, func(i, j int) bool {
return attachments[i].CreatedUnix > attachments[j].CreatedUnix
})
case "oldest":
sort.Slice(attachments, func(i, j int) bool {
return attachments[i].CreatedUnix < attachments[j].CreatedUnix
})
default:
ctx.Data["SortType"] = "newest"
sort.Slice(attachments, func(i, j int) bool {
return attachments[i].CreatedUnix > attachments[j].CreatedUnix
})
}
sort.Slice(attachments, func(i, j int) bool {
return attachments[i].CreatedUnix > attachments[j].CreatedUnix
})
return attachments
}
func DatasetIndex(ctx *context.Context) {
log.Info("dataset index 1")
MustEnableDataset(ctx)
ctx.Data["PageIsDataset"] = true
repo := ctx.Repo.Repository
dataset, err := models.GetDatasetByRepo(repo)
ctx.Data["CanWrite"] = ctx.Repo.CanWrite(models.UnitTypeDatasets)
if err != nil {
log.Error("query dataset, not found repo .")
ctx.NotFound("GetDatasetByRepo", err )
log.Warn("query dataset, not found .")
ctx.HTML(200, tplIndex )
return
}
cloudbrainType := -1
if ctx.Query("type") != "" {
if ctx.Query("type") == "" {
log.Error("query dataset, not found param type")
ctx.NotFound("type error", nil)
return
cloudbrainType = ctx.QueryInt("type")
}
err = models.GetDatasetAttachments(ctx.QueryInt("type") , ctx.IsSigned, ctx.User, dataset)
err = models.GetDatasetAttachments(cloudbrainType, ctx.IsSigned, ctx.User, dataset)
if err != nil {
ctx.ServerError("GetDatasetAttachments", err)
return
@@ -128,53 +131,138 @@ func DatasetIndex(ctx *context.Context) {
attachments := newFilterPrivateAttachments(ctx, dataset.Attachments, repo)
ctx.Data["SortType"] = ctx.Query("sort")
switch ctx.Query("sort") {
case "newest":
sort.Slice(attachments, func(i, j int) bool {
return attachments[i].CreatedUnix > attachments[j].CreatedUnix
})
case "oldest":
sort.Slice(attachments, func(i, j int) bool {
return attachments[i].CreatedUnix < attachments[j].CreatedUnix
})
default:
ctx.Data["SortType"] = "newest"
sort.Slice(attachments, func(i, j int) bool {
return attachments[i].CreatedUnix > attachments[j].CreatedUnix
})
sort.Slice(attachments, func(i, j int) bool {
return attachments[i].CreatedUnix > attachments[j].CreatedUnix
})
page := ctx.QueryInt("page")
if page <= 0 {
page = 1
}
pagesize := ctx.QueryInt("pagesize")
if pagesize <= 0 {
pagesize = 10
}
pager := context.NewPagination(len(attachments), pagesize, page, 5)
pageAttachments := getPageAttachments(attachments, page, pagesize)
//load attachment creator
for _, attachment := range pageAttachments {
uploader, _ := models.GetUserByID(attachment.UploaderID)
attachment.Uploader = uploader
}
ctx.Data["Page"] = pager
ctx.Data["PageIsDataset"] = true
ctx.Data["Title"] = ctx.Tr("dataset.show_dataset")
ctx.Data["Link"] = ctx.Repo.RepoLink + "/datasets"
ctx.Data["dataset"] = dataset
ctx.Data["Attachments"] = attachments
ctx.Data["Attachments"] = p ageA ttachments
ctx.Data["IsOwner"] = true
ctx.Data["StoreType"] = setting.Attachment.StoreType
ctx.Data["Type"] = ctx.QueryInt("type")
ctx.Data["Type"] = cloudbrainType
renderAttachmentSettings(ctx)
ctx.HTML(200, tplIndex)
}
// getPageAttachments returns the slice of attachments belonging to the given
// 1-based page, with pagesize entries per page. It returns nil when the page
// lies beyond the end of the list and clamps a partial final page.
func getPageAttachments(attachments []*models.Attachment, page int, pagesize int) []*models.Attachment {
	// Callers normally sanitize page/pagesize, but a non-positive value
	// would make begin negative and panic on slicing below.
	if page <= 0 || pagesize <= 0 {
		return nil
	}
	begin := (page - 1) * pagesize
	if begin >= len(attachments) {
		// Requested page starts past the last attachment.
		return nil
	}
	end := begin + pagesize
	if end > len(attachments) {
		end = len(attachments)
	}
	return attachments[begin:end]
}
// CreateDataset renders the dataset creation page for the current
// repository, provided the repository has the dataset unit enabled.
func CreateDataset(ctx *context.Context) {
	MustEnableDataset(ctx)

	ctx.Data["PageIsDataset"] = true
	ctx.HTML(200, tplDatasetCreate)
}
// EditDataset renders the edit page for the dataset identified by the
// ":id" URL parameter, or responds 404 when no such dataset exists.
func EditDataset(ctx *context.Context) {
	MustEnableDataset(ctx)
	ctx.Data["PageIsDataset"] = true

	id, _ := strconv.ParseInt(ctx.Params(":id"), 10, 64)
	dataset, _ := models.GetDatasetByID(id)
	if dataset == nil {
		ctx.Error(http.StatusNotFound, "")
		return
	}

	ctx.Data["Dataset"] = dataset
	ctx.HTML(200, tplDatasetEdit)
}
// CreateDatasetPost handles the dataset creation form submission. It
// validates the title against titlePattern and the description length,
// derives the initial visibility from the repository's privacy, and
// responds with a JSON status message in all cases.
func CreateDatasetPost(ctx *context.Context, form auth.CreateDatasetForm) {
	if !titlePattern.MatchString(form.Title) {
		ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.title_format_err")))
		return
	}
	// The limit is counted in runes, not bytes, so multi-byte text is
	// not penalized.
	if utf8.RuneCountInString(form.Description) > 1024 {
		ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.description_format_err")))
		return
	}

	dataset := &models.Dataset{}
	dataset.RepoID = ctx.Repo.Repository.ID
	dataset.UserID = ctx.User.ID
	dataset.Category = form.Category
	dataset.Task = form.Task
	dataset.Title = form.Title
	dataset.License = form.License
	dataset.Description = form.Description
	dataset.DownloadTimes = 0
	// A private repository starts with a hidden dataset (status 0);
	// a public one is visible immediately (status 1).
	if ctx.Repo.Repository.IsPrivate {
		dataset.Status = 0
	} else {
		dataset.Status = 1
	}

	if err := models.CreateDataset(dataset); err != nil {
		// Bug fix: the original format string had no verb, so err was
		// never rendered into the log line.
		log.Error("fail to create dataset: %v", err)
		ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.create_dataset_fail")))
		return
	}
	ctx.JSON(http.StatusOK, models.BaseOKMessage)
}
func EditDatasetPost(ctx *context.Context, form auth.EditDatasetForm) {
ctx.Data["PageIsDataset"] = true
ctx.Data["Title"] = ctx.Tr("dataset.edit_dataset")
if !titlePattern.MatchString(form.Title) {
ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.title_format_err")))
return
}
if utf8.RuneCountInString(form.Description) > 1024 {
ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.description_format_err")))
return
}
rel, err := models.GetDatasetByID(form.ID)
ctx.Data["dataset"] = rel
if err != nil {
ctx.ServerError("GetDataset", err)
return
}
if ctx.HasError() {
ctx.Data["Error"] = true
ctx.HTML(200, tplIndex)
log.Error("failed to query dataset", err)
ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.query_dataset_fail")))
return
}
@@ -184,9 +272,236 @@ func EditDatasetPost(ctx *context.Context, form auth.EditDatasetForm) {
rel.Task = form.Task
rel.License = form.License
if err = models.UpdateDataset(models.DefaultDBContext(), rel); err != nil {
ctx.Data["Error"] = true
ctx.HTML(200, tplIndex)
log.Error("%v", err)
ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("dataset.query_dataset_fail")))
}
ctx.Redirect(ctx.Repo.RepoLink + "/datasets?type=" + form.Type)
ctx.JSON(http.StatusOK, models.BaseOKMessage)
}
// DatasetAction stars or unstars the dataset identified by the ":id"
// URL parameter on behalf of the signed-in user; the ":action" URL
// parameter selects the operation ("star" or "unstar"). The result is
// returned as a JSON status message.
func DatasetAction(ctx *context.Context) {
	var err error
	datasetId, _ := strconv.ParseInt(ctx.Params(":id"), 10, 64)
	switch ctx.Params(":action") {
	case "star":
		err = models.StarDataset(ctx.User.ID, datasetId, true)
	case "unstar":
		err = models.StarDataset(ctx.User.ID, datasetId, false)
	default:
		// Bug fix: an unrecognized action previously fell through with a
		// nil error and reported success; report it as a failure instead.
		ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("repo.star_fail", ctx.Params(":action"))))
		return
	}
	if err != nil {
		ctx.JSON(http.StatusOK, models.BaseErrorMessage(ctx.Tr("repo.star_fail", ctx.Params(":action"))))
	} else {
		ctx.JSON(http.StatusOK, models.BaseOKMessage)
	}
}
// CurrentRepoDataset lists the zip attachments of the current
// repository's dataset as JSON, filtered by the optional "q" keyword
// and "type" (cloudbrain type) query parameters and paged by "page".
func CurrentRepoDataset(ctx *context.Context) {
	page := ctx.QueryInt("page")
	cloudbrainType := ctx.QueryInt("type")
	keyword := strings.Trim(ctx.Query("q"), " ")

	repo := ctx.Repo.Repository
	dataset, err := models.GetDatasetByRepo(repo)
	if err != nil {
		// Bug fix: the original passed the literal string
		// "GetDatasetByRepo failed" to ctx.Tr, which expects a
		// translation key; report the lookup failure as a server error,
		// consistent with the Attachments error path below.
		ctx.ServerError("GetDatasetByRepo", err)
		return
	}

	var datasetIDs []int64
	datasetIDs = append(datasetIDs, dataset.ID)
	datasets, count, err := models.Attachments(&models.AttachmentsOptions{
		ListOptions: models.ListOptions{
			Page:     page,
			PageSize: setting.UI.DatasetPagingNum,
		},
		Keyword:         keyword,
		NeedDatasetIDs:  true,
		DatasetIDs:      datasetIDs,
		Type:            cloudbrainType,
		NeedIsPrivate:   false,
		JustNeedZipFile: true,
		NeedRepoInfo:    true,
	})
	if err != nil {
		ctx.ServerError("datasets", err)
		return
	}

	data, err := json.Marshal(datasets)
	if err != nil {
		// Bug fix: the original format string had no verb, so the error
		// text was appended as an extra value instead of formatted.
		log.Error("json.Marshal failed: %v", err)
		ctx.JSON(200, map[string]string{
			"result_code": "-1",
			"error_msg":   err.Error(),
			"data":        "",
		})
		return
	}
	ctx.JSON(200, map[string]string{
		"result_code": "0",
		"data":        string(data),
		"count":       strconv.FormatInt(count, 10),
	})
}
// MyDatasets lists the zip attachments uploaded by the signed-in user
// as JSON, filtered by the optional "q" keyword and "type" (cloudbrain
// type) query parameters and paged by "page".
func MyDatasets(ctx *context.Context) {
	opts := &models.AttachmentsOptions{
		ListOptions: models.ListOptions{
			Page:     ctx.QueryInt("page"),
			PageSize: setting.UI.DatasetPagingNum,
		},
		Keyword:         strings.Trim(ctx.Query("q"), " "),
		NeedDatasetIDs:  false,
		UploaderID:      ctx.User.ID,
		Type:            ctx.QueryInt("type"),
		NeedIsPrivate:   false,
		JustNeedZipFile: true,
		NeedRepoInfo:    true,
	}
	datasets, count, err := models.Attachments(opts)
	if err != nil {
		ctx.ServerError("datasets", err)
		return
	}

	payload, err := json.Marshal(datasets)
	if err != nil {
		log.Error("json.Marshal failed:", err.Error())
		ctx.JSON(200, map[string]string{
			"result_code": "-1",
			"error_msg":   err.Error(),
			"data":        "",
		})
		return
	}

	ctx.JSON(200, map[string]string{
		"result_code": "0",
		"data":        string(payload),
		"count":       strconv.FormatInt(count, 10),
	})
}
// PublicDataset lists publicly visible zip attachments as JSON,
// filtered by the optional "q" keyword and "type" (cloudbrain type)
// query parameters and paged by "page".
func PublicDataset(ctx *context.Context) {
	opts := &models.AttachmentsOptions{
		ListOptions: models.ListOptions{
			Page:     ctx.QueryInt("page"),
			PageSize: setting.UI.DatasetPagingNum,
		},
		Keyword:         strings.Trim(ctx.Query("q"), " "),
		NeedDatasetIDs:  false,
		NeedIsPrivate:   true,
		IsPrivate:       false,
		Type:            ctx.QueryInt("type"),
		JustNeedZipFile: true,
		NeedRepoInfo:    true,
	}
	datasets, count, err := models.Attachments(opts)
	if err != nil {
		ctx.ServerError("datasets", err)
		return
	}

	payload, err := json.Marshal(datasets)
	if err != nil {
		log.Error("json.Marshal failed:", err.Error())
		ctx.JSON(200, map[string]string{
			"result_code": "-1",
			"error_msg":   err.Error(),
			"data":        "",
		})
		return
	}

	ctx.JSON(200, map[string]string{
		"result_code": "0",
		"data":        string(payload),
		"count":       strconv.FormatInt(count, 10),
	})
}
// MyFavoriteDataset lists the zip attachments of the datasets starred
// by the signed-in user as JSON, filtered by the optional "q" keyword
// and "type" (cloudbrain type) query parameters and paged by "page".
func MyFavoriteDataset(ctx *context.Context) {
	page := ctx.QueryInt("page")
	cloudbrainType := ctx.QueryInt("type")
	keyword := strings.Trim(ctx.Query("q"), " ")

	datasetStars, err := models.GetDatasetStarByUser(ctx.User)
	if err != nil {
		// Bug fix: the original wrote two JSON responses on this path (a
		// BaseErrorMessage with a non-key string passed to ctx.Tr,
		// followed by this result map); keep only the result-map
		// response used by the sibling handlers.
		log.Error("GetDatasetStarByUser failed:", err.Error())
		ctx.JSON(200, map[string]string{
			"result_code": "-1",
			"error_msg":   err.Error(),
			"data":        "",
		})
		return
	}

	datasetIDs := make([]int64, 0, len(datasetStars))
	for _, star := range datasetStars {
		datasetIDs = append(datasetIDs, star.DatasetID)
	}

	datasets, count, err := models.Attachments(&models.AttachmentsOptions{
		ListOptions: models.ListOptions{
			Page:     page,
			PageSize: setting.UI.DatasetPagingNum,
		},
		Keyword:         keyword,
		NeedDatasetIDs:  true,
		DatasetIDs:      datasetIDs,
		NeedIsPrivate:   true,
		IsPrivate:       false,
		Type:            cloudbrainType,
		JustNeedZipFile: true,
		NeedRepoInfo:    true,
	})
	if err != nil {
		ctx.ServerError("datasets", err)
		return
	}

	data, err := json.Marshal(datasets)
	if err != nil {
		log.Error("json.Marshal failed:", err.Error())
		ctx.JSON(200, map[string]string{
			"result_code": "-1",
			"error_msg":   err.Error(),
			"data":        "",
		})
		return
	}
	ctx.JSON(200, map[string]string{
		"result_code": "0",
		"data":        string(data),
		"count":       strconv.FormatInt(count, 10),
	})
}
// GetDatasetStatus reports the decompress state of the attachment
// identified by the ":uuid" URL parameter as a JSON map containing the
// UUID and its AttachmentStatus.
func GetDatasetStatus(ctx *context.Context) {
	uuid := ctx.Params(":uuid")
	attachment, err := models.GetAttachmentByUUID(uuid)
	if err != nil {
		// Bug fix: the log message was copy-pasted from another handler
		// and named GetDatasetStarByUser instead of GetAttachmentByUUID.
		log.Error("GetAttachmentByUUID failed:", err.Error())
		ctx.JSON(200, map[string]string{
			"result_code": "-1",
			"error_msg":   err.Error(),
			"data":        "",
		})
		return
	}
	ctx.JSON(200, map[string]string{
		"result_code":      "0",
		"UUID":             uuid,
		"AttachmentStatus": fmt.Sprint(attachment.DecompressState),
	})
}