#2580, #2369, #1962, #1803

Merged
lewis merged 11 commits from zouap_static into V20220801 1 year ago
  1. models/dbsql/dataset_foreigntable_for_es.sql (+14, -10)
  2. models/dbsql/repo_foreigntable_for_es.sql (+1, -1)
  3. models/repo_activity_custom.go (+3, -0)
  4. models/user_analysis_for_activity.go (+1, -1)
  5. modules/cloudbrain/cloudbrain.go (+2, -2)
  6. modules/git/repo_stats_custom.go (+1, -1)
  7. routers/repo/cloudbrain.go (+14, -8)
  8. routers/search.go (+26, -2)
  9. web_src/js/components/dataset/selectDataset.vue (+2, -1)

models/dbsql/dataset_foreigntable_for_es.sql (+14, -10)

@@ -158,16 +158,20 @@ DROP TRIGGER IF EXISTS es_update_dataset on public.dataset;
CREATE OR REPLACE FUNCTION public.update_dataset() RETURNS trigger AS
$def$
BEGIN
UPDATE public.dataset_es
SET description=NEW.description,
title=NEW.title,
category=NEW.category,
task=NEW.task,
download_times=NEW.download_times,
updated_unix=NEW.updated_unix,
file_name=(select array_to_string(array_agg(name order by created_unix desc),'-#,#-') from public.attachment where dataset_id=NEW.id and is_private=false),
file_desc=(select array_to_string(array_agg(description order by created_unix desc),'-#,#-') from public.attachment where dataset_id=NEW.id and is_private=false)
where id=NEW.id;
if (NEW.status=0) then
delete from public.dataset_es where id=NEW.id;
elsif (NEW.status=1) then
UPDATE public.dataset_es
SET description=NEW.description,
title=NEW.title,
category=NEW.category,
task=NEW.task,
download_times=NEW.download_times,
updated_unix=NEW.updated_unix,
file_name=(select array_to_string(array_agg(name order by created_unix desc),'-#,#-') from public.attachment where dataset_id=NEW.id and is_private=false),
file_desc=(select array_to_string(array_agg(description order by created_unix desc),'-#,#-') from public.attachment where dataset_id=NEW.id and is_private=false)
where id=NEW.id;
end if;
return new;
END
$def$
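
With this change the update trigger keys off the dataset's status: setting status to 0 removes the row from the public.dataset_es mirror table, while status 1 re-syncs the metadata and the public attachment name/description lists. Below is a minimal sketch for checking that behavior against a local database; the Postgres DSN, the driver import, and the dataset id 1 are assumptions for illustration, not part of the PR.

// Sketch: exercising the updated trigger against a local Postgres instance.
// DSN, driver, and dataset id are placeholders.
package main

import (
	"database/sql"
	"fmt"
	"log"

	_ "github.com/lib/pq"
)

func main() {
	db, err := sql.Open("postgres", "postgres://gitea:gitea@localhost:5432/gitea?sslmode=disable")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// Setting status to 0 should make update_dataset() drop the mirror row.
	if _, err := db.Exec(`UPDATE public.dataset SET status = 0 WHERE id = $1`, 1); err != nil {
		log.Fatal(err)
	}

	var remaining int
	if err := db.QueryRow(`SELECT count(*) FROM public.dataset_es WHERE id = $1`, 1).Scan(&remaining); err != nil {
		log.Fatal(err)
	}
	fmt.Println("dataset_es rows for id 1:", remaining) // expected 0 after the update above
}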


models/dbsql/repo_foreigntable_for_es.sql (+1, -1)

@@ -461,7 +461,7 @@ $def$

if not OLD.is_private and NEW.is_private then
delete from public.issue_es where repo_id=NEW.id;
delete from public.dataset_es where repo_id=NEW.id;
-- delete from public.dataset_es where repo_id=NEW.id;
delete from public.repository_es where id=NEW.id;
end if;



models/repo_activity_custom.go (+3, -0)

@@ -238,6 +238,9 @@ func GetAllUserPublicRepoKPIStats(startTime time.Time, endTime time.Time) (map[s
CommitLines: 0,
}
}
if value.Email == "1250125907@qq.com" || value.Email == "peiyongyu-34@163.com" {
log.Info("repo path=" + repository.RepoPath())
}
authors[key].Commits += value.Commits
authors[key].CommitLines += value.CommitLines



models/user_analysis_for_activity.go (+1, -1)

@@ -195,7 +195,7 @@ func queryPullRequestPublic(start_unix int64, end_unix int64, publicAllRepo map[
sess := x.NewSession()
defer sess.Close()
resultMap := make(map[int64]int)
cond := "pull_request.merged_unix>=" + fmt.Sprint(start_unix) + " and pull_request.merged_unix<=" + fmt.Sprint(end_unix)
cond := "issue.created_unix>=" + fmt.Sprint(start_unix) + " and issue.created_unix<=" + fmt.Sprint(end_unix)
count, err := sess.Table("issue").Join("inner", "pull_request", "issue.id=pull_request.issue_id").Where(cond).Count(new(Issue))
if err != nil {
log.Info("query issue error. return.")
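
The counting condition moves from pull_request.merged_unix to issue.created_unix, so queryPullRequestPublic now counts pull requests whose issues were opened inside the window rather than those merged inside it. For reference, the same query in parameterized form, written as a self-contained sketch; the engine setup, DSN, and trimmed Issue struct are assumptions, and the PR itself keeps the string-built condition.

// Sketch: the new condition expressed with bound parameters instead of string concatenation.
package main

import (
	"fmt"
	"log"

	_ "github.com/lib/pq"
	"xorm.io/xorm"
)

// Issue carries only the column needed for the count; the real model lives in models/issue.go.
type Issue struct {
	ID int64 `xorm:"pk autoincr 'id'"`
}

func main() {
	x, err := xorm.NewEngine("postgres", "postgres://gitea:gitea@localhost:5432/gitea?sslmode=disable")
	if err != nil {
		log.Fatal(err)
	}
	sess := x.NewSession()
	defer sess.Close()

	startUnix, endUnix := int64(1656633600), int64(1659312000) // example window bounds
	count, err := sess.Table("issue").
		Join("INNER", "pull_request", "issue.id=pull_request.issue_id").
		Where("issue.created_unix>=? and issue.created_unix<=?", startUnix, endUnix).
		Count(new(Issue))
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("pull requests created in window:", count)
}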


modules/cloudbrain/cloudbrain.go (+2, -2)

@@ -326,7 +326,7 @@ func GenerateTask(req GenerateCloudBrainTaskReq) error {
ReadOnly: true,
},
})
} else {
} else if len(req.DatasetInfos) > 1 {
for _, dataset := range req.DatasetInfos {
volumes = append(volumes, models.Volume{
HostPath: models.StHostPath{
@@ -547,7 +547,7 @@ func RestartTask(ctx *context.Context, task *models.Cloudbrain, newID *string) e
GPUNumber: resourceSpec.GpuNum,
MemoryMB: resourceSpec.MemMiB,
ShmMB: resourceSpec.ShareMemMiB,
Command: GetCloudbrainDebugCommand(),//Command,
Command: GetCloudbrainDebugCommand(), //Command,
NeedIBDevice: false,
IsMainRole: false,
UseNNI: false,
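
Turning the plain else into else if len(req.DatasetInfos) > 1 means a task submitted with no dataset never enters the per-dataset volume loop, so no dataset volume is mounted at all; the single-dataset case is presumably still handled by the if branch above this hunk. A small self-contained illustration of the resulting branching, with illustrative names rather than the real cloudbrain types:

// datasetMounts mimics the branch structure after this change: one read-only mount
// for a single dataset, one per dataset when several are attached, and none when
// no dataset is selected (the case this PR starts to allow).
package main

import "fmt"

func datasetMounts(datasets []string) []string {
	var mounts []string
	if len(datasets) == 1 {
		mounts = append(mounts, datasets[0]+":ro")
	} else if len(datasets) > 1 {
		for _, d := range datasets {
			mounts = append(mounts, d+":ro")
		}
	}
	return mounts
}

func main() {
	fmt.Println(datasetMounts(nil))                // [] — no dataset attached
	fmt.Println(datasetMounts([]string{"mnist"}))  // [mnist:ro]
	fmt.Println(datasetMounts([]string{"a", "b"})) // [a:ro b:ro]
}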


modules/git/repo_stats_custom.go (+1, -1)

@@ -62,7 +62,7 @@ func GetUserKPIStats(repoPath string, startTime time.Time, endTime time.Time) (m

after := startTime.Format(time.RFC3339)
until := endTime.Format(time.RFC3339)
args := []string{"log", "--numstat", "--no-merges", "--branches=*", "--pretty=format:---%n%h%n%an%n%ae%n", "--date=iso", fmt.Sprintf("--after='%s'", after), fmt.Sprintf("--until=='%s'", until)}
args := []string{"log", "--numstat", "--no-merges", "--branches=*", "--pretty=format:---%n%h%n%an%n%ae%n", "--date=iso", fmt.Sprintf("--after='%s'", after), fmt.Sprintf("--until='%s'", until)}
stdout, err := NewCommand(args...).RunInDirBytes(repoPath)
if err != nil {
return nil, err


routers/repo/cloudbrain.go (+14, -8)

@@ -2,7 +2,6 @@ package repo

import (
"bufio"
"code.gitea.io/gitea/modules/notification"
"encoding/json"
"errors"
"fmt"
@@ -16,6 +15,8 @@ import (
"time"
"unicode/utf8"

"code.gitea.io/gitea/modules/notification"

"code.gitea.io/gitea/modules/grampus"

"code.gitea.io/gitea/modules/timeutil"
@@ -475,6 +476,7 @@ func CloudBrainNew(ctx *context.Context) {
ctx.ServerError("get new cloudbrain info failed", err)
return
}
ctx.Data["PageIsGPUDebug"] = true
ctx.HTML(200, tplCloudBrainNew)
}

@@ -540,13 +542,17 @@ func CloudBrainCreate(ctx *context.Context, form auth.CreateCloudBrainForm) {
return
}
}

datasetInfos, datasetNames, err := models.GetDatasetInfo(uuids)
if err != nil {
log.Error("GetDatasetInfo failed: %v", err, ctx.Data["MsgID"])
cloudBrainTrainJobErrorPrepare(ctx, form)
ctx.RenderWithErr(ctx.Tr("cloudbrain.error.dataset_select"), tpl, &form)
return
var datasetInfos map[string]models.DatasetInfo
var datasetNames string
//var
if uuids != "" {
datasetInfos, datasetNames, err = models.GetDatasetInfo(uuids)
if err != nil {
log.Error("GetDatasetInfo failed: %v", err, ctx.Data["MsgID"])
cloudBrainNewDataPrepare(ctx)
ctx.RenderWithErr(ctx.Tr("cloudbrain.error.dataset_select"), tpl, &form)
return
}
}

command := cloudbrain.GetCloudbrainDebugCommand()
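
GetDatasetInfo is now called only when uuids is non-empty, so for a dataset-less debug task datasetInfos simply stays a nil map and datasetNames stays empty. That is safe in Go: len() and range over a nil map behave exactly like an empty one, which keeps the len(req.DatasetInfos) checks in GenerateTask well-defined. A quick demonstration, with DatasetInfo standing in for models.DatasetInfo:

package main

import "fmt"

// DatasetInfo is a stand-in for models.DatasetInfo; only the nil-map behavior matters here.
type DatasetInfo struct{ Name string }

func main() {
	var datasetInfos map[string]DatasetInfo // declared but never assigned, as when uuids == ""

	fmt.Println(len(datasetInfos)) // 0 — len of a nil map is zero
	for _, d := range datasetInfos {
		fmt.Println(d.Name) // never runs: ranging over a nil map yields nothing
	}
	fmt.Println(datasetInfos == nil) // true, yet perfectly usable for reads
}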


routers/search.go (+26, -2)

@@ -313,9 +313,8 @@ func searchRepo(ctx *context.Context, TableName string, Key string, Page int, Pa

res, err := client.Search(TableName).Query(boolQ).SortBy(getSort(SortBy, ascending, "num_stars", false)...).From(from).Size(Size).Highlight(queryHighlight("alias", "description", "topics")).Do(ctx.Req.Context())
if err == nil {
searchJson, _ := json.Marshal(res)
log.Info("searchJson=" + string(searchJson))
esresult := makeRepoResult(res, Key, OnlyReturnNum, language)
setForkRepoOrder(esresult)
resultObj.Total = resultObj.PrivateTotal + esresult.Total
isNeedSort := false
if len(resultObj.Result) > 0 {
@@ -348,6 +347,30 @@ func searchRepo(ctx *context.Context, TableName string, Key string, Page int, Pa
}
}

func setForkRepoOrder(esresult *SearchRes) {
forkidMap := make(map[string]int, 0)
for index, re := range esresult.Result {
if re["fork_id"] != nil {
fork_id := re["fork_id"].(string)
if _, ok := forkidMap[fork_id]; !ok {
forkidMap[fork_id] = index
}
}
}
for key, value := range forkidMap {
for index, re := range esresult.Result {
if re["id"].(string) == key {
if value < index { //swap
tmp := esresult.Result[index]
esresult.Result[index] = esresult.Result[value]
esresult.Result[value] = tmp
break
}
}
}
}
}

func sortRepo(Result []map[string]interface{}, SortBy string, ascending bool) {
orderBy := ""
switch SortBy {
@@ -479,6 +502,7 @@ func makeRepoResult(sRes *elastic.SearchResult, Key string, OnlyReturnNum bool,
record["num_stars"] = recordSource["num_stars"]
record["num_forks"] = recordSource["num_forks"]
record["lower_alias"] = recordSource["lower_alias"]
record["fork_id"] = recordSource["fork_id"]
if recordSource["topics"] != nil {
topicsStr := recordSource["topics"].(string)
log.Info("topicsStr=" + topicsStr)


web_src/js/components/dataset/selectDataset.vue (+2, -1)

@@ -955,7 +955,8 @@ export default {
) {
this.benchmarkNew = true;
}
if (location.href.indexOf("modelarts/notebook/create") !== -1) {
if (location.href.indexOf("modelarts/notebook/create") !== -1 || location.href.indexOf("/cloudbrain/create") !== -1) {
console.log("required is false;");
this.required = false;
}
window.onresize = () => {

