@@ -2,6 +2,7 @@ package models | |||
import ( | |||
"encoding/json" | |||
"errors" | |||
"fmt" | |||
"io/ioutil" | |||
"net/http" | |||
@@ -351,10 +352,10 @@ func QueryUserStaticDataForUserDefine(opts *UserBusinessAnalysisQueryOptions, wi | |||
} | |||
CommitDatasetSizeMap, CommitDatasetNumMap, _ := queryDatasetSize(start_unix, end_unix) | |||
SolveIssueCountMap := querySolveIssue(start_unix, end_unix) | |||
CreateRepoCountMap, _, _ := queryUserCreateRepo(start_unix, end_unix) | |||
CreateRepoCountMap, _, _, _ := queryUserCreateRepo(start_unix, end_unix) | |||
LoginCountMap := queryLoginCount(start_unix, end_unix) | |||
OpenIIndexMap := queryUserRepoOpenIIndex(start_unix, end_unix) | |||
CloudBrainTaskMap, CloudBrainTaskItemMap := queryCloudBrainTask(start_unix, end_unix) | |||
CloudBrainTaskMap, CloudBrainTaskItemMap, _ := queryCloudBrainTask(start_unix, end_unix) | |||
AiModelManageMap := queryUserModel(start_unix, end_unix) | |||
AiModelConvertMap := queryUserModelConvert(start_unix, end_unix) | |||
@@ -628,11 +629,11 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS | |||
CommitDatasetSizeMap, CommitDatasetNumMap, _ := queryDatasetSize(start_unix, end_unix) | |||
SolveIssueCountMap := querySolveIssue(start_unix, end_unix) | |||
CreateRepoCountMap, _, _ := queryUserCreateRepo(start_unix, end_unix) | |||
CreateRepoCountMap, _, _, _ := queryUserCreateRepo(start_unix, end_unix) | |||
LoginCountMap := queryLoginCount(start_unix, end_unix) | |||
OpenIIndexMap := queryUserRepoOpenIIndex(startTime.Unix(), end_unix) | |||
CloudBrainTaskMap, CloudBrainTaskItemMap := queryCloudBrainTask(start_unix, end_unix) | |||
CloudBrainTaskMap, CloudBrainTaskItemMap, _ := queryCloudBrainTask(start_unix, end_unix) | |||
AiModelManageMap := queryUserModel(start_unix, end_unix) | |||
AiModelConvertMap := queryUserModelConvert(start_unix, end_unix) | |||
@@ -771,132 +772,6 @@ func refreshUserStaticTable(wikiCountMap map[string]int, tableName string, pageS | |||
log.Info("refresh data finished.tableName=" + tableName + " total record:" + fmt.Sprint(insertCount)) | |||
} | |||
// RefreshUserYearTable rebuilds the user_summary_current_year statistics
// table from scratch for the [pageStartTime, pageEndTime] window: it gathers
// per-user activity maps (PRs, commits, issues, datasets, cloudbrain tasks,
// bonus records, ...), truncates the summary table, then pages through all
// active non-organization users and inserts one summary row each.
//
// NOTE(review): this copy calls queryUserCreateRepo expecting 3 return values
// and queryCloudBrainTask expecting 2, but this patch changes those functions
// to return 4 and 3 values respectively — confirm this older copy is removed
// by the patch, otherwise it will no longer compile.
func RefreshUserYearTable(pageStartTime time.Time, pageEndTime time.Time) {
	sess := x.NewSession()
	defer sess.Close()
	log.Info("RefreshUserYearTable start....")
	// Statistics live in a separate database/engine (xStatistic).
	statictisSess := xStatistic.NewSession()
	defer statictisSess.Close()
	log.Info("UserYear StartTime:" + pageStartTime.Format("2006-01-02 15:04:05"))
	log.Info("UserYear EndTime time:" + pageEndTime.Format("2006-01-02 15:04:05"))
	start_unix := pageStartTime.Unix()
	end_unix := pageEndTime.Unix()
	// Pre-compute all per-user aggregates for the window, keyed by user ID.
	CodeMergeCountMap := queryPullRequest(start_unix, end_unix)
	CommitCountMap := queryCommitAction(start_unix, end_unix, 5)
	mostActiveMap := queryMostActiveCommitAction(start_unix, end_unix)
	IssueCountMap := queryCreateIssue(start_unix, end_unix)
	CommentCountMap := queryComment(start_unix, end_unix)
	// Commit code size is keyed by email, not by user ID (see lookup below).
	CommitCodeSizeMap, err := GetAllUserKPIStats(pageStartTime, pageEndTime)
	if err != nil {
		log.Info("query commit code errr.")
	} else {
		log.Info("query commit code size, len=" + fmt.Sprint(len(CommitCodeSizeMap)))
	}
	CommitDatasetSizeMap, CommitDatasetNumMap, dataSetDownloadMap := queryDatasetSize(start_unix, end_unix)
	SolveIssueCountMap := querySolveIssue(start_unix, end_unix)
	CreateRepoCountMap, DetailInfoMap, MostDownloadMap := queryUserCreateRepo(start_unix, end_unix)
	CloudBrainTaskMap, CloudBrainTaskItemMap := queryCloudBrainTask(start_unix, end_unix)
	_, CollectedDataset := queryDatasetStars(start_unix, end_unix)
	_, CreatedDataset := queryRecommedDataSet(start_unix, end_unix)
	bonusMap := getBonusMap()
	// Full rebuild: drop all previous rows before re-inserting.
	log.Info("truncate all data from table:user_summary_current_year ")
	statictisSess.Exec("TRUNCATE TABLE user_summary_current_year")
	// type != 1 excludes organization accounts; only active users are counted.
	cond := "type != 1 and is_active=true"
	count, err := sess.Where(cond).Count(new(User))
	if err != nil {
		log.Info("query user error. return.")
		return
	}
	var indexTotal int64
	indexTotal = 0
	// Page through users PAGE_SIZE at a time until all `count` rows are seen.
	for {
		sess.Select("`user`.*").Table("user").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal))
		userList := make([]*User, 0)
		sess.Find(&userList)
		for _, userRecord := range userList {
			var dateRecordAll UserBusinessAnalysisAll
			dateRecordAll.ID = userRecord.ID
			dateRecordAll.Email = userRecord.Email
			dateRecordAll.Phone = userRecord.PhoneNumber
			dateRecordAll.RegistDate = userRecord.CreatedUnix
			dateRecordAll.Name = userRecord.Name
			dateRecordAll.CodeMergeCount = getMapValue(dateRecordAll.ID, CodeMergeCountMap)
			dateRecordAll.CommitCount = getMapValue(dateRecordAll.ID, CommitCountMap)
			dateRecordAll.IssueCount = getMapValue(dateRecordAll.ID, IssueCountMap)
			dateRecordAll.CommentCount = getMapValue(dateRecordAll.ID, CommentCountMap)
			if _, ok := CommitCodeSizeMap[dateRecordAll.Email]; !ok {
				dateRecordAll.CommitCodeSize = 0
			} else {
				dateRecordAll.CommitCodeSize = int(CommitCodeSizeMap[dateRecordAll.Email].CommitLines)
			}
			//dateRecordAll.CommitCodeSize = getMapValue(dateRecordAll.ID, CommitCodeSizeMap)
			dateRecordAll.CommitDatasetSize = getMapValue(dateRecordAll.ID, CommitDatasetSizeMap)
			dateRecordAll.CommitDatasetNum = getMapValue(dateRecordAll.ID, CommitDatasetNumMap)
			dateRecordAll.SolveIssueCount = getMapValue(dateRecordAll.ID, SolveIssueCountMap)
			dateRecordAll.CreateRepoCount = getMapValue(dateRecordAll.ID, CreateRepoCountMap)
			dateRecordAll.CloudBrainTaskNum = getMapValue(dateRecordAll.ID, CloudBrainTaskMap)
			// Cloudbrain per-category counters are keyed "<userID>_<category>".
			dateRecordAll.GpuDebugJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_GpuDebugJob", CloudBrainTaskItemMap)
			dateRecordAll.NpuDebugJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_NpuDebugJob", CloudBrainTaskItemMap)
			dateRecordAll.GpuTrainJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_GpuTrainJob", CloudBrainTaskItemMap)
			dateRecordAll.NpuTrainJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_NpuTrainJob", CloudBrainTaskItemMap)
			dateRecordAll.NpuInferenceJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_NpuInferenceJob", CloudBrainTaskItemMap)
			dateRecordAll.GpuBenchMarkJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_GpuBenchMarkJob", CloudBrainTaskItemMap)
			dateRecordAll.CloudBrainRunTime = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_CloudBrainRunTime", CloudBrainTaskItemMap)
			// Yearly data
			subTime := time.Now().UTC().Sub(dateRecordAll.RegistDate.AsTime().UTC())
			mostActiveDay := ""
			if userInfo, ok := mostActiveMap[dateRecordAll.ID]; ok {
				mostActiveDay = getMostActiveJson(userInfo)
			}
			// scoreMap collects partial scores (dataset/code/roll/...) as a
			// side effect of the helpers below.
			scoreMap := make(map[string]float64)
			repoInfo := getRepoDetailInfo(DetailInfoMap, dateRecordAll.ID, MostDownloadMap)
			dataSetInfo, datasetscore := getDataSetInfo(dateRecordAll.ID, CreatedDataset, dataSetDownloadMap, CommitDatasetNumMap, CollectedDataset)
			scoreMap["datasetscore"] = datasetscore
			codeInfo, codescore := getCodeInfo(&dateRecordAll)
			scoreMap["codescore"] = codescore
			cloudBrainInfo := getCloudBrainInfo(&dateRecordAll, CloudBrainTaskItemMap, scoreMap)
			playARoll := getPlayARoll(bonusMap, dateRecordAll.Name, scoreMap)
			// DateCount is days-since-registration, rounding any partial day up.
			exteral := 0
			if int(subTime.Hours())%24 > 0 {
				exteral = 1
			}
			re := &UserSummaryCurrentYear{
				ID:             dateRecordAll.ID,
				Name:           dateRecordAll.Name,
				Email:          dateRecordAll.Email,
				Phone:          dateRecordAll.Phone,
				RegistDate:     dateRecordAll.RegistDate,
				DateCount:      int(subTime.Hours())/24 + exteral,
				MostActiveDay:  mostActiveDay,
				RepoInfo:       repoInfo,
				DataSetInfo:    dataSetInfo,
				CodeInfo:       codeInfo,
				CloudBrainInfo: cloudBrainInfo,
				PlayARoll:      playARoll,
			}
			statictisSess.Insert(re)
		}
		indexTotal += PAGE_SIZE
		if indexTotal >= count {
			break
		}
	}
	log.Info("update user year data finished. ")
}
func getBonusWeekDataMap() map[int64][]int { | |||
bonusMap := make(map[int64][]int) | |||
url := setting.RecommentRepoAddr + "bonus/weekdata/record.txt" | |||
@@ -967,47 +842,6 @@ func getWeekAndNum(name string) (int, int) { | |||
return 0, 0 | |||
} | |||
func getBonusMap() map[string]map[string]int { | |||
bonusMap := make(map[string]map[string]int) | |||
url := setting.RecommentRepoAddr + "bonus/record.txt" | |||
content, err := GetContentFromPromote(url) | |||
if err == nil { | |||
filenames := strings.Split(content, "\n") | |||
for i := 0; i < len(filenames); i++ { | |||
if strings.HasSuffix(filenames[i], "\r") { | |||
filenames[i] = filenames[i][0 : len(filenames[i])-len("\r")] | |||
} | |||
url = setting.RecommentRepoAddr + "bonus/" + filenames[i] | |||
csvContent, err1 := GetContentFromPromote(url) | |||
if err1 == nil { | |||
//read csv | |||
lines := strings.Split(csvContent, "\n") | |||
for j := 1; j < len(lines); j++ { | |||
if strings.HasSuffix(lines[j], "\r") { | |||
lines[j] = lines[j][0 : len(lines[j])-len("\r")] | |||
} | |||
aLine := strings.Split(lines[j], ",") | |||
if len(aLine) < 7 { | |||
continue | |||
} | |||
userName := aLine[1] | |||
//email := lines[2] | |||
record, ok := bonusMap[userName] | |||
if !ok { | |||
record = make(map[string]int) | |||
bonusMap[userName] = record | |||
} | |||
record["times"] = getMapKeyStringValue("times", record) + getIntValue(aLine[3]) | |||
record["total_bonus"] = getMapKeyStringValue("total_bonus", record) + getIntValue(aLine[4]) | |||
record["total_cardtime"] = getMapKeyStringValue("total_cardtime", record) + getIntValue(aLine[5]) | |||
record["total_giveup"] = getMapKeyStringValue("total_giveup", record) + getIntValue(aLine[6]) | |||
} | |||
} | |||
} | |||
} | |||
return bonusMap | |||
} | |||
func getIntValue(val string) int { | |||
i, err := strconv.Atoi(val) | |||
if err == nil { | |||
@@ -1024,32 +858,14 @@ func getInt64Value(val string) int64 { | |||
return 0 | |||
} | |||
// getPlayARoll renders the bonus-campaign summary for userName as a JSON
// string and records the derived "rollscore" into scoreMap.
//
// Returns "" (and leaves scoreMap untouched) when the user has no bonus
// record. The roll score is times/4, but only once the user has taken part
// at least 4 times; otherwise it stays 0.
func getPlayARoll(bonusMap map[string]map[string]int, userName string, scoreMap map[string]float64) string {
	record, ok := bonusMap[userName]
	if !ok {
		return ""
	}
	rollscore := 0.0
	if record["times"] >= 4 {
		rollscore = float64(record["times"]) / float64(4)
	}
	scoreMap["rollscore"] = rollscore
	bonusInfo := map[string]string{
		"times":          fmt.Sprint(record["times"]),
		"total_bonus":    fmt.Sprint(record["total_bonus"]),
		"total_cardtime": fmt.Sprint(record["total_cardtime"]),
		"total_giveup":   fmt.Sprint(record["total_giveup"]),
	}
	bonusInfoJson, _ := json.Marshal(bonusInfo)
	return string(bonusInfoJson)
}
func getCloudBrainInfo(dateRecordAll *UserBusinessAnalysisAll, CloudBrainTaskItemMap map[string]int, scoreMap map[string]float64) string { | |||
//2023:有XX 个使用启智集群资源,有XX 个使用智算网络集群,使用过的计算资源有GPU NPU GCU | |||
//2023:你的所有任务累计运行了XXX卡时,其中 GPU资源运行XX卡时 NPU资源运行XX卡时 GCU资源运行XX卡时 | |||
func getCloudBrainInfo(dateRecordAll *UserBusinessAnalysisAll, CloudBrainTaskItemMap map[string]int, scoreMap map[string]float64, resourceItemMap map[string]int) string { | |||
trainscore := 0.0 | |||
debugscore := 0.0 | |||
runtime := 0.0 | |||
if dateRecordAll.CloudBrainTaskNum > 0 { | |||
cloudBrainInfo := make(map[string]string) | |||
cloudBrainInfo := make(map[string]interface{}) | |||
cloudBrainInfo["create_task_num"] = fmt.Sprint(dateRecordAll.CloudBrainTaskNum) | |||
cloudBrainInfo["debug_task_num"] = fmt.Sprint(dateRecordAll.GpuDebugJob + dateRecordAll.NpuDebugJob) | |||
if dateRecordAll.GpuDebugJob+dateRecordAll.NpuDebugJob >= 50 { | |||
@@ -1066,6 +882,9 @@ func getCloudBrainInfo(dateRecordAll *UserBusinessAnalysisAll, CloudBrainTaskIte | |||
cloudBrainInfo["CloudBrainOne"] = fmt.Sprint(CloudBrainTaskItemMap[fmt.Sprint(dateRecordAll.ID)+"_CloudBrainOne"]) | |||
cloudBrainInfo["CloudBrainTwo"] = fmt.Sprint(CloudBrainTaskItemMap[fmt.Sprint(dateRecordAll.ID)+"_CloudBrainTwo"]) | |||
cloudBrainInfo["C2Net"] = fmt.Sprint(CloudBrainTaskItemMap[fmt.Sprint(dateRecordAll.ID)+"_C2Net"]) | |||
if resourceItemMap != nil { | |||
cloudBrainInfo["computer_resource"] = resourceItemMap | |||
} | |||
cloudBrainInfoJson, _ := json.Marshal(cloudBrainInfo) | |||
scoreMap["trainscore"] = trainscore | |||
scoreMap["debugscore"] = debugscore | |||
@@ -1079,25 +898,6 @@ func getCloudBrainInfo(dateRecordAll *UserBusinessAnalysisAll, CloudBrainTaskIte | |||
} | |||
} | |||
func getCodeInfo(dateRecordAll *UserBusinessAnalysisAll) (string, float64) { | |||
if dateRecordAll.CommitCount > 0 { | |||
codeInfo := make(map[string]string) | |||
codeInfo["commit_count"] = fmt.Sprint(dateRecordAll.CommitCount) | |||
codeInfo["commit_line"] = fmt.Sprint(dateRecordAll.CommitCodeSize) | |||
score := 0.0 | |||
score = float64(dateRecordAll.CommitCodeSize) / float64(dateRecordAll.CommitCount) / float64(20000) | |||
if score < (float64(dateRecordAll.CommitCount) / float64(100)) { | |||
score = float64(dateRecordAll.CommitCount) / float64(100) | |||
} | |||
codeInfo["score"] = fmt.Sprintf("%.2f", score) | |||
codeInfoJson, _ := json.Marshal(codeInfo) | |||
return string(codeInfoJson), score | |||
} else { | |||
return "", 0 | |||
} | |||
} | |||
func getDataSetInfo(userId int64, CreatedDataset map[int64]int, dataSetDownloadMap map[int64]int, CommitDatasetNumMap map[int64]int, CollectedDataset map[int64]int) (string, float64) { | |||
datasetInfo := make(map[string]string) | |||
score := 0.0 | |||
@@ -1123,60 +923,6 @@ func getDataSetInfo(userId int64, CreatedDataset map[int64]int, dataSetDownloadM | |||
} | |||
} | |||
// getRepoDetailInfo renders a user's repository statistics as a JSON string.
//
// repoDetailInfoMap holds per-user counters keyed "<userID>_<suffix>";
// mostDownload maps a user ID to the display name of their most-downloaded
// repository. Only counters present in the maps appear in the output; ""
// is returned when the user has no data at all.
func getRepoDetailInfo(repoDetailInfoMap map[string]int, userId int64, mostDownload map[int64]string) string {
	repoDetailInfo := make(map[string]string)
	prefix := fmt.Sprint(userId)
	// Counter-key suffix -> JSON field name.
	fields := [][2]string{
		{"_total", "repo_total"},
		{"_is_private", "repo_is_private"},
		{"_is_public", "repo_is_public"},
		{"_total_download", "repo_total_download"},
		{"_most_download", "repo_most_download_count"},
	}
	for _, f := range fields {
		if v, ok := repoDetailInfoMap[prefix+f[0]]; ok {
			repoDetailInfo[f[1]] = fmt.Sprint(v)
		}
	}
	if name, ok := mostDownload[userId]; ok {
		repoDetailInfo["repo_most_download_name"] = name
	}
	if len(repoDetailInfo) == 0 {
		return ""
	}
	repoDetailInfoJson, _ := json.Marshal(repoDetailInfo)
	return string(repoDetailInfoJson)
}
// getMostActiveJson renders a user's activity map as a JSON string.
//
// userInfo mixes two kinds of entries: the special hour_day/hour_hour/
// hour_month/hour_year quadruple (the most active pre-dawn hour) and
// per-day activity counters. The hour_* entries, when present, are folded
// into a single "year/month/day hour" string under "before_dawn" and
// REMOVED from userInfo (the map is mutated), so the max scan below only
// sees per-day counters. The day with the highest count becomes
// "most_active_day"/"most_active_num"; ties resolve nondeterministically
// (map iteration order).
func getMostActiveJson(userInfo map[string]int) string {
	mostActiveMap := make(map[string]string)
	if day, ok := userInfo["hour_day"]; ok {
		hour, month, year := userInfo["hour_hour"], userInfo["hour_month"], userInfo["hour_year"]
		for _, k := range []string{"hour_day", "hour_hour", "hour_month", "hour_year"} {
			delete(userInfo, k)
		}
		mostActiveMap["before_dawn"] = fmt.Sprint(year) + "/" + fmt.Sprint(month) + "/" + fmt.Sprint(day) + " " + fmt.Sprint(hour)
	}
	maxNum, maxDay := 0, ""
	for day, num := range userInfo {
		if num > maxNum {
			maxNum, maxDay = num, day
		}
	}
	mostActiveMap["most_active_day"] = maxDay
	mostActiveMap["most_active_num"] = fmt.Sprint(maxNum)
	mostActiveMapJson, _ := json.Marshal(mostActiveMap)
	return string(mostActiveMapJson)
}
func updateUserIndex(tableName string, statictisSess *xorm.Session, userId int64, userIndex float64) { | |||
updateSql := "UPDATE public." + tableName + " set user_index=" + fmt.Sprint(userIndex*100) + " where id=" + fmt.Sprint(userId) | |||
statictisSess.Exec(updateSql) | |||
@@ -1281,10 +1027,10 @@ func CounDataByDateAndReCount(wikiCountMap map[string]int, startTime time.Time, | |||
} | |||
CommitDatasetSizeMap, CommitDatasetNumMap, _ := queryDatasetSize(start_unix, end_unix) | |||
SolveIssueCountMap := querySolveIssue(start_unix, end_unix) | |||
CreateRepoCountMap, _, _ := queryUserCreateRepo(start_unix, end_unix) | |||
CreateRepoCountMap, _, _, _ := queryUserCreateRepo(start_unix, end_unix) | |||
LoginCountMap := queryLoginCount(start_unix, end_unix) | |||
OpenIIndexMap := queryUserRepoOpenIIndex(start_unix, end_unix) | |||
CloudBrainTaskMap, CloudBrainTaskItemMap := queryCloudBrainTask(start_unix, end_unix) | |||
CloudBrainTaskMap, CloudBrainTaskItemMap, _ := queryCloudBrainTask(start_unix, end_unix) | |||
AiModelManageMap := queryUserModel(start_unix, end_unix) | |||
AiModelConvertMap := queryUserModelConvert(start_unix, end_unix) | |||
@@ -2308,7 +2054,7 @@ func queryDatasetSize(start_unix int64, end_unix int64) (map[int64]int, map[int6 | |||
return resultSizeMap, resultNumMap, resultDownloadMap | |||
} | |||
func queryUserCreateRepo(start_unix int64, end_unix int64) (map[int64]int, map[string]int, map[int64]string) { | |||
func queryUserCreateRepo(start_unix int64, end_unix int64) (map[int64]int, map[string]int, map[int64]string, map[string]map[string]interface{}) { | |||
sess := x.NewSession() | |||
defer sess.Close() | |||
resultMap := make(map[int64]int) | |||
@@ -2316,16 +2062,18 @@ func queryUserCreateRepo(start_unix int64, end_unix int64) (map[int64]int, map[s | |||
detailInfoMap := make(map[string]int) | |||
mostDownloadMap := make(map[int64]string) | |||
fourSeasonMap := make(map[string]map[string]interface{}) | |||
cond := "is_fork=false and created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix) | |||
count, err := sess.Where(cond).Count(new(Repository)) | |||
if err != nil { | |||
log.Info("query Repository error. return.") | |||
return resultMap, detailInfoMap, mostDownloadMap | |||
return resultMap, detailInfoMap, mostDownloadMap, fourSeasonMap | |||
} | |||
var indexTotal int64 | |||
indexTotal = 0 | |||
for { | |||
sess.Select("id,owner_id,name,is_private,clone_cnt,alias").Table("repository").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) | |||
sess.Select("id,owner_id,name,is_private,clone_cnt,alias,created_unix").Table("repository").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) | |||
repoList := make([]*Repository, 0) | |||
sess.Find(&repoList) | |||
log.Info("query Repository size=" + fmt.Sprint(len(repoList))) | |||
@@ -2350,6 +2098,7 @@ func queryUserCreateRepo(start_unix int64, end_unix int64) (map[int64]int, map[s | |||
detailInfoMap[key] = int(repoRecord.CloneCnt) | |||
mostDownloadMap[repoRecord.OwnerID] = repoRecord.DisplayName() | |||
} | |||
setFourSeasonData(repoRecord, fourSeasonMap) | |||
} | |||
indexTotal += PAGE_SIZE | |||
if indexTotal >= count { | |||
@@ -2357,7 +2106,29 @@ func queryUserCreateRepo(start_unix int64, end_unix int64) (map[int64]int, map[s | |||
} | |||
} | |||
return resultMap, detailInfoMap, mostDownloadMap | |||
return resultMap, detailInfoMap, mostDownloadMap, fourSeasonMap | |||
} | |||
func setFourSeasonData(repoRecord *Repository, fourSeason map[string]map[string]interface{}) { | |||
key := "" | |||
switch repoRecord.CreatedUnix.AsTime().Month() { | |||
case time.January, time.February, time.March: | |||
key = fmt.Sprint(repoRecord.OwnerID) + "_spring" | |||
case time.April, time.May, time.June: | |||
key = fmt.Sprint(repoRecord.OwnerID) + "_summer" | |||
case time.July, time.August, time.September: | |||
key = fmt.Sprint(repoRecord.OwnerID) + "_autumn" | |||
case time.October, time.November, time.December: | |||
key = fmt.Sprint(repoRecord.OwnerID) + "_winter" | |||
default: | |||
log.Info("no found") | |||
} | |||
repoInfo := make(map[string]interface{}) | |||
repoInfo["time"] = repoRecord.CreatedUnix | |||
repoInfo["displayName"] = repoRecord.DisplayName() | |||
if _, ok := fourSeason[key]; !ok { | |||
fourSeason[key] = repoInfo | |||
} | |||
} | |||
func queryUserRepoOpenIIndex(start_unix int64, end_unix int64) map[int64]float64 { | |||
@@ -2521,17 +2292,18 @@ func isBenchMark(JobType string) bool { | |||
return false | |||
} | |||
func queryCloudBrainTask(start_unix int64, end_unix int64) (map[int64]int, map[string]int) { | |||
func queryCloudBrainTask(start_unix int64, end_unix int64) (map[int64]int, map[string]int, map[int64]map[string]int) { | |||
sess := x.NewSession() | |||
defer sess.Close() | |||
resultMap := make(map[int64]int) | |||
resultItemMap := make(map[string]int) | |||
resourceItemMap := make(map[int64]map[string]int) | |||
cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix) | |||
count, err := sess.Where(cond).Unscoped().Count(new(Cloudbrain)) | |||
if err != nil { | |||
log.Info("query cloudbrain error. return.") | |||
return resultMap, resultItemMap | |||
return resultMap, resultItemMap, resourceItemMap | |||
} | |||
log.Info("cloudbrain count=" + fmt.Sprint(count)) | |||
var indexTotal int64 | |||
@@ -2547,8 +2319,15 @@ func queryCloudBrainTask(start_unix int64, end_unix int64) (map[int64]int, map[s | |||
} else { | |||
resultMap[cloudTaskRecord.UserID] += 1 | |||
} | |||
if _, ok := resourceItemMap[cloudTaskRecord.UserID]; !ok { | |||
resourceItemMap[cloudTaskRecord.UserID] = make(map[string]int) | |||
} | |||
if cloudTaskRecord.Duration < 100000000 && cloudTaskRecord.Duration > 0 { | |||
setMapKey("CloudBrainRunTime", cloudTaskRecord.UserID, int(cloudTaskRecord.Duration), resultItemMap) | |||
resourceItemMap[cloudTaskRecord.UserID][cloudTaskRecord.ComputeResource] = resourceItemMap[cloudTaskRecord.UserID][cloudTaskRecord.ComputeResource] + int(cloudTaskRecord.Duration) | |||
} else { | |||
resourceItemMap[cloudTaskRecord.UserID][cloudTaskRecord.ComputeResource] = 0 | |||
} | |||
if cloudTaskRecord.Type == 1 { //npu | |||
setMapKey("CloudBrainTwo", cloudTaskRecord.UserID, 1, resultItemMap) | |||
@@ -2583,6 +2362,8 @@ func queryCloudBrainTask(start_unix int64, end_unix int64) (map[int64]int, map[s | |||
} else if cloudTaskRecord.ComputeResource == GPUResource { | |||
if cloudTaskRecord.JobType == "TRAIN" { | |||
setMapKey("GpuTrainJob", cloudTaskRecord.UserID, 1, resultItemMap) | |||
} else if cloudTaskRecord.JobType == "ONLINEINFERENCE" { | |||
setMapKey("GpuInferenceJob", cloudTaskRecord.UserID, 1, resultItemMap) | |||
} else { | |||
setMapKey("GpuDebugJob", cloudTaskRecord.UserID, 1, resultItemMap) | |||
} | |||
@@ -2594,7 +2375,7 @@ func queryCloudBrainTask(start_unix int64, end_unix int64) (map[int64]int, map[s | |||
break | |||
} | |||
} | |||
return resultMap, resultItemMap | |||
return resultMap, resultItemMap, resourceItemMap | |||
} | |||
func queryUserInvitationCount(start_unix int64, end_unix int64) map[int64]int { | |||
@@ -2674,13 +2455,20 @@ func GetContentFromPromote(url string) (string, error) { | |||
} | |||
}() | |||
resp, err := http.Get(url) | |||
if err != nil || resp.StatusCode != 200 { | |||
if err != nil { | |||
log.Info("Get organizations url error=" + err.Error()) | |||
return "", err | |||
} | |||
if resp == nil { | |||
log.Info("respone is null") | |||
return "", errors.New("resp is null") | |||
} | |||
if resp.StatusCode != 200 { | |||
log.Info("respone code=" + fmt.Sprint(resp.StatusCode)) | |||
return "", errors.New("resp is null") | |||
} | |||
defer resp.Body.Close() | |||
bytes, err := ioutil.ReadAll(resp.Body) | |||
resp.Body.Close() | |||
if err != nil { | |||
log.Info("Get organizations url error=" + err.Error()) | |||
return "", err | |||
@@ -2,27 +2,6 @@ package models | |||
import "code.gitea.io/gitea/modules/timeutil" | |||
// UserSummaryCurrentYear is one row of the yearly per-user activity summary
// (table user_summary_current_year), rebuilt by RefreshUserYearTable.
// The string fields hold pre-rendered JSON fragments.
type UserSummaryCurrentYear struct {
	ID    int64  `xorm:"pk"`
	Email string `xorm:"NOT NULL"`
	//user
	Name  string `xorm:"NOT NULL"`
	Phone string `xorm:"NULL"`
	//user
	RegistDate    timeutil.TimeStamp `xorm:"NOT NULL"`
	DateCount     int                `xorm:"NOT NULL DEFAULT 0"`
	MostActiveDay string             `xorm:" NULL "` //08.05
	RepoInfo      string             `xorm:"varchar(1000)"` //created XX projects (XX public, XX private), downloaded XXX times in total; project 《XXXXXXX》 got the most downloads (XXX)
	DataSetInfo   string             `xorm:"varchar(500)"`  //created XX datasets, uploaded XX dataset files, downloaded XX times, favorited XX times
	CodeInfo      string             `xorm:"varchar(500)"`  //commit count, total committed code lines, latest commit time
	CloudBrainInfo string            `xorm:"varchar(1000)"` //created XX cloudbrain tasks: XX debug, XX train, XX inference; ran XXXX card-hours in total, saving XXXXX yuan
	//of these free compute resources, XX% came from Pengcheng Cloudbrain 1, XX% from Cloudbrain 2, XX% from the AI computing network (C2Net)
	PlayARoll     string `xorm:"varchar(500)"` //you took part in the "open-source leaderboard" campaign XX times, made the board XX times, and earned XXX yuan of community incentives
	WeekBonusData string `xorm:"-"`
	Label         string `xorm:"varchar(500)"`
}
type UserBusinessAnalysisCurrentYear struct { | |||
ID int64 `xorm:"pk"` | |||
CountDate int64 `xorm:"pk"` | |||
@@ -0,0 +1,640 @@ | |||
package models | |||
import ( | |||
"encoding/json" | |||
"fmt" | |||
"sort" | |||
"strings" | |||
"time" | |||
"code.gitea.io/gitea/modules/log" | |||
"code.gitea.io/gitea/modules/setting" | |||
"code.gitea.io/gitea/modules/timeutil" | |||
) | |||
// UserSummaryCurrentYear is one row of the yearly per-user activity summary
// (table user_summary_current_year), rebuilt by RefreshUserYearTable.
// Most string fields hold pre-rendered JSON fragments; the 2023 notes mark
// additions for the 2023 year-in-review report.
type UserSummaryCurrentYear struct {
	ID    int64  `xorm:"pk"`
	Email string `xorm:"NOT NULL"`
	//user
	Name  string `xorm:"NOT NULL"`
	Phone string `xorm:"NULL"`
	//user
	RegistDate    timeutil.TimeStamp `xorm:"NOT NULL"`
	DateCount     int                `xorm:"NOT NULL DEFAULT 0"`
	MostActiveDay string             `xorm:" NULL "` //08.05
	RepoInfo      string             `xorm:"varchar(1000)"` //created XX projects (XX public, XX private), downloaded XXX times in total; project 《XXXXXXX》 got the most downloads (XXX)
	//2023: adds info about the projects created in spring/summer/autumn/winter.
	DataSetInfo string `xorm:"varchar(500)"` //created XX datasets, uploaded XX dataset files, downloaded XX times, favorited XX times
	CodeInfo    string `xorm:"varchar(500)"` //commit count, total committed code lines, latest commit time
	//2023: everyone's code lines are ranked; record each user's rank
	CloudBrainInfo string `xorm:"varchar(1000)"` //created XX cloudbrain tasks: XX debug, XX train, XX inference; ran XXXX card-hours in total, saving XXXXX yuan
	//of these free compute resources, XX% came from Pengcheng Cloudbrain 1, XX% from Cloudbrain 2, XX% from the AI computing network (C2Net)
	//2023: XX tasks used the OpenI cluster and XX used the C2Net cluster; compute resources used include GPU NPU GCU
	//2023: all your tasks ran XXX card-hours in total, of which GPU ran XX, NPU ran XX and GCU ran XX card-hours
	PlayARoll     string `xorm:"varchar(500)"` //you took part in the "open-source leaderboard" campaign XX times, made the board XX times, and earned XXX yuan of community incentives
	WeekBonusData string `xorm:"-"`
	Label         string `xorm:"varchar(500)"`
	IssueInfo     string `xorm:"varchar(500)"` //2023: issue-related info, including created issues
	ModelInfo     string `xorm:"varchar(500)"` //2023: model info — number of models created, plus the most-downloaded and most-referenced models
	LoginCount    int    `xorm:"NOT NULL DEFAULT 0"` //2023: number of logins during the year
	ActionInfo    string `xorm:"varchar(500)"` //2023: the most diligent month (the month with the most activity on the personal activity page), plus that month's most active day
	AccumulatePointInfo string `xorm:"varchar(500)"` //2023: total points earned and spent, and the remaining balance
	ForumInfo     string `xorm:"varchar(500)"` //2023: you posted XX threads on the OpenI forum, getting XX views and XX likes in total; 《XXXXX》 got the most views (XX)
	CourseInfo    string `xorm:"varchar(500)"` //2023: you studied XX courses on OpenI and completed XX practical-training chapters; 40% were machine learning — list the 4 largest categories
	ActionDays    string `xorm:"varchar(500)"` //2023: number of days on which you were active on OpenI
}
func RefreshUserYearTable(pageStartTime time.Time, pageEndTime time.Time) { | |||
sess := x.NewSession() | |||
defer sess.Close() | |||
log.Info("RefreshUserYearTable start....") | |||
statictisSess := xStatistic.NewSession() | |||
defer statictisSess.Close() | |||
if time.Now().After(pageEndTime) { | |||
log.Info("The time is exceed. not need to truncate.") | |||
return | |||
} | |||
log.Info("UserYear StartTime:" + pageStartTime.Format("2006-01-02 15:04:05")) | |||
log.Info("UserYear EndTime time:" + pageEndTime.Format("2006-01-02 15:04:05")) | |||
start_unix := pageStartTime.Unix() | |||
end_unix := pageEndTime.Unix() | |||
CodeMergeCountMap := queryPullRequest(start_unix, end_unix) | |||
CommitCountMap := queryCommitAction(start_unix, end_unix, 5) | |||
mostActiveMap := queryMostActiveCommitAction(start_unix, end_unix) | |||
IssueCountMap := queryCreateIssue(start_unix, end_unix) | |||
UserYearModel := queryUserYearModel(start_unix, end_unix) | |||
CommentCountMap := queryComment(start_unix, end_unix) | |||
LoginMap := queryLoginCount(start_unix, end_unix) | |||
existCommitCodeSize := queryCommitCodeSizeFromDb("public.user_business_analysis_current_year") | |||
commitCodeArrays := make([]int, 0) | |||
for _, v := range existCommitCodeSize { | |||
commitCodeArrays = append(commitCodeArrays, v) | |||
} | |||
sort.Ints(commitCodeArrays) | |||
//log.Info("query commit code size, len=" + fmt.Sprint(len(existCommitCodeSize))) | |||
CommitDatasetSizeMap, CommitDatasetNumMap, dataSetDownloadMap := queryDatasetSize(start_unix, end_unix) | |||
SolveIssueCountMap := querySolveIssue(start_unix, end_unix) | |||
CreateRepoCountMap, DetailInfoMap, MostDownloadMap, fourSeasonMap := queryUserCreateRepo(start_unix, end_unix) | |||
CloudBrainTaskMap, CloudBrainTaskItemMap, resourceItemMap := queryCloudBrainTask(start_unix, end_unix) | |||
_, CollectedDataset := queryDatasetStars(start_unix, end_unix) | |||
_, CreatedDataset := queryRecommedDataSet(start_unix, end_unix) | |||
PointMap := queryPointInfo(start_unix, end_unix) | |||
MostActiveDayMap := queryMostActiveMonth(start_unix, end_unix) | |||
bonusMap := getBonusMap() | |||
forumMap := getForumMap() | |||
actionDays := queryActionDays(start_unix, end_unix) | |||
log.Info("truncate all data from table:user_summary_current_year ") | |||
statictisSess.Exec("TRUNCATE TABLE user_summary_current_year") | |||
cond := "type != 1 and is_active=true" | |||
count, err := sess.Where(cond).Count(new(User)) | |||
if err != nil { | |||
log.Info("query user error. return.") | |||
return | |||
} | |||
var indexTotal int64 | |||
indexTotal = 0 | |||
for { | |||
sess.Select("`user`.*").Table("user").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) | |||
userList := make([]*User, 0) | |||
sess.Find(&userList) | |||
for _, userRecord := range userList { | |||
var dateRecordAll UserBusinessAnalysisAll | |||
dateRecordAll.ID = userRecord.ID | |||
dateRecordAll.Email = userRecord.Email | |||
dateRecordAll.Phone = userRecord.PhoneNumber | |||
dateRecordAll.RegistDate = userRecord.CreatedUnix | |||
dateRecordAll.Name = userRecord.Name | |||
dateRecordAll.CodeMergeCount = getMapValue(dateRecordAll.ID, CodeMergeCountMap) | |||
dateRecordAll.CommitCount = getMapValue(dateRecordAll.ID, CommitCountMap) | |||
dateRecordAll.IssueCount = getMapValue(dateRecordAll.ID, IssueCountMap) | |||
dateRecordAll.CommentCount = getMapValue(dateRecordAll.ID, CommentCountMap) | |||
dateRecordAll.CommitCodeSize = getMapValue(dateRecordAll.ID, existCommitCodeSize) | |||
dateRecordAll.CommitDatasetSize = getMapValue(dateRecordAll.ID, CommitDatasetSizeMap) | |||
dateRecordAll.CommitDatasetNum = getMapValue(dateRecordAll.ID, CommitDatasetNumMap) | |||
dateRecordAll.SolveIssueCount = getMapValue(dateRecordAll.ID, SolveIssueCountMap) | |||
dateRecordAll.CreateRepoCount = getMapValue(dateRecordAll.ID, CreateRepoCountMap) | |||
dateRecordAll.LoginCount = getMapValue(dateRecordAll.ID, LoginMap) | |||
dateRecordAll.CloudBrainTaskNum = getMapValue(dateRecordAll.ID, CloudBrainTaskMap) | |||
dateRecordAll.GpuDebugJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_GpuDebugJob", CloudBrainTaskItemMap) | |||
dateRecordAll.NpuDebugJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_NpuDebugJob", CloudBrainTaskItemMap) | |||
dateRecordAll.GpuTrainJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_GpuTrainJob", CloudBrainTaskItemMap) | |||
dateRecordAll.NpuTrainJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_NpuTrainJob", CloudBrainTaskItemMap) | |||
dateRecordAll.NpuInferenceJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_NpuInferenceJob", CloudBrainTaskItemMap) | |||
dateRecordAll.GpuBenchMarkJob = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_GpuBenchMarkJob", CloudBrainTaskItemMap) | |||
dateRecordAll.CloudBrainRunTime = getMapKeyStringValue(fmt.Sprint(dateRecordAll.ID)+"_CloudBrainRunTime", CloudBrainTaskItemMap) | |||
//年度数据 | |||
subTime := time.Now().UTC().Sub(dateRecordAll.RegistDate.AsTime().UTC()) | |||
mostActiveDay := "" | |||
if userInfo, ok := mostActiveMap[dateRecordAll.ID]; ok { | |||
mostActiveDay = getMostActiveJson(userInfo) | |||
} | |||
scoreMap := make(map[string]float64) | |||
repoInfo := getRepoDetailInfo(DetailInfoMap, dateRecordAll.ID, MostDownloadMap, fourSeasonMap) | |||
dataSetInfo, datasetscore := getDataSetInfo(dateRecordAll.ID, CreatedDataset, dataSetDownloadMap, CommitDatasetNumMap, CollectedDataset) | |||
scoreMap["datasetscore"] = datasetscore | |||
codeInfo, codescore := getCodeInfo(&dateRecordAll, commitCodeArrays) | |||
scoreMap["codescore"] = codescore | |||
cloudBrainInfo := getCloudBrainInfo(&dateRecordAll, CloudBrainTaskItemMap, scoreMap, resourceItemMap[dateRecordAll.ID]) | |||
playARoll := getPlayARoll(bonusMap, dateRecordAll.Name, scoreMap) | |||
forumInfo := getForumInfo(forumMap, dateRecordAll.Name) | |||
exteral := 0 | |||
if int(subTime.Hours())%24 > 0 { | |||
exteral = 1 | |||
} | |||
re := &UserSummaryCurrentYear{ | |||
ID: dateRecordAll.ID, | |||
Name: dateRecordAll.Name, | |||
Email: dateRecordAll.Email, | |||
Phone: dateRecordAll.Phone, | |||
RegistDate: dateRecordAll.RegistDate, | |||
DateCount: int(subTime.Hours())/24 + exteral, | |||
MostActiveDay: mostActiveDay, | |||
RepoInfo: repoInfo, | |||
DataSetInfo: dataSetInfo, | |||
CodeInfo: codeInfo, | |||
CloudBrainInfo: cloudBrainInfo, | |||
PlayARoll: playARoll, | |||
IssueInfo: getIssueInfo(&dateRecordAll), | |||
ModelInfo: getModelInfo(&dateRecordAll, UserYearModel), | |||
LoginCount: dateRecordAll.LoginCount, | |||
AccumulatePointInfo: getPointInfo(&dateRecordAll, PointMap), | |||
ActionInfo: getActionInfo(&dateRecordAll, MostActiveDayMap), | |||
ForumInfo: forumInfo, | |||
ActionDays: getActionDaysInfo(actionDays[dateRecordAll.ID]), | |||
} | |||
statictisSess.Insert(re) | |||
} | |||
indexTotal += PAGE_SIZE | |||
if indexTotal >= count { | |||
break | |||
} | |||
} | |||
log.Info("update user year data finished. ") | |||
} | |||
// getActionDaysInfo renders the number of active days as a decimal string.
func getActionDaysInfo(days int) string {
	return fmt.Sprintf("%d", days)
}
// getForumInfo returns the forum statistics of the given user as a JSON
// string, or the empty string when the user has no forum record.
func getForumInfo(forumMap map[string]map[string]interface{}, name string) string {
	record, found := forumMap[name]
	if !found {
		return ""
	}
	data, _ := json.Marshal(record)
	return string(data)
}
func getActionInfo(dateRecordAll *UserBusinessAnalysisAll, MostActiveDayMap map[int64]map[string]int) string { | |||
record, ok := MostActiveDayMap[dateRecordAll.ID] | |||
if ok { | |||
actionJson, _ := json.Marshal(record) | |||
return string(actionJson) | |||
} | |||
return "" | |||
} | |||
func getPointInfo(dateRecordAll *UserBusinessAnalysisAll, PointMap map[int64]map[string]int) string { | |||
record, ok := PointMap[dateRecordAll.ID] | |||
if ok { | |||
pointJson, _ := json.Marshal(record) | |||
return string(pointJson) | |||
} | |||
return "" | |||
} | |||
func getModelInfo(dateRecordAll *UserBusinessAnalysisAll, userModelMap map[int64]map[string]interface{}) string { | |||
record, ok := userModelMap[dateRecordAll.ID] | |||
if ok { | |||
userModelJson, _ := json.Marshal(record) | |||
return string(userModelJson) | |||
} | |||
return "" | |||
} | |||
func getIssueInfo(dateRecordAll *UserBusinessAnalysisAll) string { | |||
issueInfo := make(map[string]string) | |||
issueInfo["create_count"] = fmt.Sprint(dateRecordAll.IssueCount) | |||
issueInfoJson, _ := json.Marshal(issueInfo) | |||
return string(issueInfoJson) | |||
} | |||
func getBonusMap() map[string]map[string]int { | |||
bonusMap := make(map[string]map[string]int) | |||
url := setting.RecommentRepoAddr + "bonus/2023.txt" | |||
content, err := GetContentFromPromote(url) | |||
if err == nil { | |||
filenames := strings.Split(content, "\n") | |||
for i := 0; i < len(filenames); i++ { | |||
filenames[i] = strings.TrimSuffix(filenames[i], "\r") | |||
url = setting.RecommentRepoAddr + "bonus/" + filenames[i] | |||
log.Info("bonus url=" + url) | |||
csvContent, err1 := GetContentFromPromote(url) | |||
if err1 == nil { | |||
//read csv | |||
lines := strings.Split(csvContent, "\n") | |||
for j := 1; j < len(lines); j++ { | |||
newLine := strings.TrimSuffix(lines[j], "\r") | |||
aLine := strings.Split(newLine, ",") | |||
if len(aLine) < 9 { | |||
continue | |||
} | |||
userName := aLine[1] | |||
record, ok := bonusMap[userName] | |||
if !ok { | |||
record = make(map[string]int) | |||
bonusMap[userName] = record | |||
} | |||
record["times"] = getMapKeyStringValue("times", record) + getIntValue(aLine[3]) | |||
record["total_bonus"] = getMapKeyStringValue("total_bonus", record) + getIntValue(aLine[4]) | |||
record["total_cardtime"] = getMapKeyStringValue("total_cardtime", record) + getIntValue(aLine[5]) | |||
record["total_giveup"] = getMapKeyStringValue("total_giveup", record) + getIntValue(aLine[6]) | |||
record["total_award_title"] = getMapKeyStringValue("total_award_title", record) + getIntValue(aLine[8]) | |||
} | |||
} | |||
} | |||
} | |||
return bonusMap | |||
} | |||
func getForumMap() map[string]map[string]interface{} { | |||
sep := "(-2023-)" | |||
forumMap := make(map[string]map[string]interface{}) | |||
url := setting.RecommentRepoAddr + "forum/data.txt" | |||
content, err := GetContentFromPromote(url) | |||
if err == nil { | |||
lines := strings.Split(content, "\n") | |||
for i := 0; i < len(lines); i++ { | |||
newLine := strings.TrimSuffix(lines[i], "\r") | |||
//read a line | |||
aLine := strings.Split(newLine, sep) | |||
if len(aLine) < 6 { | |||
continue | |||
} | |||
userName := aLine[0] | |||
record, ok := forumMap[userName] | |||
if !ok { | |||
record = make(map[string]interface{}) | |||
forumMap[userName] = record | |||
} | |||
record["max_subject"] = aLine[1] | |||
record["total_view_num"] = getIntValue(aLine[2]) | |||
record["total_num"] = getIntValue(aLine[3]) | |||
record["total_star_num"] = getIntValue(aLine[4]) | |||
record["max_view_count"] = getIntValue(aLine[5]) | |||
} | |||
} | |||
return forumMap | |||
} | |||
// getPlayARoll returns the user's bonus ("play a roll") statistics as a
// JSON string and stores a derived roll score in scoreMap under
// "rollscore". Users without a bonus record yield the empty string and
// leave scoreMap untouched.
func getPlayARoll(bonusMap map[string]map[string]int, userName string, scoreMap map[string]float64) string {
	record, found := bonusMap[userName]
	if !found {
		return ""
	}
	// Participating at least four times earns a score that grows linearly
	// with the number of participations; fewer participations score zero.
	rollScore := 0.0
	if record["times"] >= 4 {
		rollScore = float64(record["times"]) / 4.0
	}
	scoreMap["rollscore"] = rollScore
	info := map[string]string{
		"times":             fmt.Sprint(record["times"]),
		"total_bonus":       fmt.Sprint(record["total_bonus"]),
		"total_cardtime":    fmt.Sprint(record["total_cardtime"]),
		"total_giveup":      fmt.Sprint(record["total_giveup"]),
		"total_award_title": fmt.Sprint(record["total_award_title"]),
	}
	data, _ := json.Marshal(info)
	return string(data)
}
func queryUserYearModel(start_unix int64, end_unix int64) map[int64]map[string]interface{} { | |||
sess := x.NewSession() | |||
defer sess.Close() | |||
resultMap := make(map[int64]map[string]interface{}) | |||
cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix) | |||
count, err := sess.Where(cond).Count(new(AiModelManage)) | |||
if err != nil { | |||
log.Info("query AiModelManage error. return.") | |||
return resultMap | |||
} | |||
var indexTotal int64 | |||
indexTotal = 0 | |||
for { | |||
sess.Select("id,user_id,download_count,reference_count").Table("ai_model_manage").Where(cond).OrderBy("id asc").Limit(PAGE_SIZE, int(indexTotal)) | |||
aiModelList := make([]*AiModelManage, 0) | |||
sess.Find(&aiModelList) | |||
log.Info("query user year AiModelManage size=" + fmt.Sprint(len(aiModelList))) | |||
for _, aiModelRecord := range aiModelList { | |||
if _, ok := resultMap[aiModelRecord.UserId]; !ok { | |||
modelmap := make(map[string]interface{}) | |||
modelmap["count"] = 1 | |||
modelmap["max_download_count"] = aiModelRecord.DownloadCount | |||
modelmap["max_reference_count"] = aiModelRecord.ReferenceCount | |||
modelmap["name"] = aiModelRecord.Name | |||
resultMap[aiModelRecord.UserId] = modelmap | |||
} else { | |||
resultMap[aiModelRecord.UserId]["count"] = resultMap[aiModelRecord.UserId]["count"].(int) + 1 | |||
if resultMap[aiModelRecord.UserId]["max_download_count"].(int) < aiModelRecord.DownloadCount { | |||
resultMap[aiModelRecord.UserId]["max_download_count"] = aiModelRecord.DownloadCount | |||
resultMap[aiModelRecord.UserId]["max_reference_count"] = aiModelRecord.ReferenceCount | |||
resultMap[aiModelRecord.UserId]["name"] = aiModelRecord.Name | |||
} | |||
} | |||
} | |||
indexTotal += PAGE_SIZE | |||
if indexTotal >= count { | |||
break | |||
} | |||
} | |||
return resultMap | |||
} | |||
func queryPointInfo(start_unix int64, end_unix int64) map[int64]map[string]int { | |||
sess := x.NewSession() | |||
defer sess.Close() | |||
scoreInfoMap := make(map[int64]map[string]int) | |||
cond := " created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix) | |||
count, err := sess.Where(cond).Count(new(PointAccountLog)) | |||
if err != nil { | |||
log.Info("query PointAccountLog error. return.") | |||
return scoreInfoMap | |||
} | |||
var indexTotal int64 | |||
indexTotal = 0 | |||
for { | |||
sess.Select("id,user_id,type,points_amount,balance_after").Table("point_account_log").Where(cond).OrderBy("id desc").Limit(PAGE_SIZE, int(indexTotal)) | |||
pointAccountLogList := make([]*PointAccountLog, 0) | |||
sess.Find(&pointAccountLogList) | |||
log.Info("query user year AiModelManage size=" + fmt.Sprint(len(pointAccountLogList))) | |||
for _, pointAccountLogRecord := range pointAccountLogList { | |||
if _, ok := scoreInfoMap[pointAccountLogRecord.UserId]; !ok { | |||
pointMap := make(map[string]int) | |||
scoreInfoMap[pointAccountLogRecord.UserId] = pointMap | |||
scoreInfoMap[pointAccountLogRecord.UserId]["total"] = int(pointAccountLogRecord.BalanceAfter) | |||
} | |||
if pointAccountLogRecord.Type == "increase" { | |||
scoreInfoMap[pointAccountLogRecord.UserId]["increase"] += int(pointAccountLogRecord.PointsAmount) | |||
} | |||
if pointAccountLogRecord.Type == "decrease" { | |||
scoreInfoMap[pointAccountLogRecord.UserId]["decrease"] += int(pointAccountLogRecord.PointsAmount) | |||
} | |||
} | |||
indexTotal += PAGE_SIZE | |||
if indexTotal >= count { | |||
break | |||
} | |||
} | |||
return scoreInfoMap | |||
} | |||
func queryMostActiveMonth(start_unix int64, end_unix int64) map[int64]map[string]int { | |||
sess := x.NewSession() | |||
defer sess.Close() | |||
actionInfoMap := make(map[int64]map[string]int) | |||
cond := "(op_type<=" + fmt.Sprint(17) + " or op_type>=21) and user_id=act_user_id and created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix) | |||
count, err := sess.Where(cond).Count(new(Action)) | |||
if err != nil { | |||
log.Info("query Action error. return.") | |||
return actionInfoMap | |||
} | |||
var indexTotal int64 | |||
indexTotal = 0 | |||
for { | |||
actionList, err := sess.QueryInterface("select id,user_id,op_type,act_user_id,created_unix from public.action where " + cond + " order by id asc limit " + fmt.Sprint(PAGE_SIZE) + " offset " + fmt.Sprint(indexTotal)) | |||
if err != nil { | |||
log.Info("error:" + err.Error()) | |||
continue | |||
} | |||
log.Info("query action size=" + fmt.Sprint(len(actionList))) | |||
for _, actionRecord := range actionList { | |||
userId := convertInterfaceToInt64(actionRecord["user_id"]) | |||
created_unix := convertInterfaceToInt64(actionRecord["created_unix"]) | |||
created_time := time.Unix(created_unix, 0) | |||
str_time := created_time.Format("20060102") | |||
str_time = str_time[4:] | |||
if _, ok := actionInfoMap[userId]; !ok { | |||
actionMap := make(map[string]int) | |||
actionInfoMap[userId] = actionMap | |||
} | |||
actionInfoMap[userId][str_time] += 1 | |||
} | |||
indexTotal += PAGE_SIZE | |||
if indexTotal >= count { | |||
break | |||
} | |||
} | |||
returnMap := make(map[int64]map[string]int) | |||
for k, v := range actionInfoMap { | |||
returnMap[k] = getActionMostActiveDayMap(v) | |||
} | |||
return returnMap | |||
} | |||
// getActionMostActiveDayMap reduces a day->count activity map (keys are
// "MMDD" strings) to two entries: the most active month ("MM" mapped to
// that month's total count) and the most active day inside that month
// ("MMDD" mapped to that day's count).
func getActionMostActiveDayMap(record map[string]int) map[string]int {
	result := make(map[string]int)

	// Sum activity per month (first two characters of the key).
	perMonth := make(map[string]int)
	for day, cnt := range record {
		perMonth[day[0:2]] += cnt
	}

	// Pick the busiest month.
	bestMonth, bestMonthCount := "", 0
	for month, cnt := range perMonth {
		if cnt > bestMonthCount {
			bestMonth, bestMonthCount = month, cnt
		}
	}
	result[bestMonth] = bestMonthCount

	// Pick the busiest day within that month.
	bestDay, bestDayCount := "", 0
	for day, cnt := range record {
		if day[0:2] == bestMonth && cnt > bestDayCount {
			bestDay, bestDayCount = day, cnt
		}
	}
	result[bestDay] = bestDayCount
	return result
}
// getRepoDetailInfo assembles the per-user repository summary as a JSON
// string: totals, visibility split, download statistics, the name of the
// most downloaded repository and one highlighted repository per season.
// Returns the empty string when no information exists for the user.
func getRepoDetailInfo(repoDetailInfoMap map[string]int, userId int64, mostDownload map[int64]string, fourSeason map[string]map[string]interface{}) string {
	uid := fmt.Sprint(userId)
	detail := make(map[string]interface{})

	// Numeric counters live in repoDetailInfoMap under "<userId><suffix>"
	// keys and are rendered as strings in the JSON output.
	counters := []struct{ suffix, out string }{
		{"_total", "repo_total"},
		{"_is_private", "repo_is_private"},
		{"_is_public", "repo_is_public"},
		{"_total_download", "repo_total_download"},
		{"_most_download", "repo_most_download_count"},
	}
	for _, c := range counters {
		if v, ok := repoDetailInfoMap[uid+c.suffix]; ok {
			detail[c.out] = fmt.Sprint(v)
		}
	}

	if name, ok := mostDownload[userId]; ok {
		detail["repo_most_download_name"] = name
	}

	// One representative repository per season, kept as nested objects.
	seasons := []struct{ suffix, out string }{
		{"_spring", "repo_spring"},
		{"_summer", "repo_summer"},
		{"_autumn", "repo_autumn"},
		{"_winter", "repo_winter"},
	}
	for _, s := range seasons {
		if repo, ok := fourSeason[uid+s.suffix]; ok {
			detail[s.out] = repo
		}
	}

	if len(detail) == 0 {
		return ""
	}
	data, _ := json.Marshal(detail)
	return string(data)
}
func getCodeInfo(dateRecordAll *UserBusinessAnalysisAll, commitCodeArrays []int) (string, float64) { | |||
if dateRecordAll.CommitCount > 0 { | |||
codeInfo := make(map[string]string) | |||
codeInfo["commit_count"] = fmt.Sprint(dateRecordAll.CommitCount) | |||
codeInfo["commit_line"] = fmt.Sprint(dateRecordAll.CommitCodeSize) | |||
score := 0.0 | |||
score = float64(dateRecordAll.CommitCodeSize) / float64(dateRecordAll.CommitCount) / float64(20000) | |||
if score < (float64(dateRecordAll.CommitCount) / float64(100)) { | |||
score = float64(dateRecordAll.CommitCount) / float64(100) | |||
} | |||
log.Info("len(commitCodeArrays)=" + fmt.Sprint(len(commitCodeArrays))) | |||
location := binarySearch(commitCodeArrays, dateRecordAll.CommitCodeSize) | |||
codeInfo["location"] = fmt.Sprintf("%.2f", float64(location+1)/float64(len(commitCodeArrays))) | |||
codeInfo["score"] = fmt.Sprintf("%.2f", score) | |||
codeInfoJson, _ := json.Marshal(codeInfo) | |||
return string(codeInfoJson), score | |||
} else { | |||
return "", 0 | |||
} | |||
} | |||
// binarySearch returns the index of target in the ascending-sorted slice
// arr. When target is absent it returns the index probed last (NOT -1,
// despite what the old comment claimed), which callers use as an
// approximate rank; for an empty slice it returns 0.
func binarySearch(arr []int, target int) int {
	lo, hi := 0, len(arr)-1
	probe := 0
	for lo <= hi {
		probe = (lo + hi) / 2
		switch {
		case arr[probe] == target:
			return probe
		case arr[probe] < target:
			lo = probe + 1
		default:
			hi = probe - 1
		}
	}
	return probe
}
// getMostActiveJson renders a user's activity summary as a JSON string.
// If the special "hour_*" entries are present they are consumed (deleted
// from userInfo) and folded into a "before_dawn" timestamp string
// "year/month/day hour"; the remaining entries are then scanned for the
// most active day. NOTE: the input map is mutated.
func getMostActiveJson(userInfo map[string]int) string {
	summary := make(map[string]string)
	if day, ok := userInfo["hour_day"]; ok {
		hour := userInfo["hour_hour"]
		month := userInfo["hour_month"]
		year := userInfo["hour_year"]
		delete(userInfo, "hour_day")
		delete(userInfo, "hour_hour")
		delete(userInfo, "hour_month")
		delete(userInfo, "hour_year")
		summary["before_dawn"] = fmt.Sprint(year) + "/" + fmt.Sprint(month) + "/" + fmt.Sprint(day) + " " + fmt.Sprint(hour)
	}
	bestDay, bestCount := "", 0
	for day, cnt := range userInfo {
		if cnt > bestCount {
			bestDay, bestCount = day, cnt
		}
	}
	summary["most_active_day"] = bestDay
	summary["most_active_num"] = fmt.Sprint(bestCount)
	data, _ := json.Marshal(summary)
	return string(data)
}
func queryActionDays(start_unix int64, end_unix int64) map[int64]int { | |||
sess := x.NewSession() | |||
defer sess.Close() | |||
resultMap := make(map[int64]int) | |||
timeMap := make(map[int64]string) | |||
cond := "user_id=act_user_id and created_unix>=" + fmt.Sprint(start_unix) + " and created_unix<=" + fmt.Sprint(end_unix) | |||
count, err := sess.Where(cond).Count(new(Action)) | |||
if err != nil { | |||
log.Info("query action error. return.") | |||
return resultMap | |||
} | |||
var indexTotal int64 | |||
indexTotal = 0 | |||
for { | |||
actionList, err := sess.QueryInterface("select id,user_id,op_type,act_user_id,created_unix from public.action where " + cond + " order by id asc limit " + fmt.Sprint(PAGE_SIZE) + " offset " + fmt.Sprint(indexTotal)) | |||
if err != nil { | |||
log.Info("error:" + err.Error()) | |||
continue | |||
} | |||
log.Info("query action size=" + fmt.Sprint(len(actionList))) | |||
for _, actionRecord := range actionList { | |||
userId := convertInterfaceToInt64(actionRecord["user_id"]) | |||
created_unix := convertInterfaceToInt64(actionRecord["created_unix"]) | |||
created_time := time.Unix(created_unix, 0) | |||
time_str := created_time.Format("2006-01-02") | |||
if timeMap[userId] != time_str { | |||
if _, ok := resultMap[userId]; !ok { | |||
resultMap[userId] = 1 | |||
} else { | |||
resultMap[userId] += 1 | |||
} | |||
timeMap[userId] = time_str | |||
} | |||
} | |||
indexTotal += PAGE_SIZE | |||
if indexTotal >= count { | |||
break | |||
} | |||
} | |||
return resultMap | |||
} |
@@ -40,7 +40,7 @@ const maxSinglePutObjectSize = 1024 * 1024 * 1024 * 5 | |||
// maxMultipartPutObjectSize - maximum size 5TiB of object for | |||
// Multipart operation. | |||
const MaxMultipartPutObjectSize = 1024 * 1024 * 1024 * 200 | |||
const MaxMultipartPutObjectSize = 1024 * 1024 * 1024 * 250 | |||
// unsignedPayload - value to be set to X-Amz-Content-Sha256 header when | |||
// we don't want to sign the request payload | |||
@@ -22,7 +22,7 @@ var ( | |||
) | |||
const ( | |||
PresignedGetUrlExpireTime = time.Hour * 24 * 1 | |||
PresignedGetUrlExpireTime = time.Hour * 24 * 7 | |||
PresignedPutUrlExpireTime = time.Hour * 24 * 7 | |||
) | |||
@@ -599,7 +599,7 @@ func ObsGenMultiPartSignedUrl(objectName string, uploadId string, partNumber int | |||
input.Bucket = setting.Bucket | |||
input.Key = objectName | |||
//strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/") | |||
input.Expires = 24 * 60 * 60 | |||
input.Expires = 7 * 24 * 60 * 60 | |||
input.Method = obs.HttpMethodPut | |||
input.QueryParams = map[string]string{ | |||
@@ -622,7 +622,7 @@ func GetObsCreateSignedUrlByBucketAndKey(bucket, key string) (string, error) { | |||
input.Bucket = bucket | |||
input.Key = key | |||
input.Expires = 24 * 60 * 60 | |||
input.Expires = 7 * 24 * 60 * 60 | |||
input.Method = obs.HttpMethodGet | |||
comma := strings.LastIndex(key, "/") | |||
filename := key | |||
@@ -652,7 +652,7 @@ func ObsGetPreSignedUrl(objectName, fileName string) (string, error) { | |||
input.Key = objectName | |||
//strings.TrimPrefix(path.Join(setting.BasePath, path.Join(uuid[0:1], uuid[1:2], uuid, fileName)), "/") | |||
input.Bucket = setting.Bucket | |||
input.Expires = 24 * 60 * 60 | |||
input.Expires = 7 * 24 * 60 * 60 | |||
fileName = url.PathEscape(fileName) | |||
reqParams := make(map[string]string) | |||
@@ -21,7 +21,7 @@ import ( | |||
const ( | |||
PAGE_SIZE = 2000 | |||
Excel_File_Path = "/useranalysis/" | |||
USER_YEAR = 2022 | |||
USER_YEAR = 2023 | |||
) | |||
func getUserMetricsExcelHeader(ctx *context.Context) map[string]string { | |||
@@ -721,10 +721,14 @@ func TimingCountDataByDateAndReCount(date string, isReCount bool) { | |||
log.Info("endTime time:" + endTime.Format("2006-01-02 15:04:05")) | |||
warnEmailMessage := "用户统计信息入库失败,请尽快定位。" | |||
//startYear := time.Date(USER_YEAR, 1, 1, 0, 0, 0, 1, t.Location()) | |||
//endYear := startYear.AddDate(1, 0, 0) | |||
startYear := time.Date(USER_YEAR, 1, 1, 0, 0, 0, 1, t.Location()) | |||
endYear := startYear.AddDate(1, 0, 0) | |||
//models.RefreshUserYearTable(startYear, endYear) | |||
if time.Now().Year() == 2024 { | |||
log.Info("the day is 2024,so not update.") | |||
return | |||
} | |||
models.RefreshUserYearTable(startYear, endYear) | |||
//query wiki data | |||
log.Info("start to time count data") | |||
Dear OpenI User,
Thank you for your continuous support of the OpenI Qizhi Community AI Collaboration Platform. In order to protect your usage rights and ensure network security, we updated the OpenI Qizhi Community AI Collaboration Platform Usage Agreement in January 2024. The updated agreement specifies that users are prohibited from using intranet penetration tools. After you click "Agree and continue", you can continue to use our services. Thank you for your cooperation and understanding.
For more details, please refer to the《OpenI Qizhi Community AI Collaboration Platform Usage Agreement》.