allExclusiveAndCommonUse: 'All Exclusive and Common Use',
exclusive: 'Exclusive',
commonUse: 'Common Use',
sceneType: "Community Scene Type",
allSceneType: "All Community Scene Type",
isExclusiveSpec: 'Is Exclusive Spec?',
allExclusiveAndCommonUseSpec: 'All Exclusive and Common Use Spec',
public: "Public",
exclusive: "Exclusive",
exclusiveSpec: 'Exclusive Spec',
commonUseSpec: 'Common Use Spec',
exclusiveOrg: 'Exclusive Organization',
exclusiveOrgTips: 'Multiple organization names are separated by semicolons',
computeCluster: 'Compute Cluster',
@@ -365,7 +372,7 @@ const en = {
forkModelSuccess: 'The model content has been copied and forked successfully!',
debugModel: 'Debug Model',
onlineInference: 'online Inference',
deleted:'Deleted',
deleted:'Deleted',
},
repos: {
activeOrganization: 'Active Organization',
@@ -621,7 +628,7 @@ const en = {
maxTaskTips: '<p><span>*</span>The platform only retains the results of debug, train, inference and evaluation tasks for nearly<span> 30 </span> days. <span>Tasks over 30 days will not be able to download results and view logs, and cannot be debugged or trained again</span></p>',
datasetFiles: 'Dataset files',
fileWasDeleted: 'The file has been deleted',
debugTaskEmptyTitle: 'Debug task has not been created',
debugTaskEmptyTitle: 'Debug task has not been created',
debugTaskEmptyTip0: 'Code version: You have not initialized the code repository, please <a href="{url}">initialized</a> first;',
debugTaskEmptyTip1: 'Running time: no more than 4 hours, it will automatically stop if it exceeds 4 hours;',
debugTaskEmptyTip2: 'Dataset: Cloud Brain 1 provides CPU/GPU,Cloud Brain 2 provides Ascend NPU.And dataset also needs to be uploaded to the corresponding environment;',
@@ -693,7 +700,7 @@ const en = {
modelSquare: {
llmHeader: 'Document dialogue experience',
chatGlm_intro: 'is an open source conversational language model that supports Chinese and English bilingualism, provided by Zhipu AI.',
llama2:'is a collection of pretrained and fine-tuned generative text models ranging in scale from 7 billion to 70 billion parameters. This is the repository for the 7B fine-tuned model, optimized for dialogue use cases and converted for the Hugging Face Transformers format.',
llama2:'is a collection of pretrained and fine-tuned generative text models ranging in scale from 7 billion to 70 billion parameters. This is the repository for the 7B fine-tuned model, optimized for dialogue use cases and converted for the Hugging Face Transformers format.',
dialogtips1: 'Hello 👋! Welcome to experience the large model knowledge base Q&A',
dialogtips21: 'This experience is based on',
dialogtips22: 'language model and m3-base vector model',
@@ -715,7 +722,7 @@ const en = {
deleteVbTips: 'Are you sure to delete the knowledge base files?',
uploadFile: 'Upload files',
uploadFileTips1: 'Drag the file here, or click <em>to upload</em>',
Thank you for your continuous support of the OpenI Qizhi Community AI Collaboration Platform. To protect your usage rights and ensure network security, we updated the OpenI Qizhi Community AI Collaboration Platform Usage Agreement in January 2024. The updated agreement specifies that users are prohibited from using intranet penetration tools. After you click "Agree and continue", you can continue to use our services. Thank you for your cooperation and understanding.