4.8.20 test (#3656)

* provider

* perf: model config
Archer 2025-01-23 18:32:45 +08:00 committed by GitHub
parent 2c03abc6e1
commit 99ce976b06
27 changed files with 558 additions and 3 deletions

View File

@ -20,6 +20,7 @@ export type ModelProviderIdType =
  | 'Baichuan'
  | 'StepFun'
  | 'Yi'
+ | 'Siliconflow'
  | 'Ollama'
  | 'BAAI'
  | 'FishAudio'
@ -155,6 +156,11 @@ export const ModelProviderList: ModelProviderType[] = [
    name: i18nT('common:model_moka'),
    avatar: 'model/moka'
  },
+ {
+   id: 'Siliconflow',
+   name: i18nT('common:model_siliconflow'),
+   avatar: 'model/siliconflow'
+ },
  {
    id: 'Other',
    name: i18nT('common:model_other'),

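The added 'Siliconflow' id and list entry are consumed like every other provider: code resolves a provider id to its display name and avatar through ModelProviderList. A minimal lookup sketch, assuming only the types shown in the diff above (the getModelProvider helper and the './provider' import path are hypothetical, not necessarily the repo's own):

// Hypothetical helper: resolve provider metadata by id, falling back to 'Other'.
import { ModelProviderList, type ModelProviderIdType } from './provider';

const getModelProvider = (id?: ModelProviderIdType) =>
  ModelProviderList.find((p) => p.id === id) ??
  ModelProviderList.find((p) => p.id === 'Other')!;

// getModelProvider('Siliconflow').avatar === 'model/siliconflow'
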
View File

@ -0,0 +1,10 @@
{
"provider": "Ernie",
"model": "Embedding-V1",
"name": "Embedding-V1",
"defaultToken": 512,
"maxToken": 1000,
"charsPointsPrice": 0
}
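
The embedding model configs in this commit (this Ernie file and the ones that follow) all share the same handful of fields. Read as a type, the shape is roughly the following; the field comments are my reading of the values, not the repo's own schema:

// Sketch of the embedding (vector) model config shape, inferred from these JSON files.
type EmbeddingModelConfig = {
  provider: string;         // provider id, e.g. 'Ernie', 'BAAI', 'Doubao'
  model: string;            // model id sent to the API
  name: string;             // display name shown in the UI
  defaultToken: number;     // default chunk size when splitting dataset text
  maxToken: number;         // per-request token limit for the embedding call
  charsPointsPrice: number; // billing points per 1k characters (0 = free / self-hosted)
  defaultConfig?: Record<string, any>; // optional extra body fields, e.g. { dimensions: 1024 }
};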

View File

@ -0,0 +1,10 @@
{
"provider": "Ernie",
"model": "tao-8k",
"name": "tao-8k",
"defaultToken": 512,
"maxToken": 8000,
"charsPointsPrice": 0
}

View File

@ -0,0 +1,10 @@
{
"provider": "BAAI",
"model": "bge-m3",
"name": "bge-m3",
"defaultToken": 512,
"maxToken": 8000,
"charsPointsPrice": 0
}

View File

@ -0,0 +1,10 @@
{
"provider": "Doubao",
"model": "Doubao-embedding-large",
"name": "Doubao-embedding-large",
"defaultToken": 512,
"maxToken": 4096,
"charsPointsPrice": 0
}

View File

@ -0,0 +1,10 @@
{
"provider": "Doubao",
"model": "Doubao-embedding",
"name": "Doubao-embedding",
"defaultToken": 512,
"maxToken": 4096,
"charsPointsPrice": 0
}

View File

@ -0,0 +1,10 @@
{
"provider": "Hunyuan",
"model": "hunyuan-embedding",
"name": "hunyuan-embedding",
"defaultToken": 512,
"maxToken": 1024,
"charsPointsPrice": 0
}

View File

@ -0,0 +1,13 @@
{
"provider": "ChatGLM",
"model": "embedding-3",
"name": "embedding-3",
"defaultToken": 512,
"maxToken": 8000,
"defaultConfig": {
"dimensions": 1024
},
"charsPointsPrice": 0
}

View File

@ -0,0 +1,29 @@
{
"provider": "Gemini",
"model": "gemini-exp-1206",
"name": "gemini-exp-1206",
"censor": false,
"charsPointsPrice": 0,
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 120000,
"maxTemperature": 1,
"vision": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
"usedInClassify": true,
"customCQPrompt": "",
"usedInExtractFields": true,
"usedInQueryExtension": true,
"customExtractPrompt": "",
"usedInToolCall": true,
"defaultConfig": {},
"fieldMap": {}
}
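
The chat model configs (this Gemini entry and the Hunyuan and Qwen entries below) use a larger, flag-heavy shape. A rough TypeScript reading of it follows; the per-field comments are inferred from the names and values in these files, not taken from the repo's schema:

// Sketch of the LLM config shape used by the chat-model JSON files in this commit.
type LLMModelConfig = {
  provider: string;
  model: string;                   // model id sent to the API
  name: string;                    // display name
  censor: boolean;                 // run content moderation on requests
  charsPointsPrice: number;        // billing points per 1k characters (0 = free)
  maxContext: number;              // total context window, in tokens
  maxResponse: number;             // maximum completion tokens
  quoteMaxToken: number;           // max tokens of dataset quotes injected into the prompt
  maxTemperature: number;
  vision: boolean;                 // accepts image input
  toolChoice: boolean;             // supports tool calling
  functionCall: boolean;           // supports legacy function calling
  datasetProcess: boolean;         // usable for dataset file processing
  usedInClassify: boolean;         // usable in the question-classification node
  usedInExtractFields: boolean;    // usable in the content-extraction node
  usedInQueryExtension: boolean;   // usable for query extension
  usedInToolCall: boolean;         // usable as a tool-call model
  defaultSystemChatPrompt: string;
  customCQPrompt: string;          // classification prompt override
  customExtractPrompt: string;     // extraction prompt override
  defaultConfig: Record<string, any>; // extra request-body fields merged into each call
  fieldMap: Record<string, string>;   // request-body field renames for non-standard APIs
};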

View File

@ -0,0 +1,29 @@
{
"provider": "Hunyuan",
"model": "hunyuan-pro-32k(测试)",
"name": "hunyuan-pro-32k(测试)",
"censor": false,
"charsPointsPrice": 0,
"maxContext": 28000,
"maxResponse": 4000,
"quoteMaxToken": 28000,
"maxTemperature": 1,
"vision": false,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
"usedInClassify": true,
"customCQPrompt": "",
"usedInExtractFields": true,
"usedInQueryExtension": true,
"customExtractPrompt": "",
"usedInToolCall": true,
"defaultConfig": {},
"fieldMap": {}
}

View File

@ -0,0 +1,29 @@
{
"provider": "Hunyuan",
"model": "hunyuan-turbo-vision",
"name": "hunyuan-turbo-vision",
"censor": false,
"charsPointsPrice": 0,
"maxContext": 6000,
"maxResponse": 2000,
"quoteMaxToken": 6000,
"maxTemperature": 1,
"vision": true,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"datasetProcess": false,
"usedInClassify": false,
"customCQPrompt": "",
"usedInExtractFields": false,
"usedInQueryExtension": false,
"customExtractPrompt": "",
"usedInToolCall": false,
"defaultConfig": {},
"fieldMap": {}
}

View File

@ -0,0 +1,29 @@
{
"provider": "Qwen",
"model": "qwen-coder-turbo",
"name": "qwen-coder-turbo",
"censor": false,
"charsPointsPrice": 0,
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 50000,
"maxTemperature": 1,
"vision": false,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"datasetProcess": false,
"usedInClassify": false,
"customCQPrompt": "",
"usedInExtractFields": false,
"usedInQueryExtension": false,
"customExtractPrompt": "",
"usedInToolCall": false,
"defaultConfig": {},
"fieldMap": {}
}

View File

@ -0,0 +1,6 @@
{
"provider": "Siliconflow",
"model": "BAAI/bge-reranker-v2-m3",
"name": "BAAI/bge-reranker-v2-m3",
"charsPointsPrice": 0
}
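
The rerank entry only declares a model id and price; the request itself is made elsewhere in the codebase. As a rough illustration only, assuming a Cohere/Jina-style POST {baseUrl}/rerank endpoint (the endpoint path and response shape are assumptions, not something this commit shows):

// Hedged sketch: rank documents against a query with an assumed rerank endpoint.
async function rerank(baseUrl: string, apiKey: string, query: string, documents: string[]) {
  const res = await fetch(`${baseUrl}/rerank`, {
    method: 'POST',
    headers: { Authorization: `Bearer ${apiKey}`, 'Content-Type': 'application/json' },
    body: JSON.stringify({ model: 'BAAI/bge-reranker-v2-m3', query, documents })
  });
  const data = (await res.json()) as { results: { index: number; relevance_score: number }[] };
  // Map scores back onto the input documents, highest relevance first.
  return data.results
    .sort((a, b) => b.relevance_score - a.relevance_score)
    .map((r) => ({ text: documents[r.index], score: r.relevance_score }));
}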

View File

@ -0,0 +1,6 @@
{
"provider": "Siliconflow",
"model": "FunAudioLLM/SenseVoiceSmall",
"name": "FunAudioLLM/SenseVoiceSmall",
"charsPointsPrice": 0
}
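
SenseVoiceSmall is registered here as a speech-to-text model. This commit only adds the config; assuming the provider (or a gateway in front of it) exposes an OpenAI-compatible transcription endpoint, a call with the official openai Node SDK would look roughly like this (baseURL and env var are placeholders):

import fs from 'fs';
import OpenAI from 'openai';

// Placeholders: point baseURL at the OpenAI-compatible gateway that serves this model.
const client = new OpenAI({ baseURL: 'https://example.com/v1', apiKey: process.env.STT_API_KEY });

async function transcribe(path: string) {
  const result = await client.audio.transcriptions.create({
    file: fs.createReadStream(path),     // audio file to transcribe
    model: 'FunAudioLLM/SenseVoiceSmall' // model id from the config above
  });
  return result.text;
}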

View File

@ -0,0 +1,192 @@
{
"provider": "MiniMax",
"model": "speech-01-turbo",
"name": "Minimax-speech-01-turbo",
"charsPointsPrice": 0,
"voices": [
{
"label": "minimax-male-qn-qingse",
"value": "male-qn-qingse"
},
{
"label": "minimax-male-qn-jingying",
"value": "male-qn-jingying"
},
{
"label": "minimax-male-qn-badao",
"value": "male-qn-badao"
},
{
"label": "minimax-male-qn-daxuesheng",
"value": "male-qn-daxuesheng"
},
{
"label": "minimax-female-shaonv",
"value": "female-shaonv"
},
{
"label": "minimax-female-yujie",
"value": "female-yujie"
},
{
"label": "minimax-female-chengshu",
"value": "female-chengshu"
},
{
"label": "minimax-female-tianmei",
"value": "female-tianmei"
},
{
"label": "minimax-presenter_male",
"value": "presenter_male"
},
{
"label": "minimax-presenter_female",
"value": "presenter_female"
},
{
"label": "minimax-audiobook_male_1",
"value": "audiobook_male_1"
},
{
"label": "minimax-audiobook_male_2",
"value": "audiobook_male_2"
},
{
"label": "minimax-audiobook_female_1",
"value": "audiobook_female_1"
},
{
"label": "minimax-audiobook_female_2",
"value": "audiobook_female_2"
},
{
"label": "minimax-male-qn-qingse-jingpin",
"value": "male-qn-qingse-jingpin"
},
{
"label": "minimax-male-qn-jingying-jingpin",
"value": "male-qn-jingying-jingpin"
},
{
"label": "minimax-male-qn-badao-jingpin",
"value": "male-qn-badao-jingpin"
},
{
"label": "minimax-male-qn-daxuesheng-jingpin",
"value": "male-qn-daxuesheng-jingpin"
},
{
"label": "minimax-female-shaonv-jingpin",
"value": "female-shaonv-jingpin"
},
{
"label": "minimax-female-yujie-jingpin",
"value": "female-yujie-jingpin"
},
{
"label": "minimax-female-chengshu-jingpin",
"value": "female-chengshu-jingpin"
},
{
"label": "minimax-female-tianmei-jingpin",
"value": "female-tianmei-jingpin"
},
{
"label": "minimax-clever_boy",
"value": "clever_boy"
},
{
"label": "minimax-cute_boy",
"value": "cute_boy"
},
{
"label": "minimax-lovely_girl",
"value": "lovely_girl"
},
{
"label": "minimax-cartoon_pig",
"value": "cartoon_pig"
},
{
"label": "minimax-bingjiao_didi",
"value": "bingjiao_didi"
},
{
"label": "minimax-junlang_nanyou",
"value": "junlang_nanyou"
},
{
"label": "minimax-chunzhen_xuedi",
"value": "chunzhen_xuedi"
},
{
"label": "minimax-lengdan_xiongzhang",
"value": "lengdan_xiongzhang"
},
{
"label": "minimax-badao_shaoye",
"value": "badao_shaoye"
},
{
"label": "minimax-tianxin_xiaoling",
"value": "tianxin_xiaoling"
},
{
"label": "minimax-qiaopi_mengmei",
"value": "qiaopi_mengmei"
},
{
"label": "minimax-wumei_yujie",
"value": "wumei_yujie"
},
{
"label": "minimax-diadia_xuemei",
"value": "diadia_xuemei"
},
{
"label": "minimax-danya_xuejie",
"value": "danya_xuejie"
},
{
"label": "minimax-Santa_Claus",
"value": "Santa_Claus"
},
{
"label": "minimax-Grinch",
"value": "Grinch"
},
{
"label": "minimax-Rudolph",
"value": "Rudolph"
},
{
"label": "minimax-Arnold",
"value": "Arnold"
},
{
"label": "minimax-Charming_Santa",
"value": "Charming_Santa"
},
{
"label": "minimax-Charming_Lady",
"value": "Charming_Lady"
},
{
"label": "minimax-Sweet_Girl",
"value": "Sweet_Girl"
},
{
"label": "minimax-Cute_Elf",
"value": "Cute_Elf"
},
{
"label": "minimax-Attractive_Girl",
"value": "Attractive_Girl"
},
{
"label": "minimax-Serene_Woman",
"value": "Serene_Woman"
}
]
}
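
Each voices entry maps a display label to the value that is sent as the voice parameter of a TTS request; for the Siliconflow models further down, that value also encodes the model prefix (e.g. FunAudioLLM/CosyVoice2-0.5B:alex). Assuming an OpenAI-compatible /audio/speech endpoint, which this commit does not itself show, a call with the openai Node SDK would look roughly like this (baseURL and env var are placeholders):

import fs from 'fs';
import OpenAI from 'openai';

// Placeholders: point baseURL at the OpenAI-compatible gateway that serves this TTS model.
const client = new OpenAI({ baseURL: 'https://example.com/v1', apiKey: process.env.TTS_API_KEY });

async function speak(text: string) {
  const speech = await client.audio.speech.create({
    model: 'speech-01-turbo',
    voice: 'male-qn-qingse', // one of the "value" strings from the voices list above
    input: text
  });
  fs.writeFileSync('out.mp3', Buffer.from(await speech.arrayBuffer()));
}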

View File

@ -0,0 +1,40 @@
{
"provider": "Siliconflow",
"model": "FunAudioLLM/CosyVoice2-0.5B",
"name": "FunAudioLLM/CosyVoice2-0.5B",
"charsPointsPrice": 0,
"voices": [
{
"label": "alex",
"value": "FunAudioLLM/CosyVoice2-0.5B:alex"
},
{
"label": "anna",
"value": "FunAudioLLM/CosyVoice2-0.5B:anna"
},
{
"label": "bella",
"value": "FunAudioLLM/CosyVoice2-0.5B:bella"
},
{
"label": "benjamin",
"value": "FunAudioLLM/CosyVoice2-0.5B:benjamin"
},
{
"label": "charles",
"value": "FunAudioLLM/CosyVoice2-0.5B:charles"
},
{
"label": "claire",
"value": "FunAudioLLM/CosyVoice2-0.5B:claire"
},
{
"label": "david",
"value": "FunAudioLLM/CosyVoice2-0.5B:david"
},
{
"label": "diana",
"value": "FunAudioLLM/CosyVoice2-0.5B:diana"
}
]
}

View File

@ -0,0 +1,40 @@
{
"provider": "Siliconflow",
"model": "RVC-Boss/GPT-SoVITS",
"name": "RVC-Boss/GPT-SoVITS",
"charsPointsPrice": 0,
"voices": [
{
"label": "alex",
"value": "RVC-Boss/GPT-SoVITS:alex"
},
{
"label": "anna",
"value": "RVC-Boss/GPT-SoVITS:anna"
},
{
"label": "bella",
"value": "RVC-Boss/GPT-SoVITS:bella"
},
{
"label": "benjamin",
"value": "RVC-Boss/GPT-SoVITS:benjamin"
},
{
"label": "charles",
"value": "RVC-Boss/GPT-SoVITS:charles"
},
{
"label": "claire",
"value": "RVC-Boss/GPT-SoVITS:claire"
},
{
"label": "david",
"value": "RVC-Boss/GPT-SoVITS:david"
},
{
"label": "diana",
"value": "RVC-Boss/GPT-SoVITS:diana"
}
]
}

View File

@ -0,0 +1,40 @@
{
"provider": "Siliconflow",
"model": "fishaudio/fish-speech-1.5",
"name": "fish-speech-1.5",
"charsPointsPrice": 0,
"voices": [
{
"label": "alex",
"value": "fishaudio/fish-speech-1.5:alex"
},
{
"label": "anna",
"value": "fishaudio/fish-speech-1.5:anna"
},
{
"label": "bella",
"value": "fishaudio/fish-speech-1.5:bella"
},
{
"label": "benjamin",
"value": "fishaudio/fish-speech-1.5:benjamin"
},
{
"label": "charles",
"value": "fishaudio/fish-speech-1.5:charles"
},
{
"label": "claire",
"value": "fishaudio/fish-speech-1.5:claire"
},
{
"label": "david",
"value": "fishaudio/fish-speech-1.5:david"
},
{
"label": "diana",
"value": "fishaudio/fish-speech-1.5:diana"
}
]
}

View File

@ -270,7 +270,6 @@ export const iconPaths = {
    import('./icons/core/workflow/template/datasource.svg'),
  'core/workflow/template/duckduckgo': () =>
    import('./icons/core/workflow/template/duckduckgo.svg'),
- 'core/workflow/template/sleep': () => import('./icons/core/workflow/template/sleep.svg'),
  'core/workflow/template/extractJson': () =>
    import('./icons/core/workflow/template/extractJson.svg'),
  'core/workflow/template/fetchUrl': () => import('./icons/core/workflow/template/fetchUrl.svg'),
@ -296,6 +295,7 @@ export const iconPaths = {
  'core/workflow/template/reply': () => import('./icons/core/workflow/template/reply.svg'),
  'core/workflow/template/runApp': () => import('./icons/core/workflow/template/runApp.svg'),
  'core/workflow/template/searxng': () => import('./icons/core/workflow/template/searxng.svg'),
+ 'core/workflow/template/sleep': () => import('./icons/core/workflow/template/sleep.svg'),
  'core/workflow/template/stopTool': () => import('./icons/core/workflow/template/stopTool.svg'),
  'core/workflow/template/systemConfig': () =>
    import('./icons/core/workflow/template/systemConfig.svg'),
@ -377,18 +377,19 @@ export const iconPaths = {
  'model/groq': () => import('./icons/model/groq.svg'),
  'model/huggingface': () => import('./icons/model/huggingface.svg'),
  'model/hunyuan': () => import('./icons/model/hunyuan.svg'),
+ 'model/intern': () => import('./icons/model/intern.svg'),
  'model/meta': () => import('./icons/model/meta.svg'),
  'model/minimax': () => import('./icons/model/minimax.svg'),
  'model/mistral': () => import('./icons/model/mistral.svg'),
+ 'model/moka': () => import('./icons/model/moka.svg'),
  'model/moonshot': () => import('./icons/model/moonshot.svg'),
  'model/ollama': () => import('./icons/model/ollama.svg'),
  'model/openai': () => import('./icons/model/openai.svg'),
  'model/qwen': () => import('./icons/model/qwen.svg'),
+ 'model/siliconflow': () => import('./icons/model/siliconflow.svg'),
  'model/sparkDesk': () => import('./icons/model/sparkDesk.svg'),
  'model/stepfun': () => import('./icons/model/stepfun.svg'),
  'model/yi': () => import('./icons/model/yi.svg'),
- 'model/intern': () => import('./icons/model/intern.svg'),
- 'model/moka': () => import('./icons/model/moka.svg'),
  more: () => import('./icons/more.svg'),
  moreLine: () => import('./icons/moreLine.svg'),
  out: () => import('./icons/out.svg'),

View File

@ -0,0 +1,3 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 120 120">
<path fill="#8358F6" fill-rule="evenodd" d="M100.74 12h-7.506c-24.021 0-37.867 15.347-37.867 38.867V54.9a30.862 30.862 0 0 0-8.507-1.196C29.816 53.703 16 67.52 16 84.563c0 17.044 13.816 30.86 30.86 30.86 17.044 0 30.86-13.816 30.86-30.86 0-2.073-.209-4.14-.623-6.172h23.643c6.225-.023 11.26-5.076 11.26-11.301 0-6.226-5.035-11.279-11.26-11.302H77.22v-5.922c0-9.008 6.505-15.513 16.014-15.513h7.506c6.107-.093 11.01-5.069 11.01-11.177 0-6.107-4.903-11.084-11.01-11.176zM56.035 84.563a9.175 9.175 0 1 0-18.35 0 9.175 9.175 0 0 0 18.35 0z"/>
</svg>

View File

@ -23,6 +23,8 @@
  "model.custom_extract_prompt": "Custom content extraction prompt words",
  "model.custom_extract_prompt_tip": "Override system prompt word, default is:\n\"\"\"\n你可以从 <对话记录></对话记录> 中提取指定 Json 信息,你仅需返回 Json 字符串,无需回答问题。\n<提取要求>\n{{description}}\n</提取要求>\n\n<提取规则>\n- 本次需提取的 json 字符串,需符合 JsonSchema 的规则。\n- type 代表数据类型; key 代表字段名; description 代表字段的描述; enum 是枚举值,代表可选的 value。\n- 如果没有可提取的内容,忽略该字段。\n</提取规则>\n\n<JsonSchema>\n{{json}}\n</JsonSchema>\n\n<对话记录>\n{{text}}\n</对话记录>\n\n提取的 json 字符串:\n\"\"\"",
  "model.dataset_process": "Dataset file parse",
+ "model.defaultConfig": "Additional Body parameters",
+ "model.defaultConfig_tip": "Each request will carry this additional Body parameter.",
  "model.default_config": "Body extra fields",
  "model.default_config_tip": "When initiating a conversation request, merge this configuration. \nFor example:\n\"\"\"\n{\n \"temperature\": 1,\n \"max_tokens\": null\n}\n\"\"\"",
  "model.default_system_chat_prompt": "Default prompt",

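The two new keys label the defaultConfig field that the model editor (diffed near the end of this commit) exposes: whatever JSON is stored there is carried as extra Body parameters on every request to that model. A minimal sketch of that kind of merge; which side wins on key conflicts is an implementation detail this commit does not show:

// Minimal sketch: merge a model's defaultConfig into the body of a chat request.
const withDefaultConfig = (
  body: Record<string, any>,
  defaultConfig?: Record<string, any>
): Record<string, any> => ({
  ...body,
  ...(defaultConfig ?? {}) // e.g. { "temperature": 1, "max_tokens": null }
});

// withDefaultConfig({ model: 'qwen-coder-turbo', messages: [] }, { temperature: 1 })
//   => { model: 'qwen-coder-turbo', messages: [], temperature: 1 }
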
View File

@ -940,6 +940,7 @@
  "model_moonshot": "Moonshot",
  "model_other": "Other",
  "model_qwen": "Qwen",
+ "model_siliconflow": "Siliconflow",
  "model_sparkdesk": "SprkDesk",
  "model_stepfun": "StepFun",
  "model_yi": "Yi",

View File

@ -23,6 +23,8 @@
  "model.custom_extract_prompt": "自定义内容提取提示词",
  "model.custom_extract_prompt_tip": "覆盖系统的提示词,默认为:\n\"\"\"\n你可以从 <对话记录></对话记录> 中提取指定 Json 信息,你仅需返回 Json 字符串,无需回答问题。\n<提取要求>\n{{description}}\n</提取要求>\n\n<提取规则>\n- 本次需提取的 json 字符串,需符合 JsonSchema 的规则。\n- type 代表数据类型; key 代表字段名; description 代表字段的描述; enum 是枚举值,代表可选的 value。\n- 如果没有可提取的内容,忽略该字段。\n</提取规则>\n\n<JsonSchema>\n{{json}}\n</JsonSchema>\n\n<对话记录>\n{{text}}\n</对话记录>\n\n提取的 json 字符串:\n\"\"\"",
  "model.dataset_process": "用于知识库文件处理",
+ "model.defaultConfig": "额外 Body 参数",
+ "model.defaultConfig_tip": "每次请求时候,都会携带该额外 Body 参数",
  "model.default_config": "Body 额外字段",
  "model.default_config_tip": "发起对话请求时候,合并该配置。例如:\n\"\"\"\n{\n \"temperature\": 1,\n \"max_tokens\": null\n}\n\"\"\"",
  "model.default_system_chat_prompt": "默认提示词",

View File

@ -943,6 +943,7 @@
  "model_moonshot": "月之暗面",
  "model_other": "其他",
  "model_qwen": "阿里千问",
+ "model_siliconflow": "硅基流动",
  "model_sparkdesk": "讯飞星火",
  "model_stepfun": "阶跃星辰",
  "model_yi": "零一万物",

View File

@ -22,6 +22,8 @@
  "model.custom_extract_prompt": "自訂內容提取提示詞",
  "model.custom_extract_prompt_tip": "覆蓋系統的提示詞,預設為:\n\"\"\"\n你可以从 <对话记录></对话记录> 中提取指定 Json 信息,你仅需返回 Json 字符串,无需回答问题。\n<提取要求>\n{{description}}\n</提取要求>\n\n<提取规则>\n- 本次需提取的 json 字符串,需符合 JsonSchema 的规则。\n- type 代表数据类型; key 代表字段名; description 代表字段的描述; enum 是枚举值,代表可选的 value。\n- 如果没有可提取的内容,忽略该字段。\n</提取规则>\n\n<JsonSchema>\n{{json}}\n</JsonSchema>\n\n<对话记录>\n{{text}}\n</对话记录>\n\n提取的 json 字符串:\n\"\"\"",
  "model.dataset_process": "用於知識庫文件處理",
+ "model.defaultConfig": "額外 Body 參數",
+ "model.defaultConfig_tip": "每次請求時候,都會攜帶該額外 Body 參數",
  "model.default_config": "Body 額外字段",
  "model.default_config_tip": "發起對話請求時候,合併該配置。例如:\n\"\"\"\n{\n \"temperature\": 1,\n \"max_tokens\": null\n}\n\"\"\"",
  "model.default_system_chat_prompt": "預設提示詞",

View File

@ -939,6 +939,7 @@
  "model_moonshot": "月之暗面",
  "model_other": "其他",
  "model_qwen": "阿里千問",
+ "model_siliconflow": "矽基流動",
  "model_sparkdesk": "訊飛星火",
  "model_stepfun": "階躍星辰",
  "model_yi": "零一萬物",

View File

@ -728,6 +728,29 @@ const ModelEditModal = ({
                </Flex>
              </Td>
            </Tr>
+           <Tr>
+             <Td>
+               <HStack spacing={1}>
+                 <Box>{t('account:model.defaultConfig')}</Box>
+                 <QuestionTip label={t('account:model.defaultConfig_tip')} />
+               </HStack>
+             </Td>
+             <Td textAlign={'right'}>
+               <Flex justifyContent={'flex-end'}>
+                 <JsonEditor
+                   value={JSON.stringify(getValues('defaultConfig'), null, 2)}
+                   onChange={(e) => {
+                     try {
+                       setValue('defaultConfig', JSON.parse(e));
+                     } catch (error) {
+                       console.error(error);
+                     }
+                   }}
+                   {...InputStyles}
+                 />
+               </Flex>
+             </Td>
+           </Tr>
          </>
        )}
        {isTTSModel && (