perf: search prompt

archer 2023-05-09 16:32:02 +08:00
parent f52f514f5f
commit a837552b56
No known key found for this signature in database
GPG Key ID: 569A5660D2379E28
4 changed files with 25 additions and 27 deletions

View File

@@ -55,7 +55,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
// Knowledge base search is enabled
if (model.chat.useKb) {
- const { code, searchPrompt } = await searchKb({
+ const { code, searchPrompt, aiPrompt } = await searchKb({
userOpenAiKey,
prompts,
similarity: ModelVectorSearchModeMap[model.chat.searchMode]?.similarity,
@@ -68,6 +68,9 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
return res.send(searchPrompt?.value);
}
+ if (aiPrompt) {
+   prompts.splice(prompts.length - 1, 0, aiPrompt);
+ }
searchPrompt && prompts.unshift(searchPrompt);
} else {
// Knowledge base search is not used; only the system prompt applies
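
With this change the chat handler threads two optional prompts from searchKb into the message list instead of one: aiPrompt is spliced in directly before the latest user question, while searchPrompt is still pushed to the front. A minimal sketch of the resulting ordering, assuming a simplified ChatItemType and illustrative values rather than the repo's exact types:

// Sketch only: ChatItemType is a simplified assumption, not the repo's type.
type ChatItemType = { obj: 'System' | 'Human' | 'AI'; value: string };

const prompts: ChatItemType[] = [
  { obj: 'Human', value: 'earlier question' },
  { obj: 'AI', value: 'earlier answer' },
  { obj: 'Human', value: 'latest question' }
];
const searchPrompt: ChatItemType = { obj: 'System', value: "知识库:'...'" };
const aiPrompt: ChatItemType = { obj: 'AI', value: 'question-answer game rules ...' };

// Same insertion logic as the handler above: the AI "rules" message lands right
// before the latest user question, and the knowledge-base context goes first.
prompts.splice(prompts.length - 1, 0, aiPrompt);
prompts.unshift(searchPrompt);

// Final order: System (knowledge base) -> Human -> AI -> AI (rules) -> Human (latest)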

View File

@@ -508,7 +508,7 @@ const Chat = ({
isLeavePage.current = true;
controller.current?.abort();
};
- }, []);
+ }, [modelId, chatId]);
return (
<Flex
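
The dependency array change means the cleanup now runs not only on unmount but also whenever modelId or chatId changes, so switching to another conversation aborts any in-flight streaming request for the previous one. A rough sketch of that behavior, pulled out into a standalone hook; the hook name and ref wiring are assumptions, not the repo's code:

import { useEffect, useRef } from 'react';

// Rough sketch, not the repo's Chat component: only the abort-on-switch behavior
// implied by the new dependency array is shown.
function useAbortOnChatSwitch(modelId: string, chatId: string) {
  const isLeavePage = useRef(false);
  const controller = useRef<AbortController | null>(null);

  useEffect(() => {
    isLeavePage.current = false;
    return () => {
      // runs on unmount and whenever modelId/chatId changes,
      // cancelling the streaming request of the previous chat
      isLeavePage.current = true;
      controller.current?.abort();
    };
  }, [modelId, chatId]);

  return { isLeavePage, controller };
}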

View File

@@ -61,13 +61,7 @@ const SelectFileModal = ({
const { openConfirm, ConfirmChild } = useConfirm({
content: `确认导入该文件,需要一定时间进行拆解,该任务无法终止!如果余额不足,未完成的任务会被直接清除。一共 ${
splitRes.chunks.length
- } ${
-   splitRes.tokens
-     ? `大约 ${splitRes.tokens} 个tokens, 约 ${formatPrice(
-         splitRes.tokens * modeMap[mode].price
-       )} `
-     : ''
- }`
+ } ${splitRes.tokens ? `大约 ${splitRes.tokens} 个tokens。` : ''}`
});
const onSelectFile = useCallback(

View File

@@ -24,7 +24,11 @@ export const searchKb = async ({
}): Promise<{
code: 200 | 201;
searchPrompt?: {
- obj: `${ChatRoleEnum}`;
+ obj: `${ChatRoleEnum.System}`;
value: string;
};
+ aiPrompt?: {
+   obj: `${ChatRoleEnum.AI}`;
+   value: string;
+ };
}> => {
@@ -85,24 +89,11 @@ export const searchKb = async ({
};
const filterRate = filterRateMap[systemPrompts.length] || filterRateMap[0];
// count fixed system prompt
- const fixedSystemPrompt = `
- ${model.chat.systemPrompt}
- ${
-   model.chat.searchMode === ModelVectorSearchModeEnum.hightSimilarity ? '不回答知识库外的内容.' : ''
- }
- :`;
- const fixedSystemTokens = modelToolMap[model.chat.chatModel].countTokens({
-   messages: [{ obj: 'System', value: fixedSystemPrompt }]
- });
- const maxTokens = modelConstantsData.systemMaxToken - fixedSystemTokens;
const filterSystemPrompt = filterRate
.map((rate, i) =>
modelToolMap[model.chat.chatModel].sliceText({
text: systemPrompts[i],
- length: Math.floor(maxTokens * rate)
+ length: Math.floor(modelConstantsData.systemMaxToken * rate)
})
)
.join('\n');
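
A note on the hunk above: the fixed system prompt is no longer built or counted, so each matched knowledge chunk is now trimmed against the full modelConstantsData.systemMaxToken budget weighted by filterRate, rather than against whatever remained after the fixed prompt. A simplified sketch of that slicing step, using character slicing in place of the repo's token-based modelToolMap sliceText and illustrative numbers:

// Simplified sketch: slice by characters instead of tokens; values are illustrative.
const systemMaxToken = 2800;          // stand-in for modelConstantsData.systemMaxToken
const filterRate = [0.5, 0.3, 0.2];   // e.g. what filterRateMap might yield for 3 chunks
const systemPrompts = ['chunk A ...', 'chunk B ...', 'chunk C ...'];

const filterSystemPrompt = filterRate
  .map((rate, i) => (systemPrompts[i] || '').slice(0, Math.floor(systemMaxToken * rate)))
  .join('\n');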
@@ -112,7 +103,7 @@ ${
return {
code: 201,
searchPrompt: {
- obj: ChatRoleEnum.AI,
+ obj: ChatRoleEnum.System,
value: '对不起,你的问题不在知识库中。'
}
};
@@ -135,7 +126,17 @@ ${
code: 200,
searchPrompt: {
obj: ChatRoleEnum.System,
- value: `${fixedSystemPrompt}'${filterSystemPrompt}'`
- }
+ value: `知识库:'${filterSystemPrompt}'`
+ },
+ aiPrompt: ModelVectorSearchModeEnum.hightSimilarity
+   ? {
+       obj: 'AI',
+       value: `我来玩一个问答游戏,规则为:
+ 1.
+ 2."${model.chat.systemPrompt || model.name}"
+ 3.
+ 4.,"我不知道。"`
+     }
+   : undefined
};
};
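
Taken together, searchKb now returns up to two typed prompts instead of a single combined system prompt: a System message carrying only the sliced knowledge-base text, and, optionally, an AI message carrying the question-answer rules. A hedged sketch of the resulting contract; the string values of ChatRoleEnum are assumed here, and the example values are illustrative:

// Sketch only: ChatRoleEnum is assumed to serialize to 'System' | 'Human' | 'AI',
// matching the template-literal types in the diff above.
enum ChatRoleEnum {
  System = 'System',
  Human = 'Human',
  AI = 'AI'
}

type SearchKbResult = {
  // 201: nothing similar enough was found; the caller just sends searchPrompt.value
  //      back as the final reply. 200: knowledge-base context was found.
  code: 200 | 201;
  searchPrompt?: { obj: `${ChatRoleEnum.System}`; value: string };
  aiPrompt?: { obj: `${ChatRoleEnum.AI}`; value: string };
};

// Example shapes for the two outcomes (values illustrative):
const notFound: SearchKbResult = {
  code: 201,
  searchPrompt: { obj: ChatRoleEnum.System, value: '对不起,你的问题不在知识库中。' }
};
const found: SearchKbResult = {
  code: 200,
  searchPrompt: { obj: ChatRoleEnum.System, value: "知识库:'...sliced chunks...'" },
  aiPrompt: { obj: ChatRoleEnum.AI, value: '问答游戏规则 ...' }
};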