From 176c5a4d7959b06a2ce187afe713fe95c8767d8d Mon Sep 17 00:00:00 2001
From: archer <545436317@qq.com>
Date: Tue, 30 May 2023 21:27:09 +0800
Subject: [PATCH] fix: prompts filter

---
 src/pages/api/chat/chat.ts           | 6 ++++--
 src/pages/api/chat/shareChat/chat.ts | 6 ++++--
 src/pages/api/openapi/chat/chat.ts   | 2 +-
 src/service/utils/chat/openai.ts     | 1 +
 4 files changed, 10 insertions(+), 5 deletions(-)

diff --git a/src/pages/api/chat/chat.ts b/src/pages/api/chat/chat.ts
index 95314a059..90ded6770 100644
--- a/src/pages/api/chat/chat.ts
+++ b/src/pages/api/chat/chat.ts
@@ -129,7 +129,9 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
     );
 
     // Send the chat request
-    const { streamResponse } = await modelServiceToolMap[model.chat.chatModel].chatCompletion({
+    const { streamResponse, responseMessages } = await modelServiceToolMap[
+      model.chat.chatModel
+    ].chatCompletion({
       apiKey: userOpenAiKey || systemAuthKey,
       temperature: +temperature,
       messages: prompts,
@@ -147,7 +149,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
         model: model.chat.chatModel,
         res,
         chatResponse: streamResponse,
-        prompts
+        prompts: responseMessages
       });
 
       // save chat
diff --git a/src/pages/api/chat/shareChat/chat.ts b/src/pages/api/chat/shareChat/chat.ts
index 8d3d4a31e..63d569cf5 100644
--- a/src/pages/api/chat/shareChat/chat.ts
+++ b/src/pages/api/chat/shareChat/chat.ts
@@ -88,7 +88,9 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
     );
 
     // Send the request
-    const { streamResponse } = await modelServiceToolMap[model.chat.chatModel].chatCompletion({
+    const { streamResponse, responseMessages } = await modelServiceToolMap[
+      model.chat.chatModel
+    ].chatCompletion({
       apiKey: userOpenAiKey || systemAuthKey,
       temperature: +temperature,
       messages: prompts,
@@ -106,7 +108,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
         model: model.chat.chatModel,
         res,
         chatResponse: streamResponse,
-        prompts
+        prompts: responseMessages
      });
 
      res.end();
diff --git a/src/pages/api/openapi/chat/chat.ts b/src/pages/api/openapi/chat/chat.ts
index 3af123876..dcecbe173 100644
--- a/src/pages/api/openapi/chat/chat.ts
+++ b/src/pages/api/openapi/chat/chat.ts
@@ -140,7 +140,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
       model: model.chat.chatModel,
       res,
       chatResponse: streamResponse,
-      prompts
+      prompts: responseMessages
     });
     res.end();
     return {
diff --git a/src/service/utils/chat/openai.ts b/src/service/utils/chat/openai.ts
index 918387ec9..703bd1747 100644
--- a/src/service/utils/chat/openai.ts
+++ b/src/service/utils/chat/openai.ts
@@ -104,6 +104,7 @@ export const openAiStreamResponse = async ({
     obj: ChatRoleEnum.AI,
     value: responseContent
   });
+  const totalTokens = modelToolMap[model].countTokens({ messages: finishMessages });