fix: variable input and update chat time

archer 2023-08-31 18:09:12 +08:00
parent 3420f677b6
commit b22c878cf9
GPG Key ID: 569A5660D2379E28
15 changed files with 30 additions and 48 deletions

View File

@@ -31,7 +31,8 @@
   "Mark Count": "Mark Count",
   "My Apps": "My Apps",
   "Output Field Settings": "Output Field Settings",
-  "Paste Config": "Paste Config"
+  "Paste Config": "Paste Config",
+  "Variable Key Repeat Tip": "Variable Key Repeat"
 },
 "chat": {
   "Admin Mark Content": "Corrected response",

View File

@@ -31,7 +31,8 @@
   "Mark Count": "标注答案数量",
   "My Apps": "我的应用",
   "Output Field Settings": "输出字段编辑",
-  "Paste Config": "粘贴配置"
+  "Paste Config": "粘贴配置",
+  "Variable Key Repeat Tip": "变量 key 重复"
 },
 "chat": {
   "Admin Mark Content": "纠正后的回复",

View File

@@ -198,6 +198,8 @@ const ChatBox = (
       chatHistory[chatHistory.length - 1]?.status !== 'finish',
     [chatHistory]
   );
+  // compute whether the variable input is finished
+  const [variableInputFinish, setVariableInputFinish] = useState(false);
   const variableIsFinish = useMemo(() => {
     if (!variableModules || chatHistory.length > 0) return true;
@@ -208,8 +210,8 @@ const ChatBox = (
       }
     }
-    return true;
-  }, [chatHistory.length, variableModules, variables]);
+    return variableInputFinish;
+  }, [chatHistory.length, variableInputFinish, variableModules, variables]);

   const { register, reset, getValues, setValue, handleSubmit } = useForm<Record<string, any>>({
     defaultValues: variables
@@ -408,6 +410,7 @@ const ChatBox = (
     ]
   );

+  // output data
   useImperativeHandle(ref, () => ({
     getChatHistory: () => chatHistory,
     resetVariables(e) {
@@ -420,6 +423,7 @@ const ChatBox = (
       setVariables(e || defaultVal);
     },
     resetHistory(e) {
+      setVariableInputFinish(!!e.length);
       setChatHistory(e);
     },
     scrollToBottom
@@ -554,9 +558,7 @@ const ChatBox = (
                 label: item.value,
                 value: item.value
               }))}
-              {...register(item.key, {
-                required: item.required
-              })}
+              value={getValues(item.key)}
               onchange={(e) => {
                 setValue(item.key, e);
                 setRefresh(!refresh);
@@ -574,6 +576,7 @@ const ChatBox = (
               onClick={handleSubmit((data) => {
                 onUpdateVariable?.(data);
                 setVariables(data);
+                setVariableInputFinish(true);
               })}
             >
               {'开始对话'}
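
The hunks above swap the implicit react-hook-form registration for an explicit `variableInputFinish` flag: it is set when the user submits the variable form ('开始对话', "Start chat") and by `resetHistory` when a non-empty history is loaded. A minimal sketch of the gating logic, with a hypothetical `useVariableGate` name and simplified types (not the actual component code):

```ts
import { useMemo, useState } from 'react';

type ChatItem = { status: 'loading' | 'finish' };

// Hypothetical extraction of the gating logic shown in the diff above.
function useVariableGate(hasVariableModules: boolean, chatHistory: ChatItem[]) {
  // set to true once the variable form is submitted (or a history is loaded)
  const [variableInputFinish, setVariableInputFinish] = useState(false);

  const variableIsFinish = useMemo(() => {
    // no variable form to fill, or an existing conversation: nothing blocks input
    if (!hasVariableModules || chatHistory.length > 0) return true;
    // otherwise wait for an explicit submit
    return variableInputFinish;
  }, [chatHistory.length, hasVariableModules, variableInputFinish]);

  return { variableIsFinish, setVariableInputFinish };
}
```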

View File

@@ -1,13 +1,6 @@
 import type { ShareChatEditType } from '@/types/app';
 import type { AppSchema } from '@/types/mongoSchema';

-export enum OpenAiChatEnum {
-  'GPT35' = 'gpt-3.5-turbo',
-  'GPT3516k' = 'gpt-3.5-turbo-16k',
-  'FastAI-Plus' = 'gpt-4',
-  'FastAI-Plus32k' = 'gpt-4-32k'
-}
-
 export const defaultApp: AppSchema = {
   _id: '',
   userId: 'userId',

View File

@@ -104,7 +104,6 @@ export async function pushDataToKb({
       // count q token
       const token = modelToolMap.countTokens({
-        model: 'gpt-3.5-turbo',
         messages: [{ obj: 'System', value: item.q }]
       });

View File

@@ -69,7 +69,7 @@ export async function getVector({
     .then(async (res) => {
       if (!res.data?.data?.[0]?.embedding) {
         // @ts-ignore
-        return Promise.reject(res.data?.error?.message || 'Embedding Error');
+        return Promise.reject(res.data?.error?.message || 'Embedding API Error');
       }
       return {
         tokenLen: res.data.usage.total_tokens || 0,

View File

@@ -4,13 +4,10 @@ import { jsonRes } from '@/service/response';
 import { authUser } from '@/service/utils/auth';
 import type { ChatItemType } from '@/types/chat';
 import { countOpenAIToken } from '@/utils/plugin/openai';
-import { OpenAiChatEnum } from '@/constants/model';
-
-type ModelType = `${OpenAiChatEnum}`;

 type Props = {
   messages: ChatItemType[];
-  model: ModelType;
+  model: string;
   maxLen: number;
 };
 type Response = ChatItemType[];
@@ -28,7 +25,6 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
   return jsonRes<Response>(res, {
     data: gpt_chatItemTokenSlice({
       messages,
-      model,
       maxToken: maxLen
     })
   });
@@ -42,11 +38,9 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
 export function gpt_chatItemTokenSlice({
   messages,
-  model = 'gpt-3.5-turbo',
   maxToken
 }: {
   messages: ChatItemType[];
-  model?: string;
   maxToken: number;
 }) {
   let result: ChatItemType[] = [];
@@ -54,7 +48,7 @@ export function gpt_chatItemTokenSlice({
   for (let i = 0; i < messages.length; i++) {
     const msgs = [...result, messages[i]];
-    const tokens = countOpenAIToken({ messages: msgs, model });
+    const tokens = countOpenAIToken({ messages: msgs });
     if (tokens < maxToken) {
       result = msgs;
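
`gpt_chatItemTokenSlice` keeps the same greedy shape, just without a per-model parameter: accumulate messages from the front while the running token count stays under the budget. A standalone sketch (the `countTokens` callback stands in for `countOpenAIToken`, and the stop-at-first-overflow behavior is an assumption about the lines not shown):

```ts
type Msg = { obj: string; value: string };

// Greedy prefix slice: keep adding messages while the running count
// stays under maxToken; stop at the first message that overflows.
function sliceByTokens(
  messages: Msg[],
  maxToken: number,
  countTokens: (msgs: Msg[]) => number
): Msg[] {
  let result: Msg[] = [];
  for (const message of messages) {
    const candidate = [...result, message];
    if (countTokens(candidate) >= maxToken) break;
    result = candidate;
  }
  return result;
}
```

Note that the visible loop re-counts the whole prefix on every iteration, which is quadratic in message count; acceptable for short histories.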

View File

@@ -35,7 +35,6 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
   // token check
   const token = modelToolMap.countTokens({
-    model: 'gpt-3.5-turbo',
     messages: [{ obj: 'System', value: q }]
   });

View File

@@ -10,7 +10,7 @@ import type { VariableItemType } from '@/types/app';
 import MyIcon from '@/components/Icon';
 import { customAlphabet } from 'nanoid';
 const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 6);
-import VariableEditModal from '../../../VariableEditModal';
+import VariableEditModal, { addVariable } from '../../../VariableEditModal';

 export const defaultVariable: VariableItemType = {
   id: nanoid(),
@@ -105,7 +105,7 @@ const NodeUserGuide = ({ data }: NodeProps<FlowModuleItemType>) => {
           variant={'base'}
           leftIcon={<AddIcon fontSize={'10px'} />}
           onClick={() => {
-            const newVariable = { ...defaultVariable, id: nanoid() };
+            const newVariable = addVariable();
             updateVariables(variables.concat(newVariable));
             setEditVariable(newVariable);
           }}

View File

@@ -532,6 +532,13 @@ const Settings = ({ appId }: { appId: string }) => {
         variables.map((item) => (item.id === variable.id ? variable : item))
       );
     } else {
+      // reject a duplicate variable key
+      if (variables.find((item) => item.key === variable.key)) {
+        return toast({
+          status: 'warning',
+          title: t('app.Variable Key Repeat Tip')
+        });
+      }
       appendVariable(variable);
     }

View File

@@ -204,6 +204,6 @@ export const defaultVariable: VariableItemType = {
   enums: [{ value: '' }]
 };
 export const addVariable = () => {
-  const newVariable = { ...defaultVariable, id: nanoid() };
+  const newVariable = { ...defaultVariable, key: nanoid(), id: nanoid() };
   return newVariable;
 };
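
The two variable hunks work together: the settings panel now rejects a duplicate `key` with a warning toast, and `addVariable` seeds each new variable with a random `key`, so adding two variables in a row no longer collides on that check before the user renames them. A self-contained sketch of both pieces, with simplified types (`VariableItem` and `hasDuplicateKey` are illustrative names, not the repo's):

```ts
import { customAlphabet } from 'nanoid';

const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 6);

type VariableItem = { id: string; key: string };

// New variables get a random key as well as a random id.
const addVariable = (): VariableItem => ({ key: nanoid(), id: nanoid() });

// Guard used before appending: keys must be unique within one app.
const hasDuplicateKey = (variables: VariableItem[], candidate: VariableItem): boolean =>
  variables.some((item) => item.key === candidate.key);
```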

View File

@@ -1,6 +1,5 @@
 import type { NextApiResponse } from 'next';
 import { sseResponse } from '@/service/utils/tools';
-import { OpenAiChatEnum } from '@/constants/model';
 import { adaptChatItem_openAI, countOpenAIToken } from '@/utils/plugin/openai';
 import { modelToolMap } from '@/utils/plugin';
 import { ChatContextFilter } from '@/service/utils/chat/index';
@@ -198,7 +197,6 @@ function filterQuote({
   model: ChatModelItemType;
 }) {
   const sliceResult = modelToolMap.tokenSlice({
-    model: model.model,
     maxToken: model.quoteMaxToken,
     messages: quoteQA.map((item) => ({
       obj: ChatRoleEnum.System,
@@ -312,7 +310,6 @@ function getMaxTokens({
   /* count response max token */
   const promptsToken = modelToolMap.countTokens({
-    model: model.model,
     messages: filterMessages
   });
   maxToken = maxToken + promptsToken > tokensLimit ? tokensLimit - promptsToken : maxToken;
@@ -383,7 +380,6 @@ async function streamResponse({
   }
   if (error) {
-    console.log(error);
     return Promise.reject(error);
   }

View File

@@ -1,7 +1,6 @@
 import { ChatItemType } from '@/types/chat';
 import { modelToolMap } from '@/utils/plugin';
 import { ChatRoleEnum } from '@/constants/chat';
-import { OpenAiChatEnum } from '@/constants/model';
 import type { NextApiResponse } from 'next';

 export type ChatCompletionResponseType = {
@@ -14,7 +13,7 @@ export type StreamResponseType = {
   chatResponse: any;
   prompts: ChatItemType[];
   res: NextApiResponse;
-  model: `${OpenAiChatEnum}`;
+  model: string;
   [key: string]: any;
 };
@@ -45,7 +44,6 @@ export const ChatContextFilter = ({
   // reduce token of systemPrompt
   maxTokens -= modelToolMap.countTokens({
-    model,
     messages: systemPrompts
   });
@@ -57,7 +55,6 @@ export const ChatContextFilter = ({
     chats.unshift(chatPrompts[i]);
     const tokens = modelToolMap.countTokens({
-      model,
       messages: chats
     });

View File

@@ -44,7 +44,7 @@ export async function saveChat({
   ];

   if (chatHistory) {
-    promise.push([
+    promise.push(
       Chat.updateOne(
         { chatId, userId },
         {
@@ -52,7 +52,7 @@ export async function saveChat({
           updateTime: new Date()
         }
       )
-    ]);
+    );
   } else {
     promise.push(
       Chat.create({
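
This is the "update chat time" half of the commit: `promise.push([Chat.updateOne(...)])` pushed an array into the list later passed to `Promise.all`. `Promise.all` only awaits thenables, and a Mongoose query is a lazy thenable that executes when `.then()` is called, so wrapped inside a plain array it was never executed and `updateTime` was never written. A minimal stand-in demonstrating the difference:

```ts
// Stand-in for a Mongoose query: lazy, only runs when .then() is invoked.
const lazyQuery = {
  then(resolve: (value: string) => void) {
    console.log('query executed');
    resolve('ok');
  }
};

async function demo() {
  await Promise.all([[lazyQuery]]); // element is a plain array: query never runs
  await Promise.all([lazyQuery]); // element is a thenable: logs "query executed"
}

demo();
```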

View File

@@ -47,21 +47,13 @@ export const adaptChatItem_openAI = ({
   }));
 };

-export function countOpenAIToken({
-  messages,
-  model = 'gpt-3.5-turbo'
-}: {
-  messages: ChatItemType[];
-  model?: string;
-}) {
-  const diffVal = model.startsWith('gpt-3.5-turbo') ? 3 : 2;
+export function countOpenAIToken({ messages }: { messages: ChatItemType[] }) {
   const adaptMessages = adaptChatItem_openAI({ messages, reserveId: true });
   const token = adaptMessages.reduce((sum, item) => {
     const text = `${item.role}\n${item.content}`;
     const enc = getOpenAiEncMap();
     const encodeText = enc.encode(text);
-    const tokens = encodeText.length + diffVal;
+    const tokens = encodeText.length + 3; // estimated per-message overhead
     return sum + tokens;
   }, 0);
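
Token counting is now model-agnostic: the old `diffVal` (3 for gpt-3.5-turbo, 2 otherwise) becomes a flat +3 per message, in line with the per-message wrapper overhead OpenAI's cookbook describes for its chat formats. The resulting estimator in isolation (the `encode` parameter stands in for the tiktoken-style encoder returned by `getOpenAiEncMap`):

```ts
type OpenAIMessage = { role: string; content: string };

// Estimate prompt tokens: encoded text length plus ~3 wrapper tokens
// per message (role/content delimiters in the chat format).
function estimateTokens(
  messages: OpenAIMessage[],
  encode: (text: string) => number[]
): number {
  return messages.reduce(
    (sum, message) => sum + encode(`${message.role}\n${message.content}`).length + 3,
    0
  );
}
```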