perf: response tag; feat: history quote

This commit is contained in:
archer 2023-08-15 09:55:00 +08:00
parent b8a65e1742
commit cc57a7e27e
No known key found for this signature in database
GPG Key ID: 569A5660D2379E28
9 changed files with 39 additions and 32 deletions

View File

@ -51,7 +51,7 @@ const ResponseTags = ({
bg: 'transparent' bg: 'transparent'
}; };
return ( return responseData.length === 0 ? null : (
<Flex alignItems={'center'} mt={2} flexWrap={'wrap'}> <Flex alignItems={'center'} mt={2} flexWrap={'wrap'}>
{quoteList.length > 0 && ( {quoteList.length > 0 && (
<MyTooltip label="查看引用"> <MyTooltip label="查看引用">

View File

@ -54,6 +54,7 @@ import styles from './index.module.scss';
const textareaMinH = '22px'; const textareaMinH = '22px';
type generatingMessageProps = { text?: string; name?: string; status?: 'running' | 'finish' }; type generatingMessageProps = { text?: string; name?: string; status?: 'running' | 'finish' };
export type StartChatFnProps = { export type StartChatFnProps = {
chatList: ChatSiteItemType[];
messages: MessageItemType[]; messages: MessageItemType[];
controller: AbortController; controller: AbortController;
variables: Record<string, any>; variables: Record<string, any>;
@ -311,6 +312,7 @@ const ChatBox = (
const messages = adaptChatItem_openAI({ messages: newChatList, reserveId: true }); const messages = adaptChatItem_openAI({ messages: newChatList, reserveId: true });
const { responseData } = await onStartChat({ const { responseData } = await onStartChat({
chatList: newChatList,
messages, messages,
controller: abortSignal, controller: abortSignal,
generatingMessage, generatingMessage,

View File

@ -26,6 +26,12 @@ const PayRecordTable = () => {
const [payOrders, setPayOrders] = useState<PaySchema[]>([]); const [payOrders, setPayOrders] = useState<PaySchema[]>([]);
const { toast } = useToast(); const { toast } = useToast();
const { isInitialLoading, refetch } = useQuery(['initPayOrder'], getPayOrders, {
onSuccess(res) {
setPayOrders(res);
}
});
const handleRefreshPayOrder = useCallback( const handleRefreshPayOrder = useCallback(
async (payId: string) => { async (payId: string) => {
setIsLoading(true); setIsLoading(true);
@ -36,8 +42,6 @@ const PayRecordTable = () => {
title: data, title: data,
status: 'success' status: 'success'
}); });
const res = await getPayOrders();
setPayOrders(res);
} catch (error: any) { } catch (error: any) {
toast({ toast({
title: error?.message, title: error?.message,
@ -45,18 +49,15 @@ const PayRecordTable = () => {
}); });
console.log(error); console.log(error);
} }
try {
refetch();
} catch (error) {}
setIsLoading(false); setIsLoading(false);
}, },
[setIsLoading, toast] [refetch, setIsLoading, toast]
); );
const { isInitialLoading } = useQuery(['initPayOrder'], getPayOrders, {
onSuccess(res) {
setPayOrders(res);
}
});
return ( return (
<Box position={'relative'} h={'100%'}> <Box position={'relative'} h={'100%'}>
{!isInitialLoading && payOrders.length === 0 ? ( {!isInitialLoading && payOrders.length === 0 ? (

View File

@ -4,16 +4,14 @@ import { authUser } from '@/service/utils/auth';
import { sseErrRes } from '@/service/response'; import { sseErrRes } from '@/service/response';
import { sseResponseEventEnum } from '@/constants/chat'; import { sseResponseEventEnum } from '@/constants/chat';
import { sseResponse } from '@/service/utils/tools'; import { sseResponse } from '@/service/utils/tools';
import { type ChatCompletionRequestMessage } from 'openai';
import { AppModuleItemType } from '@/types/app'; import { AppModuleItemType } from '@/types/app';
import { dispatchModules } from '../openapi/v1/chat/completions'; import { dispatchModules } from '../openapi/v1/chat/completions';
import { gptMessage2ChatType } from '@/utils/adapt';
import { pushTaskBill } from '@/service/events/pushBill'; import { pushTaskBill } from '@/service/events/pushBill';
import { BillSourceEnum } from '@/constants/user'; import { BillSourceEnum } from '@/constants/user';
import { ChatItemType } from '@/types/chat';
export type MessageItemType = ChatCompletionRequestMessage & { _id?: string };
export type Props = { export type Props = {
history: MessageItemType[]; history: ChatItemType[];
prompt: string; prompt: string;
modules: AppModuleItemType[]; modules: AppModuleItemType[];
variables: Record<string, any>; variables: Record<string, any>;
@ -51,7 +49,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
variables, variables,
user, user,
params: { params: {
history: gptMessage2ChatType(history), history,
userChatInput: prompt userChatInput: prompt
}, },
stream: true, stream: true,

View File

@ -5,6 +5,7 @@ import { authUser } from '@/service/utils/auth';
import { connectToDatabase, Chat } from '@/service/mongo'; import { connectToDatabase, Chat } from '@/service/mongo';
import { Types } from 'mongoose'; import { Types } from 'mongoose';
import type { ChatItemType } from '@/types/chat'; import type { ChatItemType } from '@/types/chat';
import { TaskResponseKeyEnum } from '@/constants/chat';
export type Props = { export type Props = {
chatId?: string; chatId?: string;
@ -55,10 +56,12 @@ export async function getChatHistory({
{ {
$project: { $project: {
obj: '$content.obj', obj: '$content.obj',
value: '$content.value' value: '$content.value',
[TaskResponseKeyEnum.responseData]: `$content.responseData`
} }
} }
]); ]);
console.log(history);
return { history }; return { history };
} }

View File

@ -38,19 +38,19 @@ const ChatTest = (
const isOpen = useMemo(() => modules && modules.length > 0, [modules]); const isOpen = useMemo(() => modules && modules.length > 0, [modules]);
const startChat = useCallback( const startChat = useCallback(
async ({ messages, controller, generatingMessage, variables }: StartChatFnProps) => { async ({ chatList, controller, generatingMessage, variables }: StartChatFnProps) => {
const historyMaxLen = const historyMaxLen =
modules modules
?.find((item) => item.flowType === FlowModuleTypeEnum.historyNode) ?.find((item) => item.flowType === FlowModuleTypeEnum.historyNode)
?.inputs?.find((item) => item.key === 'maxContext')?.value || 0; ?.inputs?.find((item) => item.key === 'maxContext')?.value || 0;
const history = messages.slice(-historyMaxLen - 2, -2); const history = chatList.slice(-historyMaxLen - 2, -2);
// 流请求,获取数据 // 流请求,获取数据
const { responseText, responseData } = await streamFetch({ const { responseText, responseData } = await streamFetch({
url: '/api/chat/chatTest', url: '/api/chat/chatTest',
data: { data: {
history, history,
prompt: messages[messages.length - 2].content, prompt: chatList[chatList.length - 2].value,
modules, modules,
variables, variables,
appId: app._id, appId: app._id,

View File

@ -572,19 +572,19 @@ const ChatTest = ({ appId }: { appId: string }) => {
const [modules, setModules] = useState<AppModuleItemType[]>([]); const [modules, setModules] = useState<AppModuleItemType[]>([]);
const startChat = useCallback( const startChat = useCallback(
async ({ messages, controller, generatingMessage, variables }: StartChatFnProps) => { async ({ chatList, controller, generatingMessage, variables }: StartChatFnProps) => {
const historyMaxLen = const historyMaxLen =
modules modules
?.find((item) => item.flowType === FlowModuleTypeEnum.historyNode) ?.find((item) => item.flowType === FlowModuleTypeEnum.historyNode)
?.inputs?.find((item) => item.key === 'maxContext')?.value || 0; ?.inputs?.find((item) => item.key === 'maxContext')?.value || 0;
const history = messages.slice(-historyMaxLen - 2, -2); const history = chatList.slice(-historyMaxLen - 2, -2);
// 流请求,获取数据 // 流请求,获取数据
const { responseText, responseData } = await streamFetch({ const { responseText, responseData } = await streamFetch({
url: '/api/chat/chatTest', url: '/api/chat/chatTest',
data: { data: {
history, history,
prompt: messages[messages.length - 2].content, prompt: chatList[chatList.length - 2].value,
modules, modules,
variables, variables,
appId, appId,

View File

@ -63,6 +63,7 @@ export const dispatchChatCompletion = async (props: Record<string, any>): Promis
} }
const { filterQuoteQA, quotePrompt } = filterQuote({ const { filterQuoteQA, quotePrompt } = filterQuote({
history,
quoteQA, quoteQA,
model: modelConstantsData model: modelConstantsData
}); });
@ -181,23 +182,32 @@ export const dispatchChatCompletion = async (props: Record<string, any>): Promis
}; };
function filterQuote({ function filterQuote({
history = [],
quoteQA = [], quoteQA = [],
model model
}: { }: {
history: ChatProps['history'];
quoteQA: ChatProps['quoteQA']; quoteQA: ChatProps['quoteQA'];
model: ChatModelItemType; model: ChatModelItemType;
}) { }) {
// concat history quote
const historyQuote =
history[history.length - 1]?.responseData
?.find((item) => item.moduleName === ChatModuleEnum.AIChat)
?.quoteList?.filter((item) => !quoteQA.find((quote) => quote.id === item.id)) || [];
const concatQuote = quoteQA.concat(historyQuote.slice(0, 3));
const sliceResult = modelToolMap.tokenSlice({ const sliceResult = modelToolMap.tokenSlice({
model: model.model, model: model.model,
maxToken: model.quoteMaxToken, maxToken: model.quoteMaxToken,
messages: quoteQA.map((item, i) => ({ messages: concatQuote.map((item, i) => ({
obj: ChatRoleEnum.System, obj: ChatRoleEnum.System,
value: item.a ? `{instruction:${item.q},output:${item.a}}` : `{instruction:${item.q}}` value: item.a ? `{instruction:${item.q},output:${item.a}}` : `{instruction:${item.q}}`
})) }))
}); });
// slice filterSearch // slice filterSearch
const filterQuoteQA = quoteQA.slice(0, sliceResult.length); const filterQuoteQA = concatQuote.slice(0, sliceResult.length);
const quotePrompt = const quotePrompt =
filterQuoteQA.length > 0 filterQuoteQA.length > 0

View File

@ -9,7 +9,6 @@ import { PgTrainingTableName } from '@/constants/plugin';
type KBSearchProps = { type KBSearchProps = {
kbList: SelectedKbType; kbList: SelectedKbType;
history: ChatItemType[];
similarity: number; similarity: number;
limit: number; limit: number;
userChatInput: string; userChatInput: string;
@ -22,13 +21,7 @@ export type KBSearchResponse = {
}; };
export async function dispatchKBSearch(props: Record<string, any>): Promise<KBSearchResponse> { export async function dispatchKBSearch(props: Record<string, any>): Promise<KBSearchResponse> {
const { const { kbList = [], similarity = 0.8, limit = 5, userChatInput } = props as KBSearchProps;
kbList = [],
history = [],
similarity = 0.8,
limit = 5,
userChatInput
} = props as KBSearchProps;
if (kbList.length === 0) { if (kbList.length === 0) {
return Promise.reject("You didn't choose the knowledge base"); return Promise.reject("You didn't choose the knowledge base");