perf: chat completion api

commit c26be2e885 (parent 2f614ac40d)

Summary: this change threads a new detail flag through the chat completion pipeline (API handler -> dispatchModules -> individual module dispatchers). When detail is false, the default for the OpenAPI route, SSE frames are written without FastGPT's named events and the non-stream JSON reply omits responseData, so the endpoint behaves like a plain OpenAI-compatible chat completion API; module status frames are suppressed as well. The web client's streamFetch and the internal chat handler always pass detail: true to keep the richer events for the UI. Two now-unused imports are dropped, and textAdaptGptResponse's model parameter gains an empty-string default.
@@ -31,6 +31,7 @@ export const streamFetch = ({
       signal: abortSignal.signal,
       body: JSON.stringify({
         ...data,
+        detail: true,
         stream: true
       })
     });
@@ -54,7 +54,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
       history: gptMessage2ChatType(history),
       userChatInput: prompt
     },
-    stream: true
+    stream: true,
+    detail: true
   });

   sseResponse({
@@ -41,6 +41,7 @@ export type Props = CreateChatCompletionRequest &
   FastGptShareChatProps & {
     messages: MessageItemType[];
     stream?: boolean;
+    detail?: boolean;
     variables: Record<string, any>;
   };
 export type ChatResponseType = {
@@ -57,7 +58,15 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
     res.end();
   });

-  let { chatId, appId, shareId, stream = false, messages = [], variables = {} } = req.body as Props;
+  let {
+    chatId,
+    appId,
+    shareId,
+    stream = false,
+    detail = false,
+    messages = [],
+    variables = {}
+  } = req.body as Props;

   try {
     if (!messages) {
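For orientation, a minimal sketch of a request to this route with the new flag. The path matches the import seen later in this diff (@/pages/api/openapi/v1/chat/completions); the Authorization header, message shape, and field values are illustrative assumptions, not part of the change:

// Hypothetical client call; apiKey and chatId are placeholders.
async function callChatCompletions(apiKey: string) {
  return fetch('/api/openapi/v1/chat/completions', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${apiKey}` // assumed auth scheme
    },
    body: JSON.stringify({
      chatId: 'my-chat-id',
      stream: true,
      detail: false, // new flag; false keeps the stream OpenAI-compatible
      variables: {},
      messages: [{ role: 'user', content: 'Hello' }] // assumed OpenAI-style shape
    })
  });
}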
@@ -133,7 +142,8 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
       history: prompts,
       userChatInput: prompt.value
     },
-    stream
+    stream,
+    detail
   });
   // console.log(responseData, '===', answerText);

@@ -176,7 +186,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
     if (stream) {
       sseResponse({
         res,
-        event: sseResponseEventEnum.answer,
+        event: detail ? sseResponseEventEnum.answer : undefined,
         data: textAdaptGptResponse({
           text: null,
           finish_reason: 'stop'
@@ -184,11 +194,11 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
       });
       sseResponse({
         res,
-        event: sseResponseEventEnum.answer,
+        event: detail ? sseResponseEventEnum.answer : undefined,
         data: '[DONE]'
       });

-      if (isOwner) {
+      if (isOwner && detail) {
         sseResponse({
           res,
           event: sseResponseEventEnum.appStreamResponse,
@@ -199,7 +209,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
       res.end();
     } else {
       res.json({
-        responseData,
+        ...(detail ? { responseData } : {}),
         id: chatId || '',
         model: '',
         usage: { prompt_tokens: 1, completion_tokens: 1, total_tokens: 1 },
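In non-stream mode the JSON reply now changes shape with the flag: responseData is attached only when the request set detail: true. A sketch of a caller handling both shapes (fields beyond those visible in this hunk, such as the answer choices, are omitted):

type CompletionReply = {
  id: string;
  model: string;
  usage: { prompt_tokens: number; completion_tokens: number; total_tokens: number };
  responseData?: unknown[]; // only present when the request asked for detail
};

async function readReply(res: Response): Promise<CompletionReply> {
  const json = (await res.json()) as CompletionReply;
  if (json.responseData) {
    // detail mode: per-module execution info rides along with the answer
    console.log('module details:', json.responseData);
  }
  return json;
}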
@@ -244,7 +254,8 @@ export async function dispatchModules({
   user,
   params = {},
   variables = {},
-  stream = false
+  stream = false,
+  detail = false
 }: {
   res: NextApiResponse;
   modules: AppModuleItemType[];
@@ -252,6 +263,7 @@ export async function dispatchModules({
   params?: Record<string, any>;
   variables?: Record<string, any>;
   stream?: boolean;
+  detail?: boolean;
 }) {
   const runningModules = loadModules(modules, variables);

@@ -322,7 +334,7 @@ export async function dispatchModules({
     if (res.closed) return Promise.resolve();
     console.log('run=========', module.flowType);

-    if (stream && module.showStatus) {
+    if (stream && detail && module.showStatus) {
       responseStatus({
         res,
         name: module.name,
@@ -338,6 +350,7 @@ export async function dispatchModules({
     const props: Record<string, any> = {
       res,
       stream,
+      detail,
       userOpenaiAccount: user?.openaiAccount,
       ...params
     };
@@ -25,6 +25,7 @@ export type ChatProps = {
   history?: ChatItemType[];
   userChatInput: string;
   stream?: boolean;
+  detail?: boolean;
   quoteQA?: QuoteItemType[];
   systemPrompt?: string;
   limitPrompt?: string;
@@ -44,6 +45,7 @@ export const dispatchChatCompletion = async (props: Record<string, any>): Promis
   temperature = 0,
   maxToken = 4000,
   stream = false,
+  detail = false,
   history = [],
   quoteQA = [],
   userChatInput,
@@ -111,7 +113,11 @@ export const dispatchChatCompletion = async (props: Record<string, any>): Promis
   const { answerText, totalTokens, completeMessages } = await (async () => {
     if (stream) {
       // sse response
-      const { answer } = await streamResponse({ res, response });
+      const { answer } = await streamResponse({
+        res,
+        detail,
+        response
+      });
       // count tokens
       const completeMessages = filterMessages.concat({
         obj: ChatRoleEnum.AI,
@@ -282,7 +288,15 @@ function getMaxTokens({
   };
 }

-async function streamResponse({ res, response }: { res: NextApiResponse; response: any }) {
+async function streamResponse({
+  res,
+  detail,
+  response
+}: {
+  res: NextApiResponse;
+  detail: boolean;
+  response: any;
+}) {
   let answer = '';
   let error: any = null;
   const parseData = new SSEParseData();
@@ -301,7 +315,7 @@ async function streamResponse({ res, response }: { res: NextApiResponse; respons

     sseResponse({
       res,
-      event: sseResponseEventEnum.answer,
+      event: detail ? sseResponseEventEnum.answer : undefined,
       data: textAdaptGptResponse({
         text: content
       })
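The recurring edit event: detail ? sseResponseEventEnum.answer : undefined works because of how SSE frames are serialized: a named event adds an event: line before data:, while omitting it yields the bare data: frames that OpenAI streaming clients parse. A minimal sketch of a writer with that behavior (an assumption about what sseResponse does internally, not its actual source):

import type { NextApiResponse } from 'next';

function sseWrite({
  res,
  event,
  data
}: {
  res: NextApiResponse;
  event?: string; // undefined => bare OpenAI-style frame
  data: string;
}) {
  if (event) {
    res.write(`event: ${event}\n`); // FastGPT-specific named event (detail mode)
  }
  res.write(`data: ${data}\n\n`); // the blank line terminates the SSE frame
}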
@@ -5,6 +5,7 @@ import type { NextApiResponse } from 'next';

 export type AnswerProps = {
   res: NextApiResponse;
+  detail?: boolean;
   text: string;
   stream: boolean;
 };
@@ -13,12 +14,12 @@ export type AnswerResponse = {
 };

 export const dispatchAnswer = (props: Record<string, any>): AnswerResponse => {
-  const { res, text = '', stream } = props as AnswerProps;
+  const { res, detail, text = '', stream } = props as AnswerProps;

   if (stream) {
     sseResponse({
       res,
-      event: sseResponseEventEnum.answer,
+      event: detail ? sseResponseEventEnum.answer : undefined,
       data: textAdaptGptResponse({
         text: text.replace(/\\n/g, '\n')
       })
@@ -6,11 +6,11 @@ import { ChatCompletionRequestMessageRoleEnum } from 'openai';
 import { ChatRoleEnum } from '@/constants/chat';
 import type { MessageItemType } from '@/pages/api/openapi/v1/chat/completions';
 import type { AppModuleItemType } from '@/types/app';
-import type { FlowModuleItemType, FlowModuleTemplateType } from '@/types/flow';
+import type { FlowModuleItemType } from '@/types/flow';
 import type { Edge, Node } from 'reactflow';
 import { connectionLineStyle } from '@/constants/flow';
 import { customAlphabet } from 'nanoid';
-import { EmptyModule, ModuleTemplates, ModuleTemplatesFlat } from '@/constants/flow/ModuleTemplate';
+import { EmptyModule, ModuleTemplatesFlat } from '@/constants/flow/ModuleTemplate';
 const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 6);

 export const adaptBill = (bill: BillSchema): UserBillType => {
@@ -41,7 +41,7 @@ export const gptMessage2ChatType = (messages: MessageItemType[]): ChatItemType[

 export const textAdaptGptResponse = ({
   text,
-  model,
+  model = '',
   finish_reason = null,
   extraData = {}
 }: {
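Defaulting model to '' matters because the stop and [DONE] frames earlier in this diff call textAdaptGptResponse without passing a model. The function body is not shown here; a plausible sketch of a helper with this signature, emitting an OpenAI-style chat.completion.chunk payload:

function textAdaptGptResponse({
  text,
  model = '',
  finish_reason = null,
  extraData = {}
}: {
  text: string | null;
  model?: string;
  finish_reason?: string | null;
  extraData?: Record<string, any>;
}): string {
  // Mirrors OpenAI's streaming chunk shape; null text maps to an empty delta.
  return JSON.stringify({
    ...extraData,
    id: '',
    object: 'chat.completion.chunk',
    created: Date.now(),
    model,
    choices: [{ delta: text === null ? {} : { content: text }, index: 0, finish_reason }]
  });
}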