perf: bill

archer 2023-07-13 22:53:44 +08:00
parent 726de0396b
commit f3715731c4
GPG Key ID: 569A5660D2379E28
67 changed files with 915 additions and 1254 deletions

View File

@@ -39,7 +39,6 @@ export const useAppRoute = (app) => {
     userId: app.userId,
     name: app.name,
     intro: app.intro,
-    app: app.chat?.chatModel,
     relatedKbs: kbNames, // map the relatedKbs ids to their Kb names
     systemPrompt: app.chat?.systemPrompt || '',
     temperature: app.chat?.temperature || 0,

View File

@@ -62,12 +62,6 @@ const appSchema = new mongoose.Schema({
   avatar: String,
   status: String,
   intro: String,
-  chat: {
-    relatedKbs: [mongoose.Schema.Types.ObjectId],
-    systemPrompt: String,
-    temperature: Number,
-    chatModel: String
-  },
   share: {
     topNum: Number,
     isShare: Boolean,

View File

@@ -0,0 +1,26 @@
+{
+  "Gpt35-4k": {
+    "model": "gpt-3.5-turbo",
+    "name": "Gpt35-4k",
+    "contextMaxToken": 4000,
+    "systemMaxToken": 2400,
+    "maxTemperature": 1.2,
+    "price": 1.5
+  },
+  "Gpt35-16k": {
+    "model": "gpt-3.5-turbo-16k",
+    "name": "Gpt35-16k",
+    "contextMaxToken": 16000,
+    "systemMaxToken": 8000,
+    "maxTemperature": 1.2,
+    "price": 3
+  },
+  "Gpt4": {
+    "model": "gpt-4",
+    "name": "Gpt4",
+    "contextMaxToken": 8000,
+    "systemMaxToken": 4000,
+    "maxTemperature": 1.2,
+    "price": 45
+  }
+}

View File

@@ -0,0 +1,8 @@
+{
+  "Gpt35-16k": {
+    "model": "gpt-3.5-turbo-16k",
+    "name": "Gpt35-16k",
+    "maxToken": 16000,
+    "price": 3
+  }
+}

View File

@@ -0,0 +1,6 @@
+{
+  "vectorMaxProcess": 10,
+  "qaMaxProcess": 10,
+  "pgIvfflatProbe": 10,
+  "sensitiveCheck": false
+}

View File

@@ -0,0 +1,7 @@
+{
+  "text-embedding-ada-002": {
+    "model": "text-embedding-ada-002",
+    "name": "Embedding-2",
+    "price": 0.2
+  }
+}
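
Note: the four JSON files added above (data/ChatModels.json, data/QAModels.json, data/SystemParams.json, data/VectorModels.json) replace the model constants previously hard-coded in @/constants/model. A minimal sketch of how the server side of this commit consumes one of them (mirroring the initSystemModels handler further down in this diff; the loadModelMap helper name is hypothetical):

import { readFileSync } from 'fs';

// Sketch: parse a data/*.json model map into the array shape the UI consumes.
const loadModelMap = <T>(path: string): T[] =>
  Object.values(JSON.parse(readFileSync(path, 'utf-8'))) as T[];

const chatModels = loadModelMap<{ model: string; name: string; price: number }>(
  'data/ChatModels.json'
);
console.log(chatModels[0]?.name); // "Gpt35-4k" with the file shown above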

View File

@@ -1,8 +0,0 @@
-var _hmt = _hmt || [];
-(function () {
-  const hm = document.createElement('script');
-  hm.src = 'https://hm.baidu.com/hm.js?a5357e9dab086658bac0b6faf148882e';
-  const s = document.getElementsByTagName('script')[0];
-  s.parentNode.insertBefore(hm, s);
-})();
-

View File

@@ -2,7 +2,6 @@ import { GET, POST, PUT, DELETE } from '../request';
 import type { KbItemType } from '@/types/plugin';
 import { RequestPaging } from '@/types/index';
 import { TrainingModeEnum } from '@/constants/plugin';
-import { type QuoteItemType } from '@/pages/api/openapi/kb/appKbSearch';
 import {
   Props as PushDataProps,
   Response as PushDateResponse
@@ -60,7 +59,7 @@ export const getTrainingData = (data: { kbId: string; init: boolean }) =>
   }>(`/plugins/kb/data/getTrainingData`, data);

 export const getKbDataItemById = (dataId: string) =>
-  GET<QuoteItemType>(`/plugins/kb/data/getDataById`, { dataId });
+  GET(`/plugins/kb/data/getDataById`, { dataId });

 /**
  * push data

View File

@@ -1,9 +1,6 @@
 import { GET, POST, PUT } from './request';
-import type { ChatModelItemType } from '@/constants/model';
 import type { InitDateResponse } from '@/pages/api/system/getInitData';

 export const getInitData = () => GET<InitDateResponse>('/system/getInitData');

-export const getSystemModelList = () => GET<ChatModelItemType[]>('/system/getModels');
-
 export const uploadImg = (base64Img: string) => POST<string>('/system/uploadImage', { base64Img });

View File

@@ -10,7 +10,7 @@ import React, {
 import { throttle } from 'lodash';
 import { ChatItemType, ChatSiteItemType, ExportChatType } from '@/types/chat';
 import { useToast } from '@/hooks/useToast';
-import { useCopyData, voiceBroadcast, hasVoiceApi } from '@/utils/tools';
+import { useCopyData, voiceBroadcast, hasVoiceApi, getErrText } from '@/utils/tools';
 import { Box, Card, Flex, Input, Textarea, Button, useTheme } from '@chakra-ui/react';
 import { useUserStore } from '@/store/user';
@@ -241,33 +241,34 @@ const ChatBox = (
         variables: data
       });

-      // set the chat content to finished status
-      setChatHistory((state) =>
-        state.map((item, index) => {
-          if (index !== state.length - 1) return item;
-          return {
-            ...item,
-            status: 'finish'
-          };
-        })
-      );
-
       setTimeout(() => {
         generatingScroll();
         TextareaDom.current?.focus();
       }, 100);
     } catch (err: any) {
       toast({
-        title: typeof err === 'string' ? err : err?.message || '聊天出错了~',
-        status: 'warning',
+        title: getErrText(err, '聊天出错了~'),
+        status: 'error',
         duration: 5000,
         isClosable: true
       });
-      resetInputVal(value);
+      if (!err?.responseText) {
+        resetInputVal(value);
         setChatHistory(newChatList.slice(0, newChatList.length - 2));
+      }
     }
+    // set finish status
+    setChatHistory((state) =>
+      state.map((item, index) => {
+        if (index !== state.length - 1) return item;
+        return {
+          ...item,
+          status: 'finish'
+        };
+      })
+    );
   },
   [
     isChatting,
@@ -404,7 +405,7 @@ const ChatBox = (
       py={4}
       _hover={{
         '& .control': {
-          display: 'flex'
+          display: item.status === 'finish' ? 'flex' : 'none'
         }
       }}
     >

View File

@@ -965,8 +965,8 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
     name: '意图识别',
     intro: '可以判断用户问题属于哪方面问题,从而执行不同的操作。',
     type: 'http',
-    url: '/openapi/modules/agent/classifyQuestion',
-    flowType: 'classifyQuestionNode',
+    url: '/openapi/modules/agent/recognizeIntention',
+    flowType: 'recognizeIntention',
     inputs: [
       {
         key: 'systemPrompt',

View File

@@ -1,12 +0,0 @@
-export enum ChatModelEnum {
-  'GPT35' = 'gpt-3.5-turbo',
-  'GPT3516k' = 'gpt-3.5-turbo-16k',
-  'GPT4' = 'gpt-4',
-  'GPT432k' = 'gpt-4-32k'
-}
-
-export const chatModelList = [
-  { label: 'Gpt35-16k', value: ChatModelEnum.GPT3516k },
-  { label: 'Gpt35-4k', value: ChatModelEnum.GPT35 },
-  { label: 'Gpt4-8k', value: ChatModelEnum.GPT4 }
-];

View File

@@ -1,7 +1,7 @@
 import { AppModuleItemTypeEnum, SystemInputEnum, SpecificInputEnum } from '../app';
 import { FlowModuleTypeEnum, FlowInputItemTypeEnum, FlowOutputItemTypeEnum } from './index';
 import type { AppModuleTemplateItemType } from '@/types/app';
-import { chatModelList } from '../data';
+import { chatModelList } from '@/store/static';
 import {
   Input_Template_History,
   Input_Template_TFSwitch,
@@ -96,8 +96,8 @@ export const ChatModule: AppModuleTemplateItemType = {
       key: 'model',
       type: FlowInputItemTypeEnum.select,
       label: '对话模型',
-      value: chatModelList[0].value,
-      list: chatModelList
+      value: chatModelList[0]?.model,
+      list: chatModelList.map((item) => ({ label: item.name, value: item.model }))
     },
     {
       key: 'temperature',
@@ -278,13 +278,13 @@ export const TFSwitchModule: AppModuleTemplateItemType = {
     }
   ]
 };

-export const ClassifyQuestionModule: AppModuleTemplateItemType = {
+export const RecognizeIntentionModule: AppModuleTemplateItemType = {
   logo: '/imgs/module/cq.png',
   name: '意图识别',
   intro: '可以判断用户问题属于哪方面问题,从而执行不同的操作。',
   type: AppModuleItemTypeEnum.http,
-  url: '/openapi/modules/agent/classifyQuestion',
-  flowType: FlowModuleTypeEnum.classifyQuestionNode,
+  url: '/openapi/modules/agent/recognizeIntention',
+  flowType: FlowModuleTypeEnum.recognizeIntention,
   inputs: [
     {
       key: 'systemPrompt',
@@ -348,6 +348,6 @@ export const ModuleTemplates = [
   },
   {
     label: 'Agent',
-    list: [ClassifyQuestionModule]
+    list: [RecognizeIntentionModule]
   }
 ];

View File

@@ -26,7 +26,7 @@ export enum FlowModuleTypeEnum {
   kbSearchNode = 'kbSearchNode',
   tfSwitchNode = 'tfSwitchNode',
   answerNode = 'answerNode',
-  classifyQuestionNode = 'classifyQuestionNode'
+  recognizeIntention = 'recognizeIntention'
 }

 export const edgeOptions = {

View File

@@ -1,11 +1,6 @@
-import { getSystemModelList } from '@/api/system';
 import type { ShareChatEditType } from '@/types/app';
 import type { AppSchema } from '@/types/mongoSchema';

-export const embeddingModel = 'text-embedding-ada-002';
-export const embeddingPrice = 0.1;
-export type EmbeddingModelType = 'text-embedding-ada-002';
-
 export enum OpenAiChatEnum {
   'GPT35' = 'gpt-3.5-turbo',
   'GPT3516k' = 'gpt-3.5-turbo-16k',
@@ -13,58 +8,6 @@ export enum OpenAiChatEnum {
   'GPT432k' = 'gpt-4-32k'
 }

-export type ChatModelType = `${OpenAiChatEnum}`;
-
-export type ChatModelItemType = {
-  chatModel: ChatModelType;
-  name: string;
-  contextMaxToken: number;
-  systemMaxToken: number;
-  maxTemperature: number;
-  price: number;
-};
-
-export const ChatModelMap = {
-  [OpenAiChatEnum.GPT35]: {
-    chatModel: OpenAiChatEnum.GPT35,
-    name: 'Gpt35-4k',
-    contextMaxToken: 4000,
-    systemMaxToken: 2400,
-    maxTemperature: 1.2,
-    price: 1.5
-  },
-  [OpenAiChatEnum.GPT3516k]: {
-    chatModel: OpenAiChatEnum.GPT3516k,
-    name: 'Gpt35-16k',
-    contextMaxToken: 16000,
-    systemMaxToken: 8000,
-    maxTemperature: 1.2,
-    price: 3
-  },
-  [OpenAiChatEnum.GPT4]: {
-    chatModel: OpenAiChatEnum.GPT4,
-    name: 'Gpt4',
-    contextMaxToken: 8000,
-    systemMaxToken: 4000,
-    maxTemperature: 1.2,
-    price: 45
-  },
-  [OpenAiChatEnum.GPT432k]: {
-    chatModel: OpenAiChatEnum.GPT432k,
-    name: 'Gpt4-32k',
-    contextMaxToken: 32000,
-    systemMaxToken: 8000,
-    maxTemperature: 1.2,
-    price: 90
-  }
-};
-
-export const chatModelList: ChatModelItemType[] = [
-  ChatModelMap[OpenAiChatEnum.GPT3516k],
-  ChatModelMap[OpenAiChatEnum.GPT35],
-  ChatModelMap[OpenAiChatEnum.GPT4]
-];

 export const defaultApp: AppSchema = {
   _id: '',
   userId: 'userId',
@@ -72,17 +15,6 @@ export const defaultApp: AppSchema = {
   avatar: '/icon/logo.png',
   intro: '',
   updateTime: Date.now(),
-  chat: {
-    relatedKbs: [],
-    searchSimilarity: 0.2,
-    searchLimit: 5,
-    searchEmptyText: '',
-    systemPrompt: '',
-    limitPrompt: '',
-    temperature: 0,
-    maxToken: 4000,
-    chatModel: OpenAiChatEnum.GPT35
-  },
   share: {
     isShare: false,
     isShareDetail: false,

View File

@@ -1,9 +1,6 @@
-export enum BillTypeEnum {
-  chat = 'chat',
-  openapiChat = 'openapiChat',
-  QA = 'QA',
-  vector = 'vector',
-  return = 'return'
+export enum BillSourceEnum {
+  fastgpt = 'fastgpt',
+  api = 'api'
 }

 export enum PageTypeEnum {
   login = 'login',
@@ -11,12 +8,9 @@ export enum PageTypeEnum {
   forgetPassword = 'forgetPassword'
 }

-export const BillTypeMap: Record<`${BillTypeEnum}`, string> = {
-  [BillTypeEnum.chat]: '对话',
-  [BillTypeEnum.openapiChat]: 'api 对话',
-  [BillTypeEnum.QA]: 'QA拆分',
-  [BillTypeEnum.vector]: '索引生成',
-  [BillTypeEnum.return]: '退款'
+export const BillSourceMap: Record<`${BillSourceEnum}`, string> = {
+  [BillSourceEnum.fastgpt]: 'FastGpt 平台',
+  [BillSourceEnum.api]: 'Api'
 };

 export enum PromotionEnum {

View File

@@ -1,4 +1,4 @@
-import { useRef, useState, useCallback, useLayoutEffect, useMemo, useEffect } from 'react';
+import { useRef, useState, useCallback, useMemo, useEffect } from 'react';
 import type { PagingData } from '../types/index';
 import { IconButton, Flex, Box, Input } from '@chakra-ui/react';
 import { ArrowBackIcon, ArrowForwardIcon } from '@chakra-ui/icons';
@@ -144,7 +144,7 @@ export const usePagination = <T = any,>({
     [data.length, isLoading, mutate, pageNum, total]
   );

-  useLayoutEffect(() => {
+  useEffect(() => {
     if (!elementRef.current || type !== 'scroll') return;
     const scrolling = throttle((e: Event) => {

View File

@@ -1,4 +1,4 @@
-import { useEffect } from 'react';
+import { useEffect, useState } from 'react';
 import type { AppProps } from 'next/app';
 import Script from 'next/script';
 import Head from 'next/head';
@@ -8,9 +8,9 @@ import { theme } from '@/constants/theme';
 import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
 import NProgress from 'nprogress'; //nprogress module
 import Router from 'next/router';
-import { useGlobalStore } from '@/store/global';
 import 'nprogress/nprogress.css';
 import '@/styles/reset.scss';
+import { clientInitData } from '@/store/static';

 //Binding events.
 Router.events.on('routeChangeStart', () => NProgress.start());
@@ -29,13 +29,15 @@ const queryClient = new QueryClient({
 });

 function App({ Component, pageProps }: AppProps) {
-  const {
-    loadInitData,
-    initData: { googleVerKey, baiduTongji }
-  } = useGlobalStore();
+  const [googleVerKey, setGoogleVerKey] = useState<string>();
+  const [baiduTongji, setBaiduTongji] = useState<string>();

   useEffect(() => {
-    loadInitData();
+    (async () => {
+      const { googleVerKey, baiduTongji } = await clientInitData();
+      setGoogleVerKey(googleVerKey);
+      setBaiduTongji(baiduTongji);
+    })();
   }, []);

   return (
@@ -53,7 +55,7 @@ function App({ Component, pageProps }: AppProps) {
       <Script src="/js/qrcode.min.js" strategy="lazyOnload"></Script>
       <Script src="/js/pdf.js" strategy="lazyOnload"></Script>
       <Script src="/js/html2pdf.bundle.min.js" strategy="lazyOnload"></Script>
-      {baiduTongji && <Script src="/js/baidutongji.js" strategy="lazyOnload"></Script>}
+      {baiduTongji && <Script src={baiduTongji} strategy="lazyOnload"></Script>}
       {googleVerKey && (
         <>
           <Script
@@ -75,5 +77,4 @@ function App({ Component, pageProps }: AppProps) {
   );
 }

-// @ts-ignore
 export default App;
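
Note: @/store/static is new in this commit but its own file is not among the hunks shown. From its consumers (clientInitData here; chatModelList, vectorModelList and beianText elsewhere in this diff), a hedged sketch of its likely shape — every detail below is inferred, not taken from the diff:

import { getInitData } from '@/api/system';
import type { ChatModelItemType, VectorModelItemType } from '@/types/model';

export let chatModelList: ChatModelItemType[] = [];
export let vectorModelList: VectorModelItemType[] = [];
export let beianText = '';

// Fetch /system/getInitData once on the client and cache the model lists
// in module-level live bindings that other components import directly.
export const clientInitData = async () => {
  const res = await getInitData();
  chatModelList = res.chatModels;
  vectorModelList = res.vectorModels;
  beianText = res.beianText;
  return res;
};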

View File

@@ -8,6 +8,8 @@ import { type ChatCompletionRequestMessage } from 'openai';
 import { AppModuleItemType } from '@/types/app';
 import { dispatchModules } from '../openapi/v1/chat/completions';
 import { gptMessage2ChatType } from '@/utils/adapt';
+import { createTaskBill, delTaskBill, finishTaskBill } from '@/service/events/pushBill';
+import { BillSourceEnum } from '@/constants/user';

 export type MessageItemType = ChatCompletionRequestMessage & { _id?: string };

 export type Props = {
@@ -15,10 +17,8 @@ export type Props = {
   prompt: string;
   modules: AppModuleItemType[];
   variables: Record<string, any>;
-};
-
-export type ChatResponseType = {
-  newChatId: string;
-  quoteLen?: number;
+  appId: string;
+  appName: string;
 };

 export default async function handler(req: NextApiRequest, res: NextApiResponse) {
@@ -30,8 +30,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
     res.end();
   });

-  let { modules = [], history = [], prompt, variables = {} } = req.body as Props;
+  let { modules = [], history = [], prompt, variables = {}, appName, appId } = req.body as Props;
+  let billId = '';

   try {
     if (!history || !modules || !prompt) {
       throw new Error('Prams Error');
@@ -45,6 +45,13 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
     /* user auth */
     const { userId } = await authUser({ req });

+    billId = await createTaskBill({
+      userId,
+      appName,
+      appId,
+      source: BillSourceEnum.fastgpt
+    });
+
     /* start process */
     const { responseData } = await dispatchModules({
       res,
@@ -54,7 +61,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
         history: gptMessage2ChatType(history),
         userChatInput: prompt
       },
-      stream: true
+      stream: true,
+      billId
     });

     sseResponse({
@@ -70,7 +78,11 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
     res.end();

     // bill
+    finishTaskBill({
+      billId
+    });
   } catch (err: any) {
+    delTaskBill(billId);
     res.status(500);
     sseErrRes(res, err);
     res.end();
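
Note: the bill lifecycle used above (and again in the openapi completions handler below) is: createTaskBill opens a bill before dispatch, each module appends a line item via pushTaskBillListItem, and the bill is finalized with finishTaskBill on success or discarded with delTaskBill on error. @/service/events/pushBill itself is not shown in these hunks; a hedged sketch, with the Bill model shape and import path assumed:

import { Bill } from '@/service/models/bill'; // assumed mongoose model

export async function createTaskBill({
  userId,
  appName,
  appId,
  source
}: {
  userId: string;
  appName: string;
  appId: string;
  source: string;
}) {
  const res = await Bill.create({ userId, appName, appId, source, list: [] });
  return String(res._id);
}

export async function pushTaskBillListItem({
  billId,
  moduleName,
  amount,
  model,
  tokenLen
}: {
  billId?: string;
  moduleName: string;
  amount: number;
  model?: string;
  tokenLen: number;
}) {
  if (!billId) return;
  // Append one line item; billing failures should never break the chat itself.
  await Bill.findByIdAndUpdate(billId, {
    $push: { list: { moduleName, amount, model, tokenLen } }
  }).catch(() => {});
}

export async function finishTaskBill({ billId }: { billId: string }) {
  // Presumably sums the line items and deducts the total from the user balance.
  const bill = await Bill.findById(billId);
  if (!bill) return;
  const total = bill.list.reduce((sum: number, item: any) => sum + item.amount, 0);
  await Bill.findByIdAndUpdate(billId, { total });
}

export async function delTaskBill(billId?: string) {
  if (billId) await Bill.findByIdAndDelete(billId);
}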

View File

@@ -53,14 +53,6 @@ export async function saveChat({
   await connectToDatabase();
   const { app } = await authApp({ appId, userId, authOwner: false });

-  const content = prompts.map((item) => ({
-    _id: item._id,
-    obj: item.obj,
-    value: item.value,
-    systemPrompt: item.systemPrompt || '',
-    quote: item.quote || []
-  }));
-
   if (String(app.userId) === userId) {
     await App.findByIdAndUpdate(appId, {
       updateTime: new Date()
@@ -73,12 +65,11 @@ export async function saveChat({
     Chat.findByIdAndUpdate(historyId, {
       $push: {
         content: {
-          $each: content
+          $each: prompts
         }
       },
       variables,
-      title: content[0].value.slice(0, 20),
-      latestChat: content[1].value,
+      title: prompts[0].value.slice(0, 20),
       updateTime: new Date()
     }).then(() => ({
       newHistoryId: ''
@@ -90,9 +81,8 @@ export async function saveChat({
       userId,
       appId,
       variables,
-      content,
-      title: content[0].value.slice(0, 20),
-      latestChat: content[1].value
+      content: prompts,
+      title: prompts[0].value.slice(0, 20)
     }).then((res) => ({
       newHistoryId: String(res._id)
     }))

View File

@@ -1,186 +0,0 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@/service/response';
-import { authUser } from '@/service/utils/auth';
-import { PgClient } from '@/service/pg';
-import { withNextCors } from '@/service/utils/tools';
-import type { ChatItemType } from '@/types/chat';
-import type { AppSchema } from '@/types/mongoSchema';
-import { authApp } from '@/service/utils/auth';
-import { ChatModelMap } from '@/constants/model';
-import { ChatRoleEnum } from '@/constants/chat';
-import { openaiEmbedding } from '../plugin/openaiEmbedding';
-import { modelToolMap } from '@/utils/plugin';
-
-export type QuoteItemType = {
-  id: string;
-  q: string;
-  a: string;
-  source?: string;
-};
-
-type Props = {
-  prompts: ChatItemType[];
-  similarity: number;
-  limit: number;
-  appId: string;
-};
-
-type Response = {
-  rawSearch: QuoteItemType[];
-  userSystemPrompt: {
-    obj: ChatRoleEnum;
-    value: string;
-  }[];
-  userLimitPrompt: {
-    obj: ChatRoleEnum;
-    value: string;
-  }[];
-  quotePrompt: {
-    obj: ChatRoleEnum;
-    value: string;
-  };
-};
-
-export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  try {
-    const { userId } = await authUser({ req });
-
-    if (!userId) {
-      throw new Error('userId is empty');
-    }
-
-    const { prompts, similarity, limit, appId } = req.body as Props;
-
-    if (!similarity || !Array.isArray(prompts) || !appId) {
-      throw new Error('params is error');
-    }
-
-    // auth app
-    const { app } = await authApp({
-      appId,
-      userId
-    });
-
-    const result = await appKbSearch({
-      app,
-      userId,
-      fixedQuote: [],
-      prompt: prompts[prompts.length - 1],
-      similarity,
-      limit
-    });
-
-    jsonRes<Response>(res, {
-      data: result
-    });
-  } catch (err) {
-    console.log(err);
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
-  }
-});
-
-export async function appKbSearch({
-  app,
-  userId,
-  fixedQuote = [],
-  prompt,
-  similarity = 0.8,
-  limit = 5
-}: {
-  app: AppSchema;
-  userId: string;
-  fixedQuote?: QuoteItemType[];
-  prompt: ChatItemType;
-  similarity: number;
-  limit: number;
-}): Promise<Response> {
-  const modelConstantsData = ChatModelMap[app.chat.chatModel];
-
-  // get vector
-  const promptVector = await openaiEmbedding({
-    userId,
-    input: [prompt.value]
-  });
-
-  // search kb
-  const res: any = await PgClient.query(
-    `BEGIN;
-    SET LOCAL ivfflat.probes = ${global.systemEnv.pgIvfflatProbe || 10};
-    select id,q,a,source from modelData where kb_id IN (${app.chat.relatedKbs
-      .map((item) => `'${item}'`)
-      .join(',')}) AND vector <#> '[${promptVector[0]}]' < -${similarity} order by vector <#> '[${
-      promptVector[0]
-    }]' limit ${limit};
-    COMMIT;`
-  );
-
-  const searchRes: QuoteItemType[] = res?.[2]?.rows || [];
-
-  // filter same search result
-  const idSet = new Set<string>();
-  const filterSearch = [
-    ...searchRes.slice(0, 3),
-    ...fixedQuote.slice(0, 2),
-    ...searchRes.slice(3),
-    ...fixedQuote.slice(2, Math.floor(fixedQuote.length * 0.4))
-  ].filter((item) => {
-    if (idSet.has(item.id)) {
-      return false;
-    }
-    idSet.add(item.id);
-    return true;
-  });
-
-  // count the tokens of the fixed prompts
-  const userSystemPrompt = app.chat.systemPrompt // user system prompt
-    ? [
-        {
-          obj: ChatRoleEnum.System,
-          value: app.chat.systemPrompt
-        }
-      ]
-    : [];
-  const userLimitPrompt = [
-    {
-      obj: ChatRoleEnum.Human,
-      value: app.chat.limitPrompt
-        ? app.chat.limitPrompt
-        : `知识库是关于 ${app.name} 的内容,参考知识库回答问题。与 "${app.name}" 无关内容,直接回复: "我不知道"。`
-    }
-  ];
-
-  const fixedSystemTokens = modelToolMap.countTokens({
-    model: app.chat.chatModel,
-    messages: [...userSystemPrompt, ...userLimitPrompt]
-  });
-
-  // filter part quote by maxToken
-  const sliceResult = modelToolMap
-    .tokenSlice({
-      model: app.chat.chatModel,
-      maxToken: modelConstantsData.systemMaxToken - fixedSystemTokens,
-      messages: filterSearch.map((item, i) => ({
-        obj: ChatRoleEnum.System,
-        value: `${i + 1}: [${item.q}\n${item.a}]`
-      }))
-    })
-    .map((item) => item.value)
-    .join('\n')
-    .trim();
-
-  // slice filterSearch
-  const rawSearch = filterSearch.slice(0, sliceResult.length);
-  const quoteText = sliceResult ? `知识库:\n${sliceResult}` : '';
-
-  return {
-    rawSearch,
-    userSystemPrompt,
-    userLimitPrompt,
-    quotePrompt: {
-      obj: ChatRoleEnum.System,
-      value: quoteText
-    }
-  };
-}

View File

@@ -15,6 +15,7 @@ type DateItemType = { a: string; q: string; source?: string };
 export type Props = {
   kbId: string;
   data: DateItemType[];
+  model: string;
   mode: `${TrainingModeEnum}`;
   prompt?: string;
 };
@@ -25,14 +26,14 @@ export type Response = {
 const modeMaxToken = {
   [TrainingModeEnum.index]: 6000,
-  [TrainingModeEnum.qa]: 10000
+  [TrainingModeEnum.qa]: 12000
 };

 export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
   try {
-    const { kbId, data, mode, prompt } = req.body as Props;
+    const { kbId, data, mode, prompt, model } = req.body as Props;

-    if (!kbId || !Array.isArray(data)) {
+    if (!kbId || !Array.isArray(data) || !model) {
       throw new Error('缺少参数');
     }

     await connectToDatabase();
@@ -46,7 +47,8 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
       data,
       userId,
       mode,
-      prompt
+      prompt,
+      model
     })
   });
 } catch (err) {
@@ -62,7 +64,8 @@ export async function pushDataToKb({
   kbId,
   data,
   mode,
-  prompt
+  prompt,
+  model
 }: { userId: string } & Props): Promise<Response> {
   await authKb({
     userId,
@@ -79,7 +82,7 @@ export async function pushDataToKb({
     if (mode === TrainingModeEnum.qa) {
       // count token
       const token = modelToolMap.countTokens({
-        model: OpenAiChatEnum.GPT3516k,
+        model: 'gpt-3.5-turbo-16k',
         messages: [{ obj: 'System', value: item.q }]
       });
       if (token > modeMaxToken[TrainingModeEnum.qa]) {
@@ -144,6 +147,7 @@ export async function pushDataToKb({
     insertData.map((item) => ({
       q: item.q,
       a: item.a,
+      model,
       source: item.source,
       userId,
       kbId,

View File

@@ -3,7 +3,7 @@ import { jsonRes } from '@/service/response';
 import { authUser } from '@/service/utils/auth';
 import { PgClient } from '@/service/pg';
 import { withNextCors } from '@/service/utils/tools';
-import { openaiEmbedding } from '../plugin/openaiEmbedding';
+import { getVector } from '../plugin/vector';
 import type { KbTestItemType } from '@/types/plugin';

 export type Props = {
@@ -27,7 +27,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
       throw new Error('缺少用户ID');
     }

-    const vector = await openaiEmbedding({
+    const vector = await getVector({
       userId,
       input: [text]
     });

View File

@@ -3,7 +3,7 @@ import { jsonRes } from '@/service/response';
 import { authUser } from '@/service/utils/auth';
 import { PgClient } from '@/service/pg';
 import { withNextCors } from '@/service/utils/tools';
-import { openaiEmbedding } from '../plugin/openaiEmbedding';
+import { getVector } from '../plugin/vector';

 export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
   try {
@@ -19,7 +19,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
     // get vector
     const vector = await (async () => {
       if (q) {
-        return openaiEmbedding({
+        return getVector({
           userId,
           input: [q]
         });

View File

@@ -6,12 +6,12 @@ import { ChatContextFilter } from '@/service/utils/chat/index';
 import type { ChatItemType } from '@/types/chat';
 import { ChatRoleEnum } from '@/constants/chat';
 import { getOpenAIApi, axiosConfig } from '@/service/ai/openai';
-import type { ClassifyQuestionAgentItemType } from '@/types/app';
+import type { RecognizeIntentionAgentItemType } from '@/types/app';

 export type Props = {
   history?: ChatItemType[];
   userChatInput: string;
-  agents: ClassifyQuestionAgentItemType[];
+  agents: RecognizeIntentionAgentItemType[];
   description: string;
 };

 export type Response = { history: ChatItemType[] };

View File

@@ -6,29 +6,30 @@ import { ChatContextFilter } from '@/service/utils/chat/index';
 import type { ChatItemType } from '@/types/chat';
 import { ChatRoleEnum } from '@/constants/chat';
 import { getOpenAIApi, axiosConfig } from '@/service/ai/openai';
-import type { ClassifyQuestionAgentItemType } from '@/types/app';
+import type { RecognizeIntentionAgentItemType } from '@/types/app';
+import { countModelPrice, pushTaskBillListItem } from '@/service/events/pushBill';

 export type Props = {
   systemPrompt?: string;
   history?: ChatItemType[];
   userChatInput: string;
-  agents: ClassifyQuestionAgentItemType[];
+  agents: RecognizeIntentionAgentItemType[];
+  billId?: string;
 };

 export type Response = { history: ChatItemType[] };

-const agentModel = 'gpt-3.5-turbo-16k';
+const agentModel = 'gpt-3.5-turbo';
 const agentFunName = 'agent_user_question';

 export default async function handler(req: NextApiRequest, res: NextApiResponse) {
   try {
-    let { systemPrompt, agents, history = [], userChatInput } = req.body as Props;
+    let { userChatInput } = req.body as Props;

-    const response = await classifyQuestion({
-      systemPrompt,
-      history,
-      userChatInput,
-      agents
-    });
+    if (!userChatInput) {
+      throw new Error('userChatInput is empty');
+    }
+
+    const response = await classifyQuestion(req.body);

     jsonRes(res, {
       data: response
@@ -46,7 +47,8 @@ export async function classifyQuestion({
   agents,
   systemPrompt,
   history = [],
-  userChatInput
+  userChatInput,
+  billId
 }: Props) {
   const messages: ChatItemType[] = [
     ...(systemPrompt
@@ -106,8 +108,19 @@ export async function classifyQuestion({
   if (!arg.type) {
     throw new Error('');
   }

+  const totalTokens = response.data.usage?.total_tokens || 0;
+
+  await pushTaskBillListItem({
+    billId,
+    moduleName: 'Recognize Intention',
+    amount: countModelPrice({ model: agentModel, tokens: totalTokens }),
+    model: agentModel,
+    tokenLen: totalTokens
+  });
+
   console.log(
-    '意图结果',
+    'CQ',
     agents.findIndex((item) => item.key === arg.type)
   );

View File

@@ -1,9 +1,9 @@
 // Next.js API route support: https://nextjs.org/docs/api-routes/introduction
 import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@/service/response';
+import { jsonRes, sseErrRes } from '@/service/response';
 import { sseResponse } from '@/service/utils/tools';
-import { ChatModelMap, OpenAiChatEnum } from '@/constants/model';
-import { adaptChatItem_openAI } from '@/utils/plugin/openai';
+import { OpenAiChatEnum } from '@/constants/model';
+import { adaptChatItem_openAI, countOpenAIToken } from '@/utils/plugin/openai';
 import { modelToolMap } from '@/utils/plugin';
 import { ChatContextFilter } from '@/service/utils/chat/index';
 import type { ChatItemType } from '@/types/chat';
@@ -11,6 +11,8 @@ import { ChatRoleEnum, sseResponseEventEnum } from '@/constants/chat';
 import { parseStreamChunk, textAdaptGptResponse } from '@/utils/adapt';
 import { getOpenAIApi, axiosConfig } from '@/service/ai/openai';
 import { SpecificInputEnum } from '@/constants/app';
+import { getChatModel } from '@/service/utils/data';
+import { countModelPrice, pushTaskBillListItem } from '@/service/events/pushBill';

 export type Props = {
   model: `${OpenAiChatEnum}`;
@@ -22,39 +24,28 @@ export type Props = {
   quotePrompt?: string;
   systemPrompt?: string;
   limitPrompt?: string;
+  billId?: string;
 };

-export type Response = { [SpecificInputEnum.answerText]: string };
+export type Response = { [SpecificInputEnum.answerText]: string; totalTokens: number };

 export default async function handler(req: NextApiRequest, res: NextApiResponse) {
+  let { model, temperature = 0, stream } = req.body as Props;
   try {
-    let {
-      model,
-      stream = false,
-      temperature = 0,
-      maxToken = 4000,
-      history = [],
-      quotePrompt,
-      userChatInput,
-      systemPrompt,
-      limitPrompt
-    } = req.body as Props;
-
     // temperature adapt
-    const modelConstantsData = ChatModelMap[model];
+    const modelConstantsData = getChatModel(model);
+
+    if (!modelConstantsData) {
+      throw new Error('The chat model is undefined');
+    }

     // FastGpt temperature range: 1~10
     temperature = +(modelConstantsData.maxTemperature * (temperature / 10)).toFixed(2);

     const response = await chatCompletion({
+      ...req.body,
       res,
       model,
-      temperature,
-      maxToken,
-      stream,
-      history,
-      userChatInput,
-      systemPrompt,
-      limitPrompt,
-      quotePrompt
+      temperature
     });

     if (stream) {
@@ -70,25 +61,32 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
     }
   } catch (err) {
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
+    if (stream) {
+      res.status(500);
+      sseErrRes(res, err);
+      res.end();
+    } else {
+      jsonRes(res, {
+        code: 500,
+        error: err
+      });
+    }
   }
 }

 /* request openai chat */
 export async function chatCompletion({
   res,
-  model = OpenAiChatEnum.GPT35,
-  temperature,
+  model,
+  temperature = 0,
   maxToken = 4000,
-  stream,
+  stream = false,
   history = [],
-  quotePrompt,
+  quotePrompt = '',
   userChatInput,
-  systemPrompt,
-  limitPrompt
+  systemPrompt = '',
+  limitPrompt = '',
+  billId
 }: Props & { res: NextApiResponse }): Promise<Response> {
   const messages: ChatItemType[] = [
     ...(quotePrompt
@@ -121,7 +119,7 @@ export async function chatCompletion({
       value: userChatInput
     }
   ];
-  const modelTokenLimit = ChatModelMap[model]?.contextMaxToken || 4000;
+  const modelTokenLimit = getChatModel(model)?.contextMaxToken || 4000;

   const filterMessages = ChatContextFilter({
     model,
@@ -157,37 +155,47 @@ export async function chatCompletion({
     }
   );

-  const { answer } = await (async () => {
+  const { answer, totalTokens } = await (async () => {
     if (stream) {
       // sse response
       const { answer } = await streamResponse({ res, response });
       // count tokens
-      // const finishMessages = filterMessages.concat({
-      //   obj: ChatRoleEnum.AI,
-      //   value: answer
-      // });
-      // const totalTokens = modelToolMap[model].countTokens({
-      //   messages: finishMessages
-      // });
+      const finishMessages = filterMessages.concat({
+        obj: ChatRoleEnum.AI,
+        value: answer
+      });
+      const totalTokens = countOpenAIToken({
+        messages: finishMessages,
+        model: 'gpt-3.5-turbo-16k'
+      });
       return {
-        answer
-        // totalTokens
+        answer,
+        totalTokens
       };
     } else {
       const answer = stream ? '' : response.data.choices?.[0].message?.content || '';
-      // const totalTokens = stream ? 0 : response.data.usage?.total_tokens || 0;
+      const totalTokens = stream ? 0 : response.data.usage?.total_tokens || 0;
       return {
-        answer
-        // totalTokens
+        answer,
+        totalTokens
       };
     }
   })();

+  await pushTaskBillListItem({
+    billId,
+    moduleName: 'AI Chat',
+    amount: countModelPrice({ model, tokens: totalTokens }),
+    model,
+    tokenLen: totalTokens
+  });
+
   return {
-    answerText: answer
+    answerText: answer,
+    totalTokens
   };
 }
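
Note: countModelPrice is imported from @/service/events/pushBill but not defined in the visible hunks. Given that each entry in the new model JSON files carries a small price field, a plausible sketch — the per-1k-token unit is an assumption, not confirmed by this diff:

// Assumed: global.chatModels / global.vectorModels are declared in a global.d.ts
// and populated at boot by initSystemModels (shown later in this commit).
export function countModelPrice({ model, tokens }: { model: string; tokens: number }) {
  const modelData =
    global.chatModels?.find((item) => item.model === model) ||
    global.vectorModels?.find((item) => item.model === model);
  if (!modelData) return 0;
  return modelData.price * (tokens / 1000); // assumed: price is per 1k tokens
}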

View File

@@ -4,8 +4,9 @@ import { PgClient } from '@/service/pg';
 import { withNextCors } from '@/service/utils/tools';
 import type { ChatItemType } from '@/types/chat';
 import { ChatRoleEnum } from '@/constants/chat';
-import { openaiEmbedding_system } from '../../plugin/openaiEmbedding';
 import { modelToolMap } from '@/utils/plugin';
+import { getVector } from '../../plugin/vector';
+import { countModelPrice, pushTaskBillListItem } from '@/service/events/pushBill';

 export type QuoteItemType = {
   id: string;
@@ -21,6 +22,7 @@ type Props = {
   maxToken: number;
   userChatInput: string;
   stream?: boolean;
+  billId?: string;
 };

 type Response = {
   rawSearch: QuoteItemType[];
@@ -30,25 +32,15 @@ type Response = {
 export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
   try {
-    const {
-      kb_ids = [],
-      history = [],
-      similarity,
-      limit,
-      maxToken,
-      userChatInput
-    } = req.body as Props;
+    const { kb_ids = [], userChatInput } = req.body as Props;

-    if (!similarity || !Array.isArray(kb_ids)) {
+    if (!userChatInput || !Array.isArray(kb_ids)) {
       throw new Error('params is error');
     }

     const result = await kbSearch({
+      ...req.body,
       kb_ids,
-      history,
-      similarity,
-      limit,
-      maxToken,
       userChatInput
     });
@@ -70,7 +62,8 @@ export async function kbSearch({
   similarity = 0.8,
   limit = 5,
   maxToken = 2500,
-  userChatInput
+  userChatInput,
+  billId
 }: Props): Promise<Response> {
   if (kb_ids.length === 0)
     return {
@@ -78,22 +71,34 @@ export async function kbSearch({
       rawSearch: [],
       quotePrompt: undefined
     };

   // get vector
-  const promptVector = await openaiEmbedding_system({
+  const vectorModel = global.vectorModels[0].model;
+  const { vectors, tokenLen } = await getVector({
+    model: vectorModel,
     input: [userChatInput]
   });

   // search kb
-  const res: any = await PgClient.query(
-    `BEGIN;
-    SET LOCAL ivfflat.probes = ${global.systemEnv.pgIvfflatProbe || 10};
-    select id,q,a,source from modelData where kb_id IN (${kb_ids
-      .map((item) => `'${item}'`)
-      .join(',')}) AND vector <#> '[${promptVector[0]}]' < -${similarity} order by vector <#> '[${
-      promptVector[0]
-    }]' limit ${limit};
-    COMMIT;`
-  );
+  const [res]: any = await Promise.all([
+    PgClient.query(
+      `BEGIN;
+      SET LOCAL ivfflat.probes = ${global.systemEnv.pgIvfflatProbe || 10};
+      select id,q,a,source from modelData where kb_id IN (${kb_ids
+        .map((item) => `'${item}'`)
+        .join(',')}) AND vector <#> '[${vectors[0]}]' < -${similarity} order by vector <#> '[${
+        vectors[0]
+      }]' limit ${limit};
+      COMMIT;`
+    ),
+    pushTaskBillListItem({
+      billId,
+      moduleName: 'Vector Generate',
+      amount: countModelPrice({ model: vectorModel, tokens: tokenLen }),
+      model: vectorModel,
+      tokenLen
+    })
+  ]);

   const searchRes: QuoteItemType[] = res?.[2]?.rows || [];
View File

@@ -1,115 +0,0 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@/service/response';
-import { authUser, getApiKey, getSystemOpenAiKey } from '@/service/utils/auth';
-import { withNextCors } from '@/service/utils/tools';
-import { getOpenAIApi } from '@/service/utils/chat/openai';
-import { embeddingModel } from '@/constants/model';
-import { axiosConfig } from '@/service/utils/tools';
-import { pushGenerateVectorBill } from '@/service/events/pushBill';
-import { OpenAiChatEnum } from '@/constants/model';
-
-type Props = {
-  input: string[];
-};
-type Response = number[][];
-
-export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  try {
-    const { userId } = await authUser({ req });
-    let { input } = req.query as Props;
-
-    if (!Array.isArray(input)) {
-      throw new Error('缺少参数');
-    }
-
-    jsonRes<Response>(res, {
-      data: await openaiEmbedding({ userId, input, mustPay: true })
-    });
-  } catch (err) {
-    console.log(err);
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
-  }
-});
-
-export async function openaiEmbedding({
-  userId,
-  input,
-  mustPay = false
-}: { userId: string; mustPay?: boolean } & Props) {
-  const { userOpenAiKey, systemAuthKey } = await getApiKey({
-    model: 'gpt-3.5-turbo',
-    userId,
-    mustPay
-  });
-  const apiKey = userOpenAiKey || systemAuthKey;
-
-  // get the chatAPI instance
-  const chatAPI = getOpenAIApi(apiKey);
-
-  // convert the input into vectors
-  const result = await chatAPI
-    .createEmbedding(
-      {
-        model: embeddingModel,
-        input
-      },
-      {
-        timeout: 60000,
-        ...axiosConfig(apiKey)
-      }
-    )
-    .then((res) => {
-      if (!res.data?.usage?.total_tokens) {
-        // @ts-ignore
-        return Promise.reject(res.data?.error?.message || 'Embedding Error');
-      }
-      return {
-        tokenLen: res.data.usage.total_tokens || 0,
-        vectors: res.data.data.map((item) => item.embedding)
-      };
-    });
-
-  pushGenerateVectorBill({
-    isPay: !userOpenAiKey,
-    userId,
-    text: input.join(''),
-    tokenLen: result.tokenLen
-  });
-
-  return result.vectors;
-}
-
-export async function openaiEmbedding_system({ input }: Props) {
-  const apiKey = getSystemOpenAiKey();
-
-  // get the chatAPI instance
-  const chatAPI = getOpenAIApi(apiKey);
-
-  // convert the input into vectors
-  const result = await chatAPI
-    .createEmbedding(
-      {
-        model: embeddingModel,
-        input
-      },
-      {
-        timeout: 20000,
-        ...axiosConfig(apiKey)
-      }
-    )
-    .then((res) => {
-      if (!res.data?.usage?.total_tokens) {
-        // @ts-ignore
-        return Promise.reject(res.data?.error?.message || 'Embedding Error');
-      }
-      return {
-        tokenLen: res.data.usage.total_tokens || 0,
-        vectors: res.data.data.map((item) => item.embedding)
-      };
-    });
-
-  return result.vectors;
-}

View File

@@ -0,0 +1,79 @@
+import type { NextApiRequest, NextApiResponse } from 'next';
+import { jsonRes } from '@/service/response';
+import { authBalanceByUid, authUser } from '@/service/utils/auth';
+import { withNextCors } from '@/service/utils/tools';
+import { getOpenAIApi, axiosConfig } from '@/service/ai/openai';
+import { pushGenerateVectorBill } from '@/service/events/pushBill';
+
+type Props = {
+  model: string;
+  input: string[];
+};
+type Response = {
+  tokenLen: number;
+  vectors: number[][];
+};
+
+export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
+  try {
+    const { userId } = await authUser({ req });
+    let { input, model } = req.query as Props;
+
+    if (!Array.isArray(input)) {
+      throw new Error('缺少参数');
+    }
+
+    jsonRes<Response>(res, {
+      data: await getVector({ userId, input, model })
+    });
+  } catch (err) {
+    console.log(err);
+    jsonRes(res, {
+      code: 500,
+      error: err
+    });
+  }
+});
+
+export async function getVector({
+  model = 'text-embedding-ada-002',
+  userId,
+  input
+}: { userId?: string } & Props) {
+  userId && (await authBalanceByUid(userId));
+
+  // get the chatAPI instance
+  const chatAPI = getOpenAIApi();
+
+  // convert the input into vectors
+  const result = await chatAPI
+    .createEmbedding(
+      {
+        model,
+        input
+      },
+      {
+        timeout: 60000,
+        ...axiosConfig()
+      }
+    )
+    .then((res) => {
+      if (!res.data?.usage?.total_tokens) {
+        // @ts-ignore
+        return Promise.reject(res.data?.error?.message || 'Embedding Error');
+      }
+      return {
+        tokenLen: res.data.usage.total_tokens || 0,
+        vectors: res.data.data.map((item) => item.embedding)
+      };
+    });
+
+  userId &&
+    pushGenerateVectorBill({
+      userId,
+      tokenLen: result.tokenLen,
+      model
+    });
+
+  return result;
+}
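
A usage sketch for the new getVector helper (the kbSearch hunk above follows the same pattern; the import path and sample values are illustrative):

import { getVector } from '@/pages/api/openapi/plugin/vector';

const { vectors, tokenLen } = await getVector({
  model: 'text-embedding-ada-002',
  userId, // optional: enables the balance check and vector billing
  input: ['How do I create a knowledge base?']
});
// vectors[0] is a 1536-dimension embedding for text-embedding-ada-002
console.log(vectors[0].length, tokenLen);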

View File

@@ -15,8 +15,8 @@ import { Types } from 'mongoose';
 import { moduleFetch } from '@/service/api/request';
 import { AppModuleItemType, RunningModuleItemType } from '@/types/app';
 import { FlowInputItemTypeEnum } from '@/constants/flow';
-import { pushChatBill } from '@/service/events/pushBill';
-import { BillTypeEnum } from '@/constants/user';
+import { finishTaskBill, createTaskBill } from '@/service/events/pushBill';
+import { BillSourceEnum } from '@/constants/user';

 export type MessageItemType = ChatCompletionRequestMessage & { _id?: string };

 type FastGptWebChatProps = {
@@ -108,6 +108,13 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
       res.setHeader('newHistoryId', String(newHistoryId));
     }

+    const billId = await createTaskBill({
+      userId,
+      appName: app.name,
+      appId,
+      source: BillSourceEnum.fastgpt
+    });
+
     /* start process */
     const { responseData, answerText } = await dispatchModules({
       res,
@@ -117,7 +124,8 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
         history: prompts,
         userChatInput: prompt.value
       },
-      stream
+      stream,
+      billId: ''
     });

     // save chat
@@ -171,14 +179,9 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
       });
     }

-    pushChatBill({
-      isPay: true,
-      chatModel: 'gpt-3.5-turbo',
-      userId,
-      appId,
-      textLen: 1,
-      tokens: 100,
-      type: BillTypeEnum.chat
-    });
+    // bill
+    finishTaskBill({
+      billId
+    });
   } catch (err: any) {
     if (stream) {
@@ -199,18 +202,21 @@ export async function dispatchModules({
   modules,
   params = {},
   variables = {},
-  stream = false
+  stream = false,
+  billId
 }: {
   res: NextApiResponse;
   modules: AppModuleItemType[];
   params?: Record<string, any>;
   variables?: Record<string, any>;
+  billId: string;
   stream?: boolean;
 }) {
   const runningModules = loadModules(modules, variables);

-  let storeData: Record<string, any> = {};
-  let responseData: Record<string, any> = {};
-  let answerText = '';
+  let storeData: Record<string, any> = {}; // after module used
+  let responseData: Record<string, any> = {}; // response request and save to database
+  let answerText = ''; // AI answer

   function pushStore({
     isResponse = false,
@@ -327,6 +333,7 @@ export async function dispatchModules({
     });

     const data = {
       stream,
+      billId,
       ...params
     };

View File

@@ -1,19 +1,114 @@
 // Next.js API route support: https://nextjs.org/docs/api-routes/introduction
 import type { NextApiRequest, NextApiResponse } from 'next';
 import { jsonRes } from '@/service/response';
+import {
+  type QAModelItemType,
+  type ChatModelItemType,
+  type VectorModelItemType
+} from '@/types/model';
+import { readFileSync } from 'fs';

 export type InitDateResponse = {
   beianText: string;
   googleVerKey: string;
-  baiduTongji: boolean;
+  baiduTongji: string;
+  chatModels: ChatModelItemType[];
+  qaModels: QAModelItemType[];
+  vectorModels: VectorModelItemType[];
 };

+const defaultmodels = {
+  'Gpt35-4k': {
+    model: 'gpt-3.5-turbo',
+    name: 'Gpt35-4k',
+    contextMaxToken: 4000,
+    systemMaxToken: 2400,
+    maxTemperature: 1.2,
+    price: 1.5
+  },
+  'Gpt35-16k': {
+    model: 'gpt-3.5-turbo',
+    name: 'Gpt35-16k',
+    contextMaxToken: 16000,
+    systemMaxToken: 8000,
+    maxTemperature: 1.2,
+    price: 3
+  },
+  Gpt4: {
+    model: 'gpt-4',
+    name: 'Gpt4',
+    contextMaxToken: 8000,
+    systemMaxToken: 4000,
+    maxTemperature: 1.2,
+    price: 45
+  }
+};
+const defaultQaModels = {
+  'Gpt35-16k': {
+    model: 'gpt-3.5-turbo',
+    name: 'Gpt35-16k',
+    maxToken: 16000,
+    price: 3
+  }
+};
+const defaultVectorModels = {
+  'text-embedding-ada-002': {
+    model: 'text-embedding-ada-002',
+    name: 'Embedding-2',
+    price: 0.2
+  }
+};

 export default async function handler(req: NextApiRequest, res: NextApiResponse) {
+  const envs = {
+    beianText: process.env.SAFE_BEIAN_TEXT || '',
+    googleVerKey: process.env.CLIENT_GOOGLE_VER_TOKEN || '',
+    baiduTongji: process.env.BAIDU_TONGJI || ''
+  };
+
   jsonRes<InitDateResponse>(res, {
     data: {
-      beianText: process.env.SAFE_BEIAN_TEXT || '',
-      googleVerKey: process.env.CLIENT_GOOGLE_VER_TOKEN || '',
-      baiduTongji: process.env.BAIDU_TONGJI === '1'
+      ...envs,
+      ...initSystemModels()
     }
   });
 }

+export function initSystemModels() {
+  const { chatModels, qaModels, vectorModels } = (() => {
+    try {
+      const chatModels = Object.values(JSON.parse(readFileSync('data/ChatModels.json', 'utf-8')));
+      const qaModels = Object.values(JSON.parse(readFileSync('data/QAModels.json', 'utf-8')));
+      const vectorModels = Object.values(
+        JSON.parse(readFileSync('data/VectorModels.json', 'utf-8'))
+      );
+      return {
+        chatModels,
+        qaModels,
+        vectorModels
+      };
+    } catch (error) {
+      console.log(error);
+      return {
+        chatModels: Object.values(defaultmodels),
+        qaModels: Object.values(defaultQaModels),
+        vectorModels: Object.values(defaultVectorModels)
+      };
+    }
+  })() as {
+    chatModels: ChatModelItemType[];
+    qaModels: QAModelItemType[];
+    vectorModels: VectorModelItemType[];
+  };
+
+  global.chatModels = chatModels;
+  global.qaModels = qaModels;
+  global.vectorModels = vectorModels;
+
+  return {
+    chatModels,
+    qaModels,
+    vectorModels
+  };
+}
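
Note: getChatModel from @/service/utils/data (used by the AI Chat module above) is not part of the visible hunks; given the globals populated by initSystemModels, it is presumably a simple lookup — a hedged sketch:

import type { ChatModelItemType } from '@/types/model';

// Assumed: global.chatModels is declared in a global.d.ts and filled at boot.
export const getChatModel = (model?: string): ChatModelItemType | undefined =>
  global.chatModels.find((item) => item.model === model);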

View File

@@ -1,31 +1,22 @@
 import type { NextApiRequest, NextApiResponse } from 'next';
 import { jsonRes } from '@/service/response';
-import { System } from '@/service/models/system';
 import { authUser } from '@/service/utils/auth';
+import { readFileSync } from 'fs';

-export type InitDateResponse = {
-  beianText: string;
-  googleVerKey: string;
-};
-
 export default async function handler(req: NextApiRequest, res: NextApiResponse) {
   await authUser({ req, authRoot: true });
   updateSystemEnv();
-  jsonRes<InitDateResponse>(res);
+  jsonRes(res);
 }

 export async function updateSystemEnv() {
   try {
-    const mongoData = await System.findOne();
-
-    if (mongoData) {
-      const obj = mongoData.toObject();
-      global.systemEnv = {
-        ...global.systemEnv,
-        ...obj
-      };
-    }
-    console.log('update env', global.systemEnv);
+    const res = JSON.parse(readFileSync('data/SystemParams.json', 'utf-8'));
+    global.systemEnv = {
+      ...global.systemEnv,
+      ...res
+    };
   } catch (error) {
     console.log('update system env error');
   }

View File

@@ -15,6 +15,8 @@ import { SystemInputEnum } from '@/constants/app';
 import { streamFetch } from '@/api/fetch';
 import MyTooltip from '@/components/MyTooltip';
 import ChatBox, { type ComponentRef, type StartChatFnProps } from '@/components/ChatBox';
+import { useToast } from '@/hooks/useToast';
+import { getErrText } from '@/utils/tools';

 export type ChatTestComponentRef = {
   resetChatTest: () => void;
@@ -34,6 +36,7 @@ const ChatTest = (
 ) => {
   const BoxRef = useRef(null);
   const ChatBoxRef = useRef<ComponentRef>(null);
+  const { toast } = useToast();

   const isOpen = useMemo(() => modules && modules.length > 0, [modules]);
   const variableModules = useMemo(
@@ -60,21 +63,30 @@ const ChatTest = (
       const history = messages.slice(-historyMaxLen - 2, -2);

       // stream request to fetch the data
-      const { responseText } = await streamFetch({
+      const { responseText, errMsg } = await streamFetch({
         url: '/api/chat/chatTest',
         data: {
           history,
           prompt: messages[messages.length - 2].content,
           modules,
-          variables
+          variables,
+          appId: app._id,
+          appName: `调试-${app.name}`
         },
         onMessage: generatingMessage,
         abortSignal: controller
       });

+      if (errMsg) {
+        return Promise.reject({
+          message: errMsg,
+          responseText
+        });
+      }
+
       return { responseText };
     },
-    [modules]
+    [app._id, app.name, modules]
   );

   useOutsideClick({
useOutsideClick({ useOutsideClick({

View File

@@ -6,14 +6,14 @@ import { FlowModuleItemType } from '@/types/flow';
 import Divider from './modules/Divider';
 import Container from './modules/Container';
 import RenderInput from './render/RenderInput';
-import type { ClassifyQuestionAgentItemType } from '@/types/app';
+import type { RecognizeIntentionAgentItemType } from '@/types/app';
 import { Handle, Position } from 'reactflow';
 import { customAlphabet } from 'nanoid';
 const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 4);
 import MyIcon from '@/components/Icon';
 import { FlowOutputItemTypeEnum } from '@/constants/flow';

-const NodeCQNode = ({
+const NodeRINode = ({
   data: { moduleId, inputs, outputs, onChangeNode, ...props }
 }: NodeProps<FlowModuleItemType>) => {
   return (
@@ -30,7 +30,7 @@ const NodeCQNode = ({
       value: agents = []
     }: {
       key: string;
-      value?: ClassifyQuestionAgentItemType[];
+      value?: RecognizeIntentionAgentItemType[];
     }) => (
       <Box>
         {agents.map((item, i) => (
@@ -133,4 +133,4 @@ const NodeCQNode = ({
     </NodeCard>
   );
 };

-export default React.memo(NodeCQNode);
+export default React.memo(NodeRINode);

View File

@@ -49,7 +49,7 @@ const NodeAnswer = dynamic(() => import('./components/NodeAnswer'), {
 const NodeQuestionInput = dynamic(() => import('./components/NodeQuestionInput'), {
   ssr: false
 });
-const NodeCQNode = dynamic(() => import('./components/NodeCQNode'), {
+const NodeRINode = dynamic(() => import('./components/NodeRINode'), {
   ssr: false
 });
 const NodeUserGuide = dynamic(() => import('./components/NodeUserGuide'), {
@@ -70,7 +70,7 @@ const nodeTypes = {
   [FlowModuleTypeEnum.kbSearchNode]: NodeKbSearch,
   [FlowModuleTypeEnum.tfSwitchNode]: NodeTFSwitch,
   [FlowModuleTypeEnum.answerNode]: NodeAnswer,
-  [FlowModuleTypeEnum.classifyQuestionNode]: NodeCQNode
+  [FlowModuleTypeEnum.recognizeIntention]: NodeRINode
 };

 const edgeTypes = {
   buttonedge: ButtonEdge

View File

@ -9,7 +9,6 @@ import {
  Box,
  useTheme
} from '@chakra-ui/react';
- import { QuoteItemType } from '@/pages/api/openapi/kb/appKbSearch';
import MyIcon from '@/components/Icon';
import InputDataModal from '@/pages/kb/components/InputDataModal';
import { getKbDataItemById } from '@/api/plugins/kb';

View File

@ -4,6 +4,7 @@ import Markdown from '@/components/Markdown';
import { useMarkdown } from '@/hooks/useMarkdown';
import { useRouter } from 'next/router';
import { useGlobalStore } from '@/store/global';
+ import { beianText } from '@/store/static';
import styles from './index.module.scss';
import axios from 'axios';
@ -13,10 +14,7 @@ const Home = () => {
  const router = useRouter();
  const { inviterId } = router.query as { inviterId: string };
  const { data } = useMarkdown({ url: '/intro.md' });
- const {
-   isPc,
-   initData: { beianText }
- } = useGlobalStore();
+ const { isPc } = useGlobalStore();
  const [star, setStar] = useState(1500);

  useEffect(() => {

View File

@ -17,6 +17,7 @@ import { useToast } from '@/hooks/useToast';
import { TrainingModeEnum } from '@/constants/plugin';
import { getErrText } from '@/utils/tools';
import MyIcon from '@/components/Icon';
+ import { vectorModelList } from '@/store/static';

export type FormData = { dataId?: string; a: string; q: string };
@ -65,6 +66,7 @@ const InputDataModal = ({
  };
  const { insertLen } = await postKbDataFromList({
    kbId,
+   model: vectorModelList[0].model,
    mode: TrainingModeEnum.index,
    data: [data]
  });

View File

@ -1,4 +1,4 @@
- import React, { useState, useCallback, useRef } from 'react';
+ import React, { useState, useCallback } from 'react';
import {
  Box,
  Flex,
@ -22,9 +22,9 @@ import Radio from '@/components/Radio';
import { splitText_token } from '@/utils/file';
import { TrainingModeEnum } from '@/constants/plugin';
import { getErrText } from '@/utils/tools';
- import { ChatModelMap, OpenAiChatEnum, embeddingPrice } from '@/constants/model';
import { formatPrice } from '@/utils/user';
import MySlider from '@/components/Slider';
+ import { qaModelList, vectorModelList } from '@/store/static';

const fileExtension = '.txt,.doc,.docx,.pdf,.md';
@ -39,12 +39,14 @@ const SelectFileModal = ({
}) => {
  const [modeMap, setModeMap] = useState({
    [TrainingModeEnum.qa]: {
-     maxLen: 8000,
-     price: ChatModelMap[OpenAiChatEnum.GPT3516k].price
+     model: qaModelList[0].model,
+     maxLen: (qaModelList[0]?.maxToken || 16000) * 0.5,
+     price: qaModelList[0]?.price || 3
    },
    [TrainingModeEnum.index]: {
+     model: vectorModelList[0].model,
      maxLen: 600,
-     price: embeddingPrice
+     price: vectorModelList[0]?.price || 0.2
    }
  });
  const [btnLoading, setBtnLoading] = useState(false);
@ -111,6 +113,7 @@ const SelectFileModal = ({
    },
    [toast]
  );
+ console.log({ model: modeMap[mode].model });

  const { mutate, isLoading: uploading } = useMutation({
    mutationFn: async () => {
@ -122,6 +125,7 @@ const SelectFileModal = ({
      for (let i = 0; i < splitRes.chunks.length; i += step) {
        const { insertLen } = await postKbDataFromList({
          kbId,
+         model: modeMap[mode].model,
          data: splitRes.chunks
            .slice(i, i + step)
            .map((item) => ({ q: item.value, a: '', source: item.filename })),
@ -275,8 +279,8 @@ const SelectFileModal = ({
        setModeMap((state) => ({
          ...state,
          [TrainingModeEnum.index]: {
-           maxLen: val,
-           price: embeddingPrice
+           ...modeMap[TrainingModeEnum.index],
+           maxLen: val
          }
        }));
      }}

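With the hardcoded maxLen and embeddingPrice gone, the per-mode defaults now derive from whichever models the server reports first. A sketch of that derivation, assuming the qaModelList / vectorModelList shapes from model.d.ts later in this commit; the fallback literals mirror the diff:

type QAModelItemType = { model: string; name: string; maxToken: number; price: number };
type VectorModelItemType = { model: string; name: string; price: number };

function buildModeDefaults(qaModelList: QAModelItemType[], vectorModelList: VectorModelItemType[]) {
  return {
    qa: {
      model: qaModelList[0]?.model,
      // half the QA model's context is reserved for the source chunk
      maxLen: (qaModelList[0]?.maxToken || 16000) * 0.5,
      price: qaModelList[0]?.price || 3
    },
    index: {
      model: vectorModelList[0]?.model,
      maxLen: 600,
      price: vectorModelList[0]?.price || 0.2
    }
  };
}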
View File

@ -1,6 +1,6 @@
import React, { useState } from 'react';
import { Table, Thead, Tbody, Tr, Th, Td, TableContainer, Flex, Box } from '@chakra-ui/react';
- import { BillTypeMap } from '@/constants/user';
+ import { BillSourceMap } from '@/constants/user';
import { getUserBills } from '@/api/user';
import type { UserBillType } from '@/types/user';
import { usePagination } from '@/hooks/usePagination';
@ -39,10 +39,8 @@ const BillTable = () => {
  <Thead>
    <Tr>
      <Th></Th>
      <Th></Th>
      <Th></Th>
-     <Th></Th>
-     <Th>Tokens </Th>
      <Th></Th>
    </Tr>
  </Thead>
@ -50,11 +48,9 @@ const BillTable = () => {
  {bills.map((item) => (
    <Tr key={item.id}>
      <Td>{dayjs(item.time).format('YYYY/MM/DD HH:mm:ss')}</Td>
-     <Td>{BillTypeMap[item.type] || '-'}</Td>
-     <Td>{item.modelName}</Td>
-     <Td>{item.textLen}</Td>
-     <Td>{item.tokenLen}</Td>
-     <Td>{item.price}</Td>
+     <Td>{BillSourceMap[item.source]}</Td>
+     <Td>{item.appName || '-'}</Td>
+     <Td>{item.total}</Td>
    </Tr>
  ))}
</Tbody>

View File

@ -0,0 +1,114 @@
import axios, { Method, InternalAxiosRequestConfig, AxiosResponse } from 'axios';
interface ConfigType {
headers?: { [key: string]: string };
hold?: boolean;
timeout?: number;
}
interface ResponseDataType {
code: number;
message: string;
data: any;
}
/**
 * request start hook: mutate the config (e.g. attach auth headers) before it is sent
 */
function requestStart(config: InternalAxiosRequestConfig): InternalAxiosRequestConfig {
if (config.headers) {
// config.headers.Authorization = getToken();
}
return config;
}
/**
 * response success hook: runs on every 2xx response
 */
function responseSuccess(response: AxiosResponse<ResponseDataType>) {
return response;
}
/**
 * check the business status code carried in the response body
 */
function checkRes(data: ResponseDataType) {
if (data === undefined) {
console.log('error->', data, 'data is empty');
return Promise.reject('服务器异常');
} else if (data.code < 200 || data.code >= 400) {
return Promise.reject(data);
}
return data.data;
}
/**
 * normalize any request/response error into a rejected { message } object
 */
function responseError(err: any) {
console.log('error->', '请求错误', err);
if (!err) {
return Promise.reject({ message: '未知错误' });
}
if (typeof err === 'string') {
return Promise.reject({ message: err });
}
return Promise.reject(err);
}
/* create the request instance */
const instance = axios.create({
  timeout: 60000, // timeout
  headers: {
    'content-type': 'application/json'
  }
});

/* request interceptor */
instance.interceptors.request.use(requestStart, (err) => Promise.reject(err));
/* response interceptor */
instance.interceptors.response.use(responseSuccess, (err) => Promise.reject(err));

function request(url: string, data: any, config: ConfigType, method: Method): any {
  /* drop null / undefined fields */
for (const key in data) {
if (data[key] === null || data[key] === undefined) {
delete data[key];
}
}
return instance
.request({
baseURL: `http://localhost:${process.env.PORT || 3000}/api`,
url,
method,
data: ['POST', 'PUT'].includes(method) ? data : null,
params: !['POST', 'PUT'].includes(method) ? data : null,
      ...config // user-defined config; can override the defaults above
})
.then((res) => checkRes(res.data))
.catch((err) => responseError(err));
}
/**
 * api request helpers
 * @param {String} url
 * @param {Any} params
 * @param {Object} config
 * @returns
 */
export function GET<T>(url: string, params = {}, config: ConfigType = {}): Promise<T> {
return request(url, params, config, 'GET');
}
export function POST<T>(url: string, data = {}, config: ConfigType = {}): Promise<T> {
return request(url, data, config, 'POST');
}
export function PUT<T>(url: string, data = {}, config: ConfigType = {}): Promise<T> {
return request(url, data, config, 'PUT');
}
export function DELETE<T>(url: string, data = {}, config: ConfigType = {}): Promise<T> {
return request(url, data, config, 'DELETE');
}

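A usage sketch for the helpers above. Note the instance targets the service's own API on localhost, so this is meant for server-side code; the endpoint, import path, and response shapes here are illustrative only:

import { GET, POST } from '@/service/request'; // assumed location of the file above

async function example() {
  // GET sends `data` as query params, POST/PUT send it as the JSON body
  const item = await GET<{ id: string }>('/some/endpoint', { id: '1' });
  const created = await POST<{ ok: boolean }>('/some/endpoint', { name: 'demo' });
  return { item, created };
}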
View File

@ -93,6 +93,8 @@ export const moduleFetch = ({ url, data, res }: Props) =>
        event: sseResponseEventEnum.answer,
        data: JSON.stringify(data)
      });
+   } else if (item.event === sseResponseEventEnum.error) {
+     return reject(getErrText(data, '流响应错误'));
    }
  });
  read();

View File

@ -1,15 +1,16 @@
import { TrainingData } from '@/service/mongo';
- import { getApiKey } from '../utils/auth';
import { OpenAiChatEnum } from '@/constants/model';
import { pushSplitDataBill } from '@/service/events/pushBill';
import { openaiAccountError } from '../errorCode';
- import { modelServiceToolMap } from '../utils/chat';
import { ChatRoleEnum } from '@/constants/chat';
- import { BillTypeEnum } from '@/constants/user';
+ import { BillSourceEnum } from '@/constants/user';
import { pushDataToKb } from '@/pages/api/openapi/kb/pushData';
import { TrainingModeEnum } from '@/constants/plugin';
import { ERROR_ENUM } from '../errorCode';
import { sendInform } from '@/pages/api/user/inform/send';
+ import { authBalanceByUid } from '../utils/auth';
+ import { axiosConfig, getOpenAIApi } from '../ai/openai';
+ import { ChatCompletionRequestMessage } from 'openai';

const reduceQueue = () => {
  global.qaQueueLen = global.qaQueueLen > 0 ? global.qaQueueLen - 1 : 0;
@ -37,7 +38,8 @@ export async function generateQA(): Promise<any> {
    kbId: 1,
    prompt: 1,
    q: 1,
-   source: 1
+   source: 1,
+   model: 1
  });

  // task preemption
@ -51,54 +53,59 @@ export async function generateQA(): Promise<any> {
    userId = String(data.userId);
    const kbId = String(data.kbId);

-   // check the balance and fetch the openapi key
-   const { systemAuthKey } = await getApiKey({
-     model: OpenAiChatEnum.GPT35,
-     userId,
-     mustPay: true
-   });
+   await authBalanceByUid(userId);

    const startTime = Date.now();
+   const chatAPI = getOpenAIApi();

    // ask chatgpt for the answers
    const response = await Promise.all(
-     [data.q].map((text) =>
-       modelServiceToolMap
-         .chatCompletion({
-           model: OpenAiChatEnum.GPT3516k,
-           apiKey: systemAuthKey,
-           temperature: 0.8,
-           messages: [
-             {
-               obj: ChatRoleEnum.System,
-               value: `你是出题人.
+     [data.q].map((text) => {
+       const messages: ChatCompletionRequestMessage[] = [
+         {
+           role: 'system',
+           content: `你是出题人.
${data.prompt || '用户会发送一段长文本'}.
25 . . 按格式回答: Q1:
A1:
Q2:
A2:
...`
          },
          {
-           obj: 'Human',
-           value: text
+           role: 'user',
+           content: text
          }
-       ],
-       stream: false
-     })
-       .then(({ totalTokens, responseText, responseMessages }) => {
-         const result = formatSplitText(responseText); // the formatted QA pairs
+       ];
+       return chatAPI
+         .createChatCompletion(
+           {
+             model: data.model,
+             temperature: 0.8,
+             messages,
+             stream: false
+           },
+           {
+             timeout: 480000,
+             ...axiosConfig()
+           }
+         )
+         .then((res) => {
+           const answer = res.data.choices?.[0].message?.content;
+           const totalTokens = res.data.usage?.total_tokens || 0;
+           const result = formatSplitText(answer || ''); // the formatted QA pairs
          console.log(`split result length: `, result.length);
          // billing
          pushSplitDataBill({
-           isPay: result.length > 0,
            userId: data.userId,
-           type: BillTypeEnum.QA,
-           textLen: responseMessages.map((item) => item.value).join('').length,
-           totalTokens
+           totalTokens,
+           model: data.model,
+           appName: 'QA 拆分'
          });
          return {
-           rawContent: responseText,
+           rawContent: answer,
            result
          };
        })
@ -106,8 +113,8 @@
          console.log('QA拆分错误');
          console.log(err.response?.status, err.response?.statusText, err.response?.data);
          return Promise.reject(err);
-       })
-     )
+       });
+     })
  );

  const responseList = response.map((item) => item.result).flat();
@ -120,6 +127,7 @@
      source: data.source
    })),
    userId,
+   model: global.vectorModels[0].model,
    mode: TrainingModeEnum.index
  });

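formatSplitText itself is not shown in this diff. A hedged sketch of what such a parser could look like, given the Q1:/A1: format the prompt asks for:

function formatSplitText(text: string) {
  const result: { q: string; a: string }[] = [];
  // capture each "Qn: ... An: ..." pair, up to the next question or the end
  const regex = /Q\d+:([\s\S]*?)A\d+:([\s\S]*?)(?=Q\d+:|$)/g;
  let match: RegExpExecArray | null;
  while ((match = regex.exec(text)) !== null) {
    const q = match[1].trim();
    const a = match[2].trim();
    if (q && a) result.push({ q, a });
  }
  return result;
}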
View File

@ -1,6 +1,6 @@
import { openaiAccountError } from '../errorCode';
import { insertKbItem } from '@/service/pg';
- import { openaiEmbedding } from '@/pages/api/openapi/plugin/openaiEmbedding';
+ import { getVector } from '@/pages/api/openapi/plugin/vector';
import { TrainingData } from '../models/trainingData';
import { ERROR_ENUM } from '../errorCode';
import { TrainingModeEnum } from '@/constants/plugin';
@ -33,7 +33,8 @@ export async function generateVector(): Promise<any> {
    kbId: 1,
    q: 1,
    a: 1,
-   source: 1
+   source: 1,
+   model: 1
  });

  // task preemption
@ -55,10 +56,10 @@ export async function generateVector(): Promise<any> {
  ];

  // generate the embeddings
- const vectors = await openaiEmbedding({
+ const vectors = await getVector({
+   model: data.model,
    input: dataItems.map((item) => item.q),
-   userId,
-   mustPay: true
+   userId
  });

  // insert the generated vectors into pg

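getVector replaces the old openaiEmbedding helper. A minimal sketch of such a wrapper with the openai v3 SDK, assuming the { model, input, userId } contract seen at the call site; the real version also meters the tokens for billing:

import { Configuration, OpenAIApi } from 'openai';

export async function getVector({ model, input }: { model: string; input: string[]; userId?: string }) {
  const api = new OpenAIApi(new Configuration({ apiKey: process.env.OPENAIKEY }));
  const res = await api.createEmbedding({ model, input });
  // one embedding per input string, in order
  return res.data.data.map((item) => item.embedding);
}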
View File

@ -1,66 +1,85 @@
import { connectToDatabase, Bill, User, ShareChat } from '../mongo';
- import {
-   ChatModelMap,
-   OpenAiChatEnum,
-   ChatModelType,
-   embeddingModel,
-   embeddingPrice
- } from '@/constants/model';
- import { BillTypeEnum } from '@/constants/user';
+ import { BillSourceEnum } from '@/constants/user';
+ import { getModel } from '../utils/data';
+ import type { BillListItemType } from '@/types/mongoSchema';

- export const pushChatBill = async ({
-   isPay,
-   chatModel,
-   userId,
-   appId,
-   textLen,
-   tokens,
-   type
- }: {
-   isPay: boolean;
-   chatModel: ChatModelType;
-   userId: string;
-   appId: string;
-   textLen: number;
-   tokens: number;
-   type: BillTypeEnum.chat | BillTypeEnum.openapiChat;
- }) => {
-   console.log(`chat generate success. text len: ${textLen}. token len: ${tokens}. pay:${isPay}`);
-   if (!isPay) return;
-
-   let billId = '';
-
-   try {
-     await connectToDatabase();
-     // calculate the price
-     const unitPrice = ChatModelMap[chatModel]?.price || 3;
-     const price = unitPrice * tokens;
-
-     try {
-       // insert the Bill record
-       const res = await Bill.create({
-         userId,
-         type,
-         modelName: chatModel,
-         appId,
-         textLen,
-         tokenLen: tokens,
-         price
-       });
-       billId = res._id;
-       // deduct from the account balance
-       await User.findByIdAndUpdate(userId, {
-         $inc: { balance: -price }
-       });
-     } catch (error) {
-       console.log('创建账单失败:', error);
-       billId && Bill.findByIdAndDelete(billId);
-     }
-   } catch (error) {
-     console.log(error);
-   }
- };
+ export const createTaskBill = async ({
+   appName,
+   appId,
+   userId,
+   source
+ }: {
+   appName: string;
+   appId: string;
+   userId: string;
+   source: `${BillSourceEnum}`;
+ }) => {
+   const res = await Bill.create({
+     userId,
+     appName,
+     appId,
+     total: 0,
+     source,
+     list: []
+   });
+   return String(res._id);
+ };
+
+ export const pushTaskBillListItem = async ({
+   billId,
+   moduleName,
+   amount,
+   model,
+   tokenLen
+ }: { billId?: string } & BillListItemType) => {
+   if (!billId) return;
+   try {
+     await Bill.findByIdAndUpdate(billId, {
+       $push: {
+         list: {
+           moduleName,
+           amount,
+           model,
+           tokenLen
+         }
+       }
+     });
+   } catch (error) {}
+ };
+
+ export const finishTaskBill = async ({ billId }: { billId: string }) => {
+   try {
+     // update bill
+     const res = await Bill.findByIdAndUpdate(billId, [
+       {
+         $set: {
+           total: {
+             $sum: '$list.amount'
+           },
+           time: new Date()
+         }
+       }
+     ]);
+     if (!res) return;
+     const total = res.list.reduce((sum, item) => sum + item.amount, 0) || 0;
+     console.log('finish bill:', total);
+     // deduct from the account balance
+     await User.findByIdAndUpdate(res.userId, {
+       $inc: { balance: -total }
+     });
+   } catch (error) {
+     console.log('Finish bill failed:', error);
+     billId && Bill.findByIdAndDelete(billId);
+   }
+ };
+
+ export const delTaskBill = async (billId?: string) => {
+   if (!billId) return;
+   try {
+     await Bill.findByIdAndRemove(billId);
+   } catch (error) {}
+ };

export const updateShareChatBill = async ({
@ -81,22 +100,17 @@ export const updateShareChatBill = ({
};

export const pushSplitDataBill = async ({
- isPay,
  userId,
  totalTokens,
- textLen,
- type
+ model,
+ appName
}: {
- isPay: boolean;
+ model: string;
  userId: string;
  totalTokens: number;
- textLen: number;
- type: BillTypeEnum.QA;
+ appName: string;
}) => {
- console.log(
-   `splitData generate success. text len: ${textLen}. token len: ${totalTokens}. pay:${isPay}`
- );
- if (!isPay) return;
+ console.log(`splitData generate success. token len: ${totalTokens}.`);

  let billId;
@ -104,24 +118,22 @@ export const pushSplitDataBill = ({
    await connectToDatabase();

    // get the model unit price (splitting used gpt35)
-   const unitPrice = ChatModelMap[OpenAiChatEnum.GPT3516k].price || 3;
+   const unitPrice = global.chatModels.find((item) => item.model === model)?.price || 3;
    // calculate the price
-   const price = unitPrice * totalTokens;
+   const total = unitPrice * totalTokens;

    // insert the Bill record
    const res = await Bill.create({
      userId,
-     type,
-     modelName: OpenAiChatEnum.GPT3516k,
-     textLen,
+     appName,
      tokenLen: totalTokens,
-     price
+     total
    });
    billId = res._id;

    // deduct from the account balance
    await User.findByIdAndUpdate(userId, {
-     $inc: { balance: -price }
+     $inc: { balance: -total }
    });
  } catch (error) {
    console.log('创建账单失败:', error);
@ -130,21 +142,14 @@ export const pushSplitDataBill = ({
};

export const pushGenerateVectorBill = async ({
- isPay,
  userId,
- text,
- tokenLen
+ tokenLen,
+ model
}: {
- isPay: boolean;
  userId: string;
- text: string;
  tokenLen: number;
+ model: string;
}) => {
- // console.log(
- //   `vector generate success. text len: ${text.length}. token len: ${tokenLen}. pay:${isPay}`
- // );
- if (!isPay) return;

  let billId;
  try {
@ -152,23 +157,22 @@ export const pushGenerateVectorBill = ({
    try {
      // calculate the price, at least 1
-     let price = embeddingPrice * tokenLen;
-     price = price > 1 ? price : 1;
+     const unitPrice = global.vectorModels.find((item) => item.model === model)?.price || 0.2;
+     let total = unitPrice * tokenLen;
+     total = total > 1 ? total : 1;

      // insert the Bill record
      const res = await Bill.create({
        userId,
-       type: BillTypeEnum.vector,
-       modelName: embeddingModel,
-       textLen: text.length,
-       tokenLen,
-       price
+       model,
+       appName: '索引生成',
+       total
      });
      billId = res._id;
      // deduct from the account balance
      await User.findByIdAndUpdate(userId, {
-       $inc: { balance: -price }
+       $inc: { balance: -total }
      });
    } catch (error) {
      console.log('创建账单失败:', error);
@ -178,3 +182,9 @@ export const pushGenerateVectorBill = ({
    console.log(error);
  }
};

+ export const countModelPrice = ({ model, tokens }: { model: string; tokens: number }) => {
+   const modelData = getModel(model);
+   if (!modelData) return 0;
+   return modelData.price * tokens;
+ };

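Taken together, the new functions replace the single pushChatBill insert with a bill lifecycle: open an empty bill when a task starts, append one list item per executed module, then aggregate and deduct once. A usage sketch under the assumption that BillSourceEnum.fastgpt resolves to the string 'fastgpt' (it is the schema default) and that amounts are in the same integer unit formatPrice expects:

async function runBilledTask({ userId, appId }: { userId: string; appId: string }) {
  const billId = await createTaskBill({
    appName: 'demo app',
    appId,
    userId,
    source: 'fastgpt'
  });

  // one entry per module that consumed tokens
  await pushTaskBillListItem({
    billId,
    moduleName: 'AI Chat',
    amount: countModelPrice({ model: 'gpt-3.5-turbo', tokens: 1200 }),
    model: 'gpt-3.5-turbo',
    tokenLen: 1200
  });

  // $sum over list.amount becomes `total`, then the balance is decremented once
  await finishTaskBill({ billId });
}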
View File

@ -1,6 +1,5 @@
import { Schema, model, models, Model } from 'mongoose';
import { AppSchema as AppType } from '@/types/mongoSchema';
- import { ChatModelMap, OpenAiChatEnum } from '@/constants/model';

const AppSchema = new Schema({
  userId: {
@ -24,50 +23,6 @@ const AppSchema = new Schema({
    type: Date,
    default: () => new Date()
  },
- chat: {
-   relatedKbs: {
-     type: [Schema.Types.ObjectId],
-     ref: 'kb',
-     default: []
-   },
-   searchSimilarity: {
-     type: Number,
-     default: 0.8
-   },
-   searchLimit: {
-     type: Number,
-     default: 5
-   },
-   searchEmptyText: {
-     type: String,
-     default: ''
-   },
-   systemPrompt: {
-     type: String,
-     default: ''
-   },
-   limitPrompt: {
-     type: String,
-     default: ''
-   },
-   maxToken: {
-     type: Number,
-     default: 4000,
-     min: 100
-   },
-   temperature: {
-     type: Number,
-     min: 0,
-     max: 10,
-     default: 0
-   },
-   chatModel: {
-     // the model used for chat
-     type: String,
-     enum: Object.keys(ChatModelMap),
-     default: OpenAiChatEnum.GPT3516k
-   }
- },
  share: {
    topNum: {
      type: Number,

View File

@ -1,7 +1,6 @@
import { Schema, model, models, Model } from 'mongoose';
- import { ChatModelMap, embeddingModel } from '@/constants/model';
import { BillSchema as BillType } from '@/types/mongoSchema';
- import { BillTypeMap } from '@/constants/user';
+ import { BillSourceEnum, BillSourceMap } from '@/constants/user';

const BillSchema = new Schema({
  userId: {
@ -9,36 +8,48 @@ const BillSchema = new Schema({
    ref: 'user',
    required: true
  },
- type: {
-   type: String,
-   enum: Object.keys(BillTypeMap),
-   required: true
- },
- modelName: {
-   type: String,
-   enum: [...Object.keys(ChatModelMap), embeddingModel]
- },
+ appName: {
+   type: String,
+   default: ''
+ },
  appId: {
    type: Schema.Types.ObjectId,
-   ref: 'app'
+   ref: 'app',
+   required: false
  },
  time: {
    type: Date,
    default: () => new Date()
  },
- textLen: {
-   // total characters of prompt + response
-   type: Number,
-   required: true
- },
- tokenLen: {
-   // the equivalent token count
-   type: Number,
-   required: true
- },
- price: {
-   type: Number,
-   required: true
+ total: {
+   type: Number,
+   required: true
+ },
+ source: {
+   type: String,
+   enum: Object.keys(BillSourceMap),
+   default: BillSourceEnum.fastgpt
+ },
+ list: {
+   type: [
+     {
+       moduleName: {
+         type: String,
+         required: true
+       },
+       amount: {
+         type: Number,
+         required: true
+       },
+       model: {
+         type: String
+       },
+       tokenLen: {
+         type: Number
+       }
+     }
+   ],
+   default: []
  }
});

View File

@ -1,22 +0,0 @@
import { Schema, model, models } from 'mongoose';
const SystemSchema = new Schema({
vectorMaxProcess: {
type: Number,
default: 10
},
qaMaxProcess: {
type: Number,
default: 10
},
pgIvfflatProbe: {
type: Number,
default: 10
},
sensitiveCheck: {
type: Boolean,
default: false
}
});
export const System = models['system'] || model('system', SystemSchema);

View File

@ -28,13 +28,16 @@ const TrainingDataSchema = new Schema({
    enum: Object.keys(TrainingTypeMap),
    required: true
  },
+ model: {
+   type: String,
+   required: true
+ },
  prompt: {
-   // the prompt used when splitting
+   // qa split prompt
    type: String,
    default: ''
  },
  q: {
-   // 如果是
    type: String,
    default: ''
  },

View File

@ -2,6 +2,7 @@ import mongoose from 'mongoose';
import tunnel from 'tunnel';
import { startQueue } from './utils/tools';
import { updateSystemEnv } from '@/pages/api/system/updateEnv';
+ import { initSystemModels } from '@/pages/api/system/getInitData';

/**
 * connect MongoDB
 */
@ -10,6 +11,7 @@ export async function connectToDatabase(): Promise<void> {
  if (global.mongodb) {
    return;
  }
+ global.mongodb = 'connecting';

  // init global data
  global.qaQueueLen = 0;
@ -31,8 +33,9 @@ export async function connectToDatabase(): Promise<void> {
      }
    });
  }
+ initSystemModels();
+ updateSystemEnv();

- global.mongodb = 'connecting';
  try {
    mongoose.set('strictQuery', true);
    global.mongodb = await mongoose.connect(process.env.MONGODB_URI as string, {
@ -49,7 +52,6 @@ export async function connectToDatabase(): Promise<void> {
  }

  // init function
- updateSystemEnv();
  startQueue();
}
@ -66,5 +68,4 @@ export * from './models/collection';
export * from './models/shareChat';
export * from './models/kb';
export * from './models/inform';
- export * from './models/system';
export * from './models/image';

View File

@ -92,7 +92,7 @@ export const sseErrRes = (res: NextApiResponse, error: any) => {
  } else if (openaiError[error?.response?.statusText]) {
    msg = openaiError[error.response.statusText];
  }
- console.log('sse error', error);
+ console.log('sse error => ', error);
  sseResponse({
    res,

View File

@ -1,15 +1,11 @@
import type { NextApiRequest } from 'next';
import jwt from 'jsonwebtoken';
import Cookie from 'cookie';
- import { Chat, App, OpenApi, User, ShareChat, KB } from '../mongo';
+ import { App, OpenApi, User, ShareChat, KB } from '../mongo';
import type { AppSchema } from '@/types/mongoSchema';
- import type { ChatItemType } from '@/types/chat';
- import mongoose from 'mongoose';
import { defaultApp } from '@/constants/model';
import { formatPrice } from '@/utils/user';
import { ERROR_ENUM } from '../errorCode';
- import { ChatModelType, OpenAiChatEnum } from '@/constants/model';
- import { hashPassword } from '@/service/utils/tools';

export type AuthType = 'token' | 'root' | 'apikey';
@ -35,6 +31,19 @@ export const parseCookie = (cookie?: string): Promise<string> => {
  });
};

+ /* auth balance */
+ export const authBalanceByUid = async (uid: string) => {
+   const user = await User.findById(uid);
+   if (!user) {
+     return Promise.reject(ERROR_ENUM.unAuthorization);
+   }
+   if (!user.openaiKey && formatPrice(user.balance) <= 0) {
+     return Promise.reject(ERROR_ENUM.insufficientQuota);
+   }
+   return user;
+ };

/* uniform auth user */
export const authUser = async ({
  req,
@ -144,14 +153,7 @@ export const authUser = async ({

  // balance check
  if (authBalance) {
-   const user = await User.findById(uid);
-   if (!user) {
-     return Promise.reject(ERROR_ENUM.unAuthorization);
-   }
-   if (!user.openaiKey && formatPrice(user.balance) <= 0) {
-     return Promise.reject(ERROR_ENUM.insufficientQuota);
-   }
+   await authBalanceByUid(uid);
  }

  return {
@ -166,43 +168,6 @@ export const getSystemOpenAiKey = () => {
  return process.env.ONEAPI_KEY || process.env.OPENAIKEY || '';
};

- /* get the key for api requests */
- export const getApiKey = async ({
-   model,
-   userId,
-   mustPay = false
- }: {
-   model: ChatModelType;
-   userId: string;
-   mustPay?: boolean;
- }) => {
-   const user = await User.findById(userId, 'openaiKey balance');
-   if (!user) {
-     return Promise.reject(ERROR_ENUM.unAuthorization);
-   }
-
-   const userOpenAiKey = user.openaiKey || '';
-   const systemAuthKey = getSystemOpenAiKey();
-
-   // the user has their own key
-   if (!mustPay && userOpenAiKey) {
-     return {
-       userOpenAiKey,
-       systemAuthKey: ''
-     };
-   }
-
-   // check the platform account balance
-   if (formatPrice(user.balance) <= 0) {
-     return Promise.reject(ERROR_ENUM.insufficientQuota);
-   }
-
-   return {
-     userOpenAiKey: '',
-     systemAuthKey
-   };
- };

// model access auth
export const authApp = async ({
  appId,
@ -232,14 +197,6 @@ export const authApp = async ({
    if (userId !== String(app.userId)) return Promise.reject(ERROR_ENUM.unAuthModel);
  }

- // do not share detail info
- if (!reserveDetail && !app.share.isShareDetail && userId !== String(app.userId)) {
-   app.chat = {
-     ...defaultApp.chat,
-     chatModel: app.chat.chatModel
-   };
- }

  return {
    app,
    showModelDetail: userId === String(app.userId)

View File

@ -1,13 +1,8 @@
import { ChatItemType } from '@/types/chat';
import { modelToolMap } from '@/utils/plugin';
- import type { ChatModelType } from '@/constants/model';
- import { ChatRoleEnum, sseResponseEventEnum } from '@/constants/chat';
- import { sseResponse } from '../tools';
+ import { ChatRoleEnum } from '@/constants/chat';
import { OpenAiChatEnum } from '@/constants/model';
- import { chatResponse, openAiStreamResponse } from './openai';
import type { NextApiResponse } from 'next';
- import { textAdaptGptResponse } from '@/utils/adapt';
- import { parseStreamChunk } from '@/utils/adapt';

export type ChatCompletionType = {
  apiKey: string;
@ -36,11 +31,6 @@ export type StreamResponseReturnType = {
  finishMessages: ChatItemType[];
};

- export const modelServiceToolMap = {
-   chatCompletion: chatResponse,
-   streamResponse: openAiStreamResponse
- };

/* delete invalid symbol */
const simplifyStr = (str = '') =>
  str
@ -54,7 +44,7 @@ export const ChatContextFilter = ({
  prompts,
  maxTokens
}: {
- model: ChatModelType;
+ model: string;
  prompts: ChatItemType[];
  maxTokens: number;
}) => {
@ -111,126 +101,3 @@ export const ChatContextFilter = ({
  return [...systemPrompts, ...chats];
};
/* stream response */
export const resStreamResponse = async ({
model,
res,
chatResponse,
prompts
}: StreamResponseType & {
model: ChatModelType;
}) => {
// create the response stream
res.setHeader('Content-Type', 'text/event-stream;charset=utf-8');
res.setHeader('Access-Control-Allow-Origin', '*');
res.setHeader('X-Accel-Buffering', 'no');
res.setHeader('Cache-Control', 'no-cache, no-transform');
const { responseContent, totalTokens, finishMessages } = await modelServiceToolMap.streamResponse(
{
chatResponse,
prompts,
res,
model
}
);
return { responseContent, totalTokens, finishMessages };
};
/* stream response */
export const V2_StreamResponse = async ({
model,
res,
chatResponse,
prompts
}: StreamResponseType & {
model: ChatModelType;
}) => {
let responseContent = '';
let error: any = null;
let truncateData = '';
const clientRes = async (data: string) => {
// some proxies truncate streamed chunks so they stop being valid JSON; re-assemble them here for compatibility
const { content = '' } = (() => {
try {
if (truncateData) {
try {
// if the chunk parses as JSON on its own, skip the re-assembly; this also tolerates the extreme case of a message split into 3+ chunks
JSON.parse(data);
} catch (e) {
data = truncateData + data;
}
truncateData = '';
}
const json = JSON.parse(data);
const content: string = json?.choices?.[0].delta.content || '';
error = json.error;
responseContent += content;
return { content };
} catch (error) {
truncateData = data;
return {};
}
})();
if (res.closed || error) return;
if (data === '[DONE]') {
sseResponse({
res,
event: sseResponseEventEnum.answer,
data: textAdaptGptResponse({
text: null,
finish_reason: 'stop'
})
});
sseResponse({
res,
event: sseResponseEventEnum.answer,
data: '[DONE]'
});
} else {
sseResponse({
res,
event: sseResponseEventEnum.answer,
data: textAdaptGptResponse({
text: content
})
});
}
};
try {
for await (const chunk of chatResponse.data as any) {
if (res.closed) break;
const parse = parseStreamChunk(chunk);
parse.forEach((item) => clientRes(item.data));
}
} catch (error) {
console.log('pipe error', error);
}
if (error) {
console.log(error);
return Promise.reject(error);
}
// count tokens
const finishMessages = prompts.concat({
obj: ChatRoleEnum.AI,
value: responseContent
});
const totalTokens = modelToolMap.countTokens({
model,
messages: finishMessages
});
return {
responseContent,
totalTokens,
finishMessages
};
};

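ChatContextFilter now takes any model id string rather than the ChatModelType union. A call sketch, assuming the project's { obj, value } message shape and that the import path matches the file above:

import { ChatContextFilter } from '@/service/utils/chat'; // assumed path

const filtered = ChatContextFilter({
  model: 'gpt-3.5-turbo-16k',
  prompts: [
    { obj: 'System', value: 'You are a helpful assistant.' },
    { obj: 'Human', value: 'Hello!' }
  ],
  // leave headroom for the completion itself
  maxTokens: 16000 - 2000
});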
View File

@ -1,133 +0,0 @@
import { Configuration, OpenAIApi } from 'openai';
import { axiosConfig } from '../tools';
import { ChatModelMap, OpenAiChatEnum } from '@/constants/model';
import { adaptChatItem_openAI } from '@/utils/plugin/openai';
import { modelToolMap } from '@/utils/plugin';
import { ChatCompletionType, ChatContextFilter, StreamResponseType } from './index';
import { ChatRoleEnum } from '@/constants/chat';
import { parseStreamChunk } from '@/utils/adapt';
export const getOpenAIApi = (apiKey: string) => {
const openaiBaseUrl = process.env.OPENAI_BASE_URL || 'https://api.openai.com/v1';
return new OpenAIApi(
new Configuration({
basePath: apiKey === process.env.ONEAPI_KEY ? process.env.ONEAPI_URL : openaiBaseUrl
})
);
};
/* model chat completion */
export const chatResponse = async ({
model,
apiKey,
temperature,
maxToken = 4000,
messages,
stream
}: ChatCompletionType & { model: `${OpenAiChatEnum}` }) => {
const modelTokenLimit = ChatModelMap[model]?.contextMaxToken || 4000;
const filterMessages = ChatContextFilter({
model,
prompts: messages,
maxTokens: Math.ceil(modelTokenLimit - 300) // filter token. not response maxToken
});
const adaptMessages = adaptChatItem_openAI({ messages: filterMessages, reserveId: false });
const chatAPI = getOpenAIApi(apiKey);
const promptsToken = modelToolMap.countTokens({
model,
messages: filterMessages
});
maxToken = maxToken + promptsToken > modelTokenLimit ? modelTokenLimit - promptsToken : maxToken;
const response = await chatAPI.createChatCompletion(
{
model,
temperature: Number(temperature || 0),
max_tokens: maxToken,
messages: adaptMessages,
      frequency_penalty: 0.5, // higher means less repetition
      presence_penalty: -0.5, // higher means more new content
stream
// stop: ['.!?。']
},
{
timeout: stream ? 60000 : 480000,
responseType: stream ? 'stream' : 'json',
...axiosConfig(apiKey)
}
);
const responseText = stream ? '' : response.data.choices?.[0].message?.content || '';
const totalTokens = stream ? 0 : response.data.usage?.total_tokens || 0;
return {
streamResponse: response,
responseMessages: filterMessages.concat({ obj: 'AI', value: responseText }),
responseText,
totalTokens
};
};
/* openai stream response */
export const openAiStreamResponse = async ({
res,
model,
chatResponse,
prompts
}: StreamResponseType & {
model: `${OpenAiChatEnum}`;
}) => {
try {
let responseContent = '';
const clientRes = async (data: string) => {
const { content = '' } = (() => {
try {
const json = JSON.parse(data);
const content: string = json?.choices?.[0].delta.content || '';
responseContent += content;
return { content };
} catch (error) {
return {};
}
})();
if (data === '[DONE]') return;
!res.closed && content && res.write(content);
};
try {
for await (const chunk of chatResponse.data as any) {
if (res.closed) break;
const parse = parseStreamChunk(chunk);
parse.forEach((item) => clientRes(item.data));
}
} catch (error) {
console.log('pipe error', error);
}
// count tokens
const finishMessages = prompts.concat({
obj: ChatRoleEnum.AI,
value: responseContent
});
const totalTokens = modelToolMap.countTokens({
model,
messages: finishMessages
});
return {
responseContent,
totalTokens,
finishMessages
};
} catch (error) {
return Promise.reject(error);
}
};

View File

@ -0,0 +1,14 @@
export const getChatModel = (model: string) => {
return global.chatModels.find((item) => item.model === model);
};
export const getVectorModel = (model: string) => {
return global.vectorModels.find((item) => item.model === model);
};
export const getQAModel = (model: string) => {
return global.qaModels.find((item) => item.model === model);
};
export const getModel = (model: string) => {
return [...global.chatModels, ...global.vectorModels, ...global.qaModels].find(
(item) => item.model === model
);
};

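These lookups back countModelPrice from pushBill.ts above; a quick usage sketch:

// resolve a model's metadata from whichever global list it lives in
const modelData = getModel('gpt-3.5-turbo');
// price per token times tokens consumed; returns 0 for unknown models
const amount = countModelPrice({ model: 'gpt-3.5-turbo', tokens: 1200 });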
View File

@ -4,7 +4,6 @@ import crypto from 'crypto';
import jwt from 'jsonwebtoken';
import { generateQA } from '../events/generateQA';
import { generateVector } from '../events/generateVector';
- import { sseResponseEventEnum } from '@/constants/chat';

/* hash the password */
export const hashPassword = (psw: string) => {
@ -33,20 +32,6 @@ export const clearCookie = (res: NextApiResponse) => {
  res.setHeader('Set-Cookie', 'token=; Path=/; Max-Age=0');
};

- /* openai axios config */
- export const axiosConfig = (apikey: string) => {
-   const openaiBaseUrl = process.env.OPENAI_BASE_URL || 'https://api.openai.com/v1';
-   return {
-     baseURL: apikey === process.env.ONEAPI_KEY ? process.env.ONEAPI_URL : openaiBaseUrl, // only effective for non-npm modules here
-     httpsAgent: global.httpsAgent,
-     headers: {
-       Authorization: `Bearer ${apikey}`,
-       auth: process.env.OPENAI_BASE_URL_AUTH || ''
-     }
-   };
- };

export function withNextCors(handler: NextApiHandler): NextApiHandler {
  return async function nextApiHandlerWrappedWithNextCors(
    req: NextApiRequest,

View File

@ -1,12 +1,8 @@
import { create } from 'zustand';
import { devtools } from 'zustand/middleware';
import { immer } from 'zustand/middleware/immer';
- import type { InitDateResponse } from '@/pages/api/system/getInitData';
- import { getInitData } from '@/api/system';

type State = {
- initData: InitDateResponse;
- loadInitData: () => Promise<void>;
  loading: boolean;
  setLoading: (val: boolean) => null;
  screenWidth: number;
@ -17,19 +13,6 @@ type State = {
export const useGlobalStore = create<State>()(
  devtools(
    immer((set, get) => ({
-     initData: {
-       beianText: '',
-       googleVerKey: '',
-       baiduTongji: false
-     },
-     async loadInitData() {
-       try {
-         const res = await getInitData();
-         set((state) => {
-           state.initData = res;
-         });
-       } catch (error) {}
-     },
      loading: false,
      setLoading: (val: boolean) => {
        set((state) => {

View File

@ -0,0 +1,36 @@
import {
type QAModelItemType,
type ChatModelItemType,
type VectorModelItemType
} from '@/types/model';
import type { InitDateResponse } from '@/pages/api/system/getInitData';
import { getInitData } from '@/api/system';
import { delay } from '@/utils/tools';
export let beianText = '';
export let googleVerKey = '';
export let baiduTongji = '';
export let chatModelList: ChatModelItemType[] = [];
export let qaModelList: QAModelItemType[] = [];
export let vectorModelList: VectorModelItemType[] = [];
let retryTimes = 3;
export const clientInitData = async (): Promise<InitDateResponse> => {
try {
const res = await getInitData();
chatModelList = res.chatModels;
qaModelList = res.qaModels;
vectorModelList = res.vectorModels;
beianText = res.beianText;
googleVerKey = res.googleVerKey;
baiduTongji = res.baiduTongji;
return res;
  } catch (error) {
    // stop after the retries are exhausted instead of recursing forever
    if (retryTimes <= 0) return Promise.reject(error);
    retryTimes--;
    await delay(500);
    return clientInitData();
}
};

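The client is expected to call clientInitData once at startup (for example from _app.tsx) before rendering anything that reads the exported lists. A sketch of that wiring; the hook name is hypothetical:

import { useEffect, useState } from 'react';
import { clientInitData } from '@/store/static';

function useInitData() {
  const [ready, setReady] = useState(false);
  useEffect(() => {
    // populates chatModelList / qaModelList / vectorModelList as a side effect
    clientInitData().then(() => setReady(true));
  }, []);
  return ready;
}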
View File

@ -42,7 +42,7 @@ export type ShareChatEditType = {

/* agent */
/* question classify */
- export type ClassifyQuestionAgentItemType = {
+ export type RecognizeIntentionAgentItemType = {
  value: string;
  key: string;
};

View File

@ -8,9 +8,6 @@ export type ChatItemType = {
  _id?: string;
  obj: `${ChatRoleEnum}`;
  value: string;
- quoteLen?: number;
- quote?: QuoteItemType[];
- systemPrompt?: string;
  [key: string]: any;
};

View File

@ -2,6 +2,7 @@ import type { Mongoose } from 'mongoose';
import type { Agent } from 'http';
import type { Pool } from 'pg';
import type { Tiktoken } from '@dqbd/tiktoken';
+ import { ChatModelItemType, QAModelItemType, VectorModelItemType } from './model';

export type PagingData<T> = {
  pageNum: number;
@ -16,9 +17,6 @@ declare global {
  var mongodb: Mongoose | string | null;
  var pgClient: Pool | null;
  var httpsAgent: Agent;
- var particlesJS: any;
- var grecaptcha: any;
- var QRCode: any;
  var qaQueueLen: number;
  var vectorQueueLen: number;
  var OpenAiEncMap: Tiktoken;
@ -30,8 +28,14 @@ declare global {
    pgIvfflatProbe: number;
    sensitiveCheck: boolean;
  };
+ var chatModels: ChatModelItemType[];
+ var qaModels: QAModelItemType[];
+ var vectorModels: VectorModelItemType[];

  interface Window {
    ['pdfjs-dist/build/pdf']: any;
+   particlesJS: any;
+   grecaptcha: any;
+   QRCode: any;
  }
}

client/src/types/model.d.ts (new file)
View File

@ -0,0 +1,19 @@
export type ChatModelItemType = {
model: string;
name: string;
contextMaxToken: number;
systemMaxToken: number;
maxTemperature: number;
price: number;
};
export type QAModelItemType = {
model: string;
name: string;
maxToken: number;
price: number;
};
export type VectorModelItemType = {
model: string;
name: string;
price: number;
};

View File

@ -1,7 +1,7 @@
import type { ChatItemType } from './chat';
import { ModelNameEnum, ChatModelType, EmbeddingModelType } from '@/constants/model';
import type { DataType } from './data';
- import { BillTypeEnum, InformTypeEnum } from '@/constants/user';
+ import { BillSourceEnum, InformTypeEnum } from '@/constants/user';
import { TrainingModeEnum } from '@/constants/plugin';
import type { AppModuleItemType } from './app';
@ -38,17 +38,6 @@ export interface AppSchema {
  avatar: string;
  intro: string;
  updateTime: number;
- chat: {
-   relatedKbs: string[];
-   searchSimilarity: number;
-   searchLimit: number;
-   searchEmptyText: string;
-   systemPrompt: string;
-   limitPrompt: string;
-   temperature: number;
-   maxToken: number;
-   chatModel: ChatModelType; // the model used for chat; after training it is the trained model
- };
  share: {
    isShare: boolean;
    isShareDetail: boolean;
@ -68,6 +57,7 @@ export interface TrainingDataSchema {
  kbId: string;
  expireAt: Date;
  lockTime: Date;
+ model: string;
  mode: `${TrainingModeEnum}`;
  prompt: string;
  q: string;
@ -87,16 +77,21 @@ export interface ChatSchema {
  content: ChatItemType[];
}

+ export type BillListItemType = {
+   moduleName: string;
+   amount: number;
+   model?: string;
+   tokenLen?: number;
+ };

export interface BillSchema {
  _id: string;
  userId: string;
- type: `${BillTypeEnum}`;
- modelName?: ChatModelType | EmbeddingModelType;
+ appName: string;
  appId?: string;
+ source: `${BillSourceEnum}`;
  time: Date;
- textLen: number;
- tokenLen: number;
- price: number;
+ total: number;
+ list: BillListItemType[];
}

export interface PaySchema {
export interface PaySchema { export interface PaySchema {

View File

@ -1,3 +1,4 @@
+ import { BillSourceEnum } from '@/constants/user';
import type { BillSchema } from './mongoSchema';

export interface UserType {
  _id: string;
@ -19,9 +20,7 @@ export interface UserUpdateParams {

export interface UserBillType {
  id: string;
  time: Date;
- modelName: string;
- type: BillSchema['type'];
- textLen: number;
- tokenLen: number;
- price: number;
+ appName: string;
+ source: BillSchema['source'];
+ total: number;
}

View File

@ -5,7 +5,6 @@ import { ChatItemType } from '@/types/chat';
import { ChatCompletionRequestMessageRoleEnum } from 'openai';
import { ChatRoleEnum } from '@/constants/chat';
import type { MessageItemType } from '@/pages/api/openapi/v1/chat/completions';
- import { ChatModelMap, OpenAiChatEnum } from '@/constants/model';
import type { AppModuleItemType } from '@/types/app';
import type { FlowModuleItemType } from '@/types/flow';
import type { Edge, Node } from 'reactflow';
@ -16,12 +15,10 @@ const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 6);

export const adaptBill = (bill: BillSchema): UserBillType => {
  return {
    id: bill._id,
-   type: bill.type,
-   modelName: ChatModelMap[bill.modelName as `${OpenAiChatEnum}`]?.name || bill.modelName,
+   source: bill.source,
    time: bill.time,
-   textLen: bill.textLen,
-   tokenLen: bill.tokenLen,
-   price: formatPrice(bill.price)
+   total: formatPrice(bill.total),
+   appName: bill.appName
  };
};

View File

@ -50,10 +50,10 @@ export const adaptChatItem_openAI = ({

export function countOpenAIToken({
  messages,
- model
+ model = 'gpt-3.5-turbo'
}: {
  messages: ChatItemType[];
- model: `${OpenAiChatEnum}`;
+ model?: string;
}) {
  const diffVal = model.startsWith('gpt-3.5-turbo') ? 3 : 2;
@ -74,7 +74,7 @@ export const openAiSliceTextByToken = ({
  text,
  length
}: {
- model: `${OpenAiChatEnum}`;
+ model: string;
  text: string;
  length: number;
}) => {