From 51a5d450b7a678dd15c11e61b66b4009e7c41e86 Mon Sep 17 00:00:00 2001
From: archer <545436317@qq.com>
Date: Sun, 21 May 2023 22:12:02 +0800
Subject: [PATCH] feat: content check

---
 .env.template                                |   2 +
 Dockerfile                                   |   2 +
 docs/deploy/fastgpt/docker-compose.yml       |   4 +
 docs/dev/README.md                           |  52 +++++----
 src/constants/plugin.ts                      |  10 ++
 src/pages/api/chat/chat.ts                   |  24 +++-
 src/pages/api/chat/shareChat/chat.ts         |  25 ++++-
 src/pages/api/openapi/chat/chat.ts           |  35 ++++--
 src/pages/api/openapi/text/sensitiveCheck.ts |  48 ++++++++
 src/service/api/request.ts                   | 110 +++++++++++++++++++
 src/service/api/text.ts                      |   5 +
 src/service/utils/auth.ts                    |   8 +-
 src/service/utils/chat/openai.ts             |  26 ++---
 src/service/utils/tools.ts                   |   4 +-
 src/types/plugin.d.ts                        |  20 ++++
 15 files changed, 310 insertions(+), 65 deletions(-)
 create mode 100644 src/pages/api/openapi/text/sensitiveCheck.ts
 create mode 100644 src/service/api/request.ts
 create mode 100644 src/service/api/text.ts

diff --git a/.env.template b/.env.template
index f79926885..c8ddc38c1 100644
--- a/.env.template
+++ b/.env.template
@@ -16,6 +16,8 @@ aliTemplateCode=SMS_xxx
 TOKEN_KEY=xxx
 # root key, 最高权限
 ROOT_KEY=xxx
+# 是否进行安全校验(1: 开启,0: 关闭)
+SENSITIVE_CHECK=1
 # openai
 # OPENAI_BASE_URL=https://api.openai.com/v1
 # OPENAI_BASE_URL_AUTH=可选的安全凭证(不需要的时候,记得去掉)
diff --git a/Dockerfile b/Dockerfile
index 0c60d1747..5b312044a 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -52,6 +52,8 @@ COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static
 
 USER nextjs
 
+ENV PORT=3000
+
 EXPOSE 3000
 
 CMD ["node", "server.js"]
diff --git a/docs/deploy/fastgpt/docker-compose.yml b/docs/deploy/fastgpt/docker-compose.yml
index 7ad4918f3..3df86fa9a 100644
--- a/docs/deploy/fastgpt/docker-compose.yml
+++ b/docs/deploy/fastgpt/docker-compose.yml
@@ -52,6 +52,10 @@ services:
       - aliTemplateCode=SMS_xxxx
       # token加密凭证(随便填,作为登录凭证)
       - TOKEN_KEY=xxxx
+      # root key, 最高权限
+      - ROOT_KEY=xxx
+      # 是否进行安全校验(1: 开启,0: 关闭)
+      - SENSITIVE_CHECK=1
       # 和上方mongo镜像的username,password对应
       - MONGODB_URI=mongodb://username:password@0.0.0.0:27017/?authSource=admin
       - MONGODB_NAME=fastgpt
diff --git a/docs/dev/README.md b/docs/dev/README.md
index 7d240e6e7..d86163068 100644
--- a/docs/dev/README.md
+++ b/docs/dev/README.md
@@ -10,34 +10,38 @@
 # proxy(可选)
 AXIOS_PROXY_HOST=127.0.0.1
 AXIOS_PROXY_PORT=7890
-# openai 中转连接(可选)
-OPENAI_BASE_URL=https://api.openai.com/v1
-OPENAI_BASE_URL_AUTH=可选的安全凭证
-# 是否开启队列任务。 1-开启,0-关闭(请求 parentUrl 去执行任务,单机时直接填1)
+# 是否开启队列任务。 1-开启,0-关闭(请求parentUrl去执行任务,单机时直接填1)
 queueTask=1
 parentUrl=https://hostname/api/openapi/startEvents
-# 发送邮箱验证码配置。用的是 QQ 邮箱。参考 nodeMail 获取MAILE_CODE,自行百度。
-MY_MAIL=xxxx@qq.com
-MAILE_CODE=xxxx
-# 阿里短信服务(邮箱和短信至少二选一)
-aliAccessKeyId=xxxx
-aliAccessKeySecret=xxxx
-aliSignName=xxxxx
-aliTemplateCode=SMS_xxxx
-# token加密凭证(随便填,作为登录凭证)
-TOKEN_KEY=xxxx
-queueTask=1
-parentUrl=https://hostname/api/openapi/startEvents
-# 和mongo镜像的username,password对应
-MONGODB_URI=mongodb://username:passsword@0.0.0.0:27017/?authSource=admin
-MONGODB_NAME=xxx
+# email
+MY_MAIL=xxx@qq.com
+MAILE_CODE=xxx
+# ali ems
+aliAccessKeyId=xxx
+aliAccessKeySecret=xxx
+aliSignName=xxx
+aliTemplateCode=SMS_xxx
+# token
+TOKEN_KEY=xxx
+# root key, 最高权限
+ROOT_KEY=xxx
+# 是否进行安全校验(1: 开启,0: 关闭)
+SENSITIVE_CHECK=1
+# openai
+# OPENAI_BASE_URL=https://api.openai.com/v1
+# OPENAI_BASE_URL_AUTH=可选的安全凭证(不需要的时候,记得去掉)
+OPENAIKEY=sk-xxx
+GPT4KEY=sk-xxx
+# claude
+CLAUDE_BASE_URL=calude模型请求地址
+CLAUDE_KEY=CLAUDE_KEY
+# db
+MONGODB_URI=mongodb://username:password@0.0.0.0:27017/test?authSource=admin
 PG_HOST=0.0.0.0
 PG_PORT=8100
-# 和PG镜像对应.
-PG_USER=fastgpt # POSTGRES_USER
-PG_PASSWORD=1234 # POSTGRES_PASSWORD
-PG_DB_NAME=fastgpt # POSTGRES_DB
-OPENAIKEY=sk-xxxxx
+PG_USER=xxx
+PG_PASSWORD=xxx
+PG_DB_NAME=xxx
 ```
 
 ## 运行
diff --git a/src/constants/plugin.ts b/src/constants/plugin.ts
index 83b301382..5bc9ac312 100644
--- a/src/constants/plugin.ts
+++ b/src/constants/plugin.ts
@@ -2,3 +2,13 @@ export enum SplitTextTypEnum {
   'qa' = 'qa',
   'subsection' = 'subsection'
 }
+
+export enum PluginTypeEnum {
+  LLM = 'LLM',
+  Text = 'Text',
+  Function = 'Function'
+}
+
+export enum PluginParamsTypeEnum {
+  'Text' = 'text'
+}
diff --git a/src/pages/api/chat/chat.ts b/src/pages/api/chat/chat.ts
index 5de8cfce2..dcc459359 100644
--- a/src/pages/api/chat/chat.ts
+++ b/src/pages/api/chat/chat.ts
@@ -10,6 +10,7 @@ import { resStreamResponse } from '@/service/utils/chat';
 import { searchKb } from '@/service/plugins/searchKb';
 import { ChatRoleEnum } from '@/constants/chat';
 import { BillTypeEnum } from '@/constants/user';
+import { sensitiveCheck } from '@/service/api/text';
 
 /* 发送提示词 */
 export default async function handler(req: NextApiRequest, res: NextApiResponse) {
@@ -44,6 +45,10 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
 
     // 读取对话内容
     const prompts = [...content, prompt];
+    let systemPrompts: {
+      obj: ChatRoleEnum;
+      value: string;
+    }[] = [];
 
     // 使用了知识库搜索
     if (model.chat.relatedKbs.length > 0) {
@@ -60,16 +65,23 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
         return res.send(searchPrompts[0]?.value);
       }
 
-      prompts.splice(prompts.length - 3, 0, ...searchPrompts);
-    } else {
-      // 没有用知识库搜索,仅用系统提示词
-      model.chat.systemPrompt &&
-        prompts.splice(prompts.length - 3, 0, {
+      systemPrompts = searchPrompts;
+    } else if (model.chat.systemPrompt) {
+      systemPrompts = [
+        {
           obj: ChatRoleEnum.System,
           value: model.chat.systemPrompt
-        });
+        }
+      ];
     }
 
+    prompts.splice(prompts.length - 3, 0, ...systemPrompts);
+
+    // content check
+    await sensitiveCheck({
+      input: [...systemPrompts, prompt].map((item) => item.value).join('')
+    });
+
     // 计算温度
     const temperature = (modelConstantsData.maxTemperature * (model.chat.temperature / 10)).toFixed(
       2
diff --git a/src/pages/api/chat/shareChat/chat.ts b/src/pages/api/chat/shareChat/chat.ts
index 17676897d..3ebbbcfde 100644
--- a/src/pages/api/chat/shareChat/chat.ts
+++ b/src/pages/api/chat/shareChat/chat.ts
@@ -10,6 +10,7 @@ import { resStreamResponse } from '@/service/utils/chat';
 import { searchKb } from '@/service/plugins/searchKb';
 import { ChatRoleEnum } from '@/constants/chat';
 import { BillTypeEnum } from '@/constants/user';
+import { sensitiveCheck } from '@/service/api/text';
 
 /* 发送提示词 */
 export default async function handler(req: NextApiRequest, res: NextApiResponse) {
@@ -41,6 +42,11 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
 
     const modelConstantsData = ChatModelMap[model.chat.chatModel];
 
+    let systemPrompts: {
+      obj: ChatRoleEnum;
+      value: string;
+    }[] = [];
+
     // 使用了知识库搜索
     if (model.chat.relatedKbs.length > 0) {
       const { code, searchPrompts } = await searchKb({
@@ -56,16 +62,23 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
         return res.send(searchPrompts[0]?.value);
       }
 
-      prompts.splice(prompts.length - 3, 0, ...searchPrompts);
-    } else {
-      // 没有用知识库搜索,仅用系统提示词
-      model.chat.systemPrompt &&
-        prompts.splice(prompts.length - 3, 0, {
+      systemPrompts = searchPrompts;
+    } else if (model.chat.systemPrompt) {
+      systemPrompts = [
+        {
           obj: ChatRoleEnum.System,
           value: model.chat.systemPrompt
-        });
+        }
+      ];
     }
 
+    prompts.splice(prompts.length - 3, 0, ...systemPrompts);
+
+    // content check
+    await sensitiveCheck({
+      input: [...systemPrompts, prompts[prompts.length - 1]].map((item) => item.value).join('')
+    });
+
     // 计算温度
     const temperature = (modelConstantsData.maxTemperature * (model.chat.temperature / 10)).toFixed(
       2
diff --git a/src/pages/api/openapi/chat/chat.ts b/src/pages/api/openapi/chat/chat.ts
index 64881fad4..e15d861cd 100644
--- a/src/pages/api/openapi/chat/chat.ts
+++ b/src/pages/api/openapi/chat/chat.ts
@@ -10,6 +10,7 @@ import { searchKb } from '@/service/plugins/searchKb';
 import { ChatRoleEnum } from '@/constants/chat';
 import { withNextCors } from '@/service/utils/tools';
 import { BillTypeEnum } from '@/constants/user';
+import { sensitiveCheck } from '@/service/api/text';
 
 /* 发送提示词 */
 export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse) {
@@ -62,13 +63,16 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
 
     const modelConstantsData = ChatModelMap[model.chat.chatModel];
 
+    let systemPrompts: {
+      obj: ChatRoleEnum;
+      value: string;
+    }[] = [];
+
     // 使用了知识库搜索
     if (model.chat.relatedKbs.length > 0) {
-      const similarity = ModelVectorSearchModeMap[model.chat.searchMode]?.similarity || 0.22;
-
       const { code, searchPrompts } = await searchKb({
         prompts,
-        similarity,
+        similarity: ModelVectorSearchModeMap[model.chat.searchMode]?.similarity,
         model,
         userId
       });
@@ -77,18 +81,29 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
       if (code === 201) {
         return isStream
           ? res.send(searchPrompts[0]?.value)
-          : jsonRes(res, { data: searchPrompts[0]?.value });
+          : jsonRes(res, {
+              data: searchPrompts[0]?.value,
+              message: searchPrompts[0]?.value
+            });
       }
-      prompts.splice(prompts.length - 3, 0, ...searchPrompts);
-    } else {
-      // 没有用知识库搜索,仅用系统提示词
-      model.chat.systemPrompt &&
-        prompts.splice(prompts.length - 3, 0, {
+
+      systemPrompts = searchPrompts;
+    } else if (model.chat.systemPrompt) {
+      systemPrompts = [
+        {
           obj: ChatRoleEnum.System,
           value: model.chat.systemPrompt
-        });
+        }
+      ];
     }
 
+    prompts.splice(prompts.length - 3, 0, ...systemPrompts);
+
+    // content check
+    await sensitiveCheck({
+      input: [...systemPrompts, prompts[prompts.length - 1]].map((item) => item.value).join('')
+    });
+
     // 计算温度
     const temperature = (modelConstantsData.maxTemperature * (model.chat.temperature / 10)).toFixed(
       2
diff --git a/src/pages/api/openapi/text/sensitiveCheck.ts b/src/pages/api/openapi/text/sensitiveCheck.ts
new file mode 100644
index 000000000..c9921040b
--- /dev/null
+++ b/src/pages/api/openapi/text/sensitiveCheck.ts
@@ -0,0 +1,48 @@
+// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
+import type { NextApiRequest, NextApiResponse } from 'next';
+import { jsonRes } from '@/service/response';
+import { authUser, getSystemOpenAiKey } from '@/service/utils/auth';
+import type { TextPluginRequestParams } from '@/types/plugin';
+import axios from 'axios';
+import { axiosConfig } from '@/service/utils/tools';
+
+export default async function handler(req: NextApiRequest, res: NextApiResponse) {
+  try {
+    if (process.env.SENSITIVE_CHECK !== '1') {
+      return jsonRes(res);
+    }
+
+    await authUser({ req });
+
+    const { input } = req.body as TextPluginRequestParams;
+
+    const response = await axios({
+      ...axiosConfig(getSystemOpenAiKey()),
+      method: 'POST',
+      url: `/moderations`,
+      data: {
+        input
+      }
+    });
+
+    const data = (response.data.results?.[0]?.category_scores as Record<string, number>) || {};
+
+    const values = Object.values(data);
+
+    for (const val of values) {
+      if (val > 0.2) {
+        return jsonRes(res, {
+          code: 500,
+          message: '您的内容不合规'
+        });
+      }
+    }
+
+    jsonRes(res);
+  } catch (err) {
+    jsonRes(res, {
+      code: 500,
+      error: err
+    });
+  }
+}
diff --git a/src/service/api/request.ts b/src/service/api/request.ts
new file mode 100644
index 000000000..592c253be
--- /dev/null
+++ b/src/service/api/request.ts
@@ -0,0 +1,110 @@
+import axios, { Method, InternalAxiosRequestConfig, AxiosResponse } from 'axios';
+
+interface ConfigType {
+  headers?: { [key: string]: string };
+  hold?: boolean;
+}
+interface ResponseDataType {
+  code: number;
+  message: string;
+  data: any;
+}
+
+/**
+ * 请求开始
+ */
+function requestStart(config: InternalAxiosRequestConfig): InternalAxiosRequestConfig {
+  if (config.headers) {
+    config.headers.rootkey = process.env.ROOT_KEY;
+  }
+
+  return config;
+}
+
+/**
+ * 请求成功,检查请求头
+ */
+function responseSuccess(response: AxiosResponse) {
+  return response;
+}
+/**
+ * 响应数据检查
+ */
+function checkRes(data: ResponseDataType) {
+  if (data === undefined) {
+    return Promise.reject('服务器异常');
+  } else if (data.code < 200 || data.code >= 400) {
+    return Promise.reject(data);
+  }
+  return data.data;
+}
+
+/**
+ * 响应错误
+ */
+function responseError(err: any) {
+  if (!err) {
+    return Promise.reject({ message: '未知错误' });
+  }
+  if (typeof err === 'string') {
+    return Promise.reject({ message: err });
+  }
+  return Promise.reject(err);
+}
+
+/* 创建请求实例 */
+const instance = axios.create({
+  timeout: 60000, // 超时时间
+  headers: {
+    'content-type': 'application/json'
+  }
+});
+
+/* 请求拦截 */
+instance.interceptors.request.use(requestStart, (err) => Promise.reject(err));
+/* 响应拦截 */
+instance.interceptors.response.use(responseSuccess, (err) => Promise.reject(err));
+
+function request(url: string, data: any, config: ConfigType, method: Method): any {
+  /* 去空 */
+  for (const key in data) {
+    if (data[key] === null || data[key] === undefined) {
+      delete data[key];
+    }
+  }
+
+  return instance
+    .request({
+      baseURL: `http://localhost:${process.env.PORT || 3000}/api`,
+      url,
+      method,
+      data: method === 'GET' ? null : data,
+      params: method === 'GET' ? data : null, // get请求不携带data,params放在url上
+      ...config // 用户自定义配置,可以覆盖前面的配置
+    })
+    .then((res) => checkRes(res.data))
+    .catch((err) => responseError(err));
+}
+
+/**
+ * api请求方式
+ * @param {String} url
+ * @param {Any} params
+ * @param {Object} config
+ * @returns
+ */
+export function GET<T>(url: string, params = {}, config: ConfigType = {}): Promise<T> {
+  return request(url, params, config, 'GET');
+}
+
+export function POST<T>(url: string, data = {}, config: ConfigType = {}): Promise<T> {
+  return request(url, data, config, 'POST');
+}
+
+export function PUT<T>(url: string, data = {}, config: ConfigType = {}): Promise<T> {
+  return request(url, data, config, 'PUT');
+}
+
+export function DELETE<T>(url: string, config: ConfigType = {}): Promise<T> {
+  return request(url, {}, config, 'DELETE');
+}
diff --git a/src/service/api/text.ts b/src/service/api/text.ts
new file mode 100644
index 000000000..33794aaaf
--- /dev/null
+++ b/src/service/api/text.ts
@@ -0,0 +1,5 @@
+import { POST } from './request';
+import type { TextPluginRequestParams } from '@/types/plugin';
+
+export const sensitiveCheck = (data: TextPluginRequestParams) =>
+  POST('/openapi/text/sensitiveCheck', data);
diff --git a/src/service/utils/auth.ts b/src/service/utils/auth.ts
index 0c6d7d7d6..f407b9386 100644
--- a/src/service/utils/auth.ts
+++ b/src/service/utils/auth.ts
@@ -66,8 +66,8 @@ export const authUser = async ({
       return Promise.reject(error);
     }
   };
-  const parseRootKey = async (rootKey?: string, userId?: string) => {
-    if (!rootKey || !userId || !process.env.ROOT_KEY || rootKey !== process.env.ROOT_KEY) {
+  const parseRootKey = async (rootKey?: string, userId = '') => {
+    if (!rootKey || !process.env.ROOT_KEY || rootKey !== process.env.ROOT_KEY) {
       return Promise.reject(ERROR_ENUM.unAuthorization);
     }
     return userId;
@@ -104,7 +104,7 @@ export const authUser = async ({
 };
 
 /* random get openai api key */
-export const getOpenAiKey = () => {
+export const getSystemOpenAiKey = () => {
   // 纯字符串类型
   const keys = process.env.OPENAIKEY?.split(',') || [];
   const i = Math.floor(Math.random() * keys.length);
@@ -129,7 +129,7 @@ export const getApiKey = async ({
   const keyMap = {
     [OpenAiChatEnum.GPT35]: {
       userOpenAiKey: user.openaiKey || '',
-      systemAuthKey: getOpenAiKey() as string
+      systemAuthKey: getSystemOpenAiKey() as string
     },
     [OpenAiChatEnum.GPT4]: {
       userOpenAiKey: user.openaiKey || '',
diff --git a/src/service/utils/chat/openai.ts b/src/service/utils/chat/openai.ts
index 95a2993fc..7e4559e46 100644
--- a/src/service/utils/chat/openai.ts
+++ b/src/service/utils/chat/openai.ts
@@ -7,16 +7,14 @@ import { adaptChatItem_openAI } from '@/utils/chat/openai';
 import { modelToolMap } from '@/utils/chat';
 import { ChatCompletionType, ChatContextFilter, StreamResponseType } from './index';
 import { ChatRoleEnum } from '@/constants/chat';
-import { getOpenAiKey } from '../auth';
+import { getSystemOpenAiKey } from '../auth';
 
-export const getOpenAIApi = (apiKey: string) => {
-  const configuration = new Configuration({
-    apiKey,
-    basePath: process.env.OPENAI_BASE_URL
-  });
-
-  return new OpenAIApi(configuration);
-};
+export const getOpenAIApi = () =>
+  new OpenAIApi(
+    new Configuration({
+      basePath: process.env.OPENAI_BASE_URL || 'https://api.openai.com/v1'
+    })
+  );
 
 /* 获取向量 */
 export const openaiCreateEmbedding = async ({
@@ -28,10 +26,10 @@ export const openaiCreateEmbedding = async ({
   userId: string;
   textArr: string[];
 }) => {
-  const systemAuthKey = getOpenAiKey();
+  const systemAuthKey = getSystemOpenAiKey();
 
   // 获取 chatAPI
-  const chatAPI = getOpenAIApi(userOpenAiKey || systemAuthKey);
+  const chatAPI = getOpenAIApi();
 
   // 把输入的内容转成向量
   const res = await chatAPI
@@ -42,7 +40,7 @@
       },
       {
         timeout: 60000,
-        ...axiosConfig()
+        ...axiosConfig(userOpenAiKey || systemAuthKey)
       }
     )
     .then((res) => ({
@@ -78,7 +76,7 @@ export const chatResponse = async ({
   });
 
   const adaptMessages = adaptChatItem_openAI({ messages: filterMessages });
-  const chatAPI = getOpenAIApi(apiKey);
+  const chatAPI = getOpenAIApi();
 
   const response = await chatAPI.createChatCompletion(
     {
@@ -93,7 +91,7 @@
     {
       timeout: stream ? 60000 : 240000,
       responseType: stream ? 'stream' : 'json',
-      ...axiosConfig()
+      ...axiosConfig(apiKey)
     }
   );
 
diff --git a/src/service/utils/tools.ts b/src/service/utils/tools.ts
index 106528888..8615020f5 100644
--- a/src/service/utils/tools.ts
+++ b/src/service/utils/tools.ts
@@ -31,9 +31,11 @@ export const clearCookie = (res: NextApiResponse) => {
 };
 
 /* openai axios config */
-export const axiosConfig = () => ({
+export const axiosConfig = (apikey: string) => ({
+  baseURL: process.env.OPENAI_BASE_URL || 'https://api.openai.com/v1',
   httpsAgent: global.httpsAgent,
   headers: {
+    Authorization: `Bearer ${apikey}`,
     auth: process.env.OPENAI_BASE_URL_AUTH || ''
   }
 });
diff --git a/src/types/plugin.d.ts b/src/types/plugin.d.ts
index f122c5266..7448e8987 100644
--- a/src/types/plugin.d.ts
+++ b/src/types/plugin.d.ts
@@ -1,10 +1,12 @@
 import type { kbSchema } from './mongoSchema';
+import { PluginTypeEnum } from '@/constants/plugin';
 
 /* kb type */
 export interface KbItemType extends kbSchema {
   totalData: number;
   tags: string;
 }
+
 export interface KbDataItemType {
   id: string;
   status: 'waiting' | 'ready';
@@ -13,3 +15,21 @@ export interface KbDataItemType {
   kbId: string;
   userId: string;
 }
+
+/* plugin */
+export interface PluginConfig {
+  name: string;
+  desc: string;
+  url: string;
+  category: `${PluginTypeEnum}`;
+  uniPrice: 22; // 1k token
+  params: [
+    {
+      type: '';
+    }
+  ];
+}
+
+export type TextPluginRequestParams = {
+  input: string;
+};
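
A minimal usage sketch, not part of the diff above: it assumes the service listens on localhost:3000 and that ROOT_KEY matches the server's environment. The `rootkey` header is the same one the patch's `requestStart` interceptor attaches in src/service/api/request.ts; in-process callers would normally go through the `sensitiveCheck` helper from src/service/api/text.ts instead of issuing the HTTP request by hand.

```ts
// Sketch only: call the new /api/openapi/text/sensitiveCheck route from a standalone script.
// Host, port and ROOT_KEY value are assumptions about the local deployment.
import axios from 'axios';

async function checkText(input: string) {
  try {
    const res = await axios.post(
      'http://localhost:3000/api/openapi/text/sensitiveCheck',
      { input },
      { headers: { rootkey: process.env.ROOT_KEY || '' } }
    );
    // The handler replies through jsonRes; any moderation category score above 0.2
    // is reported as code 500 with the message '您的内容不合规'.
    console.log(res.data);
  } catch (err: any) {
    console.error(err.response?.data || err.message);
  }
}

checkText('hello world');
```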