v4.5 (#403)

parent dd8f2744bf
commit 3b776b6639
@@ -9,14 +9,12 @@ ARG name
 # copy packages and one project
 COPY package.json pnpm-lock.yaml pnpm-workspace.yaml ./
 COPY ./packages ./packages
-COPY ./projects/$name ./projects/$name
+COPY ./projects/$name/package.json ./projects/$name/package.json

 RUN \
     [ -f pnpm-lock.yaml ] && pnpm install || \
     (echo "Lockfile not found." && exit 1)

-RUN pnpm prune

 # Rebuild the source code only when needed
 FROM node:current-alpine AS builder
 WORKDIR /app
@@ -24,9 +22,11 @@ WORKDIR /app
 ARG name

 # copy common node_modules and one project node_modules
+COPY package.json pnpm-workspace.yaml ./
 COPY --from=deps /app/node_modules ./node_modules
 COPY --from=deps /app/packages ./packages
-COPY --from=deps /app/projects/$name ./projects/$name
+COPY ./projects/$name ./projects/$name
+COPY --from=deps /app/projects/$name/node_modules ./projects/$name/node_modules

 # Uncomment the following line in case you want to disable telemetry during the build.
 ENV NEXT_TELEMETRY_DISABLED 1
BIN  docSite/assets/imgs/v45-1.png (new file, 1.3 MiB, binary not shown)
BIN  docSite/assets/imgs/v45-2.png (new file, 286 KiB, binary not shown)
BIN  docSite/assets/imgs/v45-3.png (new file, 382 KiB, binary not shown)
BIN  docSite/assets/imgs/v45-4.png (new file, 289 KiB, binary not shown)
@@ -63,15 +63,15 @@ Authorization 为 sk-aaabbbcccdddeeefffggghhhiiijjjkkk。model 为刚刚在 One

 ```json
 "ChatModels": [
-  //已有模型
+  //其他对话模型
   {
     "model": "chatglm2",
     "name": "chatglm2",
-    "contextMaxToken": 8000,
+    "maxToken": 8000,
+    "price": 0,
     "quoteMaxToken": 4000,
     "maxTemperature": 1.2,
-    "price": 0,
-    "defaultSystem": ""
+    "defaultSystemChatPrompt": ""
   }
 ],
 "VectorModels": [
@@ -107,11 +107,11 @@ Authorization 为 sk-aaabbbcccdddeeefffggghhhiiijjjkkk。model 为刚刚在 One
   {
     "model": "chatglm2",
     "name": "chatglm2",
-    "contextMaxToken": 8000,
+    "maxToken": 8000,
+    "price": 0,
     "quoteMaxToken": 4000,
     "maxTemperature": 1.2,
-    "price": 0,
-    "defaultSystem": ""
+    "defaultSystemChatPrompt": ""
   }
 ]
 ```
@@ -27,31 +27,75 @@ weight: 520
 },
 "ChatModels": [
   {
-    "model": "gpt-3.5-turbo",
-    "name": "GPT35-4k",
-    "contextMaxToken": 4000, // 最大token,均按 gpt35 计算
+    "model": "gpt-3.5-turbo", // 实际调用的模型
+    "name": "GPT35-4k", // 展示的名字
+    "maxToken": 4000, // 最大token,均按 gpt35 计算
     "quoteMaxToken": 2000, // 引用内容最大 token
     "maxTemperature": 1.2, // 最大温度
     "price": 0,
-    "defaultSystem": ""
+    "defaultSystemChatPrompt": ""
   },
   {
     "model": "gpt-3.5-turbo-16k",
     "name": "GPT35-16k",
-    "contextMaxToken": 16000,
+    "maxToken": 16000,
     "quoteMaxToken": 8000,
     "maxTemperature": 1.2,
     "price": 0,
-    "defaultSystem": ""
+    "defaultSystemChatPrompt": ""
   },
   {
     "model": "gpt-4",
     "name": "GPT4-8k",
-    "contextMaxToken": 8000,
+    "maxToken": 8000,
     "quoteMaxToken": 4000,
     "maxTemperature": 1.2,
     "price": 0,
-    "defaultSystem": ""
+    "defaultSystemChatPrompt": ""
+  }
+],
+"QAModels": [ // QA 拆分模型
+  {
+    "model": "gpt-3.5-turbo-16k",
+    "name": "GPT35-16k",
+    "maxToken": 16000,
+    "price": 0
+  }
+],
+"ExtractModels": [ // 内容提取模型
+  {
+    "model": "gpt-3.5-turbo-16k",
+    "name": "GPT35-16k",
+    "maxToken": 16000,
+    "price": 0,
+    "functionCall": true, // 是否支持 function call
+    "functionPrompt": "" // 自定义非 function call 提示词
+  }
+],
+"CQModels": [ // Classify Question: 问题分类模型
+  {
+    "model": "gpt-3.5-turbo-16k",
+    "name": "GPT35-16k",
+    "maxToken": 16000,
+    "price": 0,
+    "functionCall": true,
+    "functionPrompt": ""
+  },
+  {
+    "model": "gpt-4",
+    "name": "GPT4-8k",
+    "maxToken": 8000,
+    "price": 0,
+    "functionCall": true,
+    "functionPrompt": ""
+  }
+],
+"QGModels": [ // Question Generation: 生成下一步指引模型
+  {
+    "model": "gpt-3.5-turbo",
+    "name": "GPT35-4k",
+    "maxToken": 4000,
+    "price": 0
   }
 ],
 "VectorModels": [
@@ -62,36 +106,6 @@ weight: 520
     "defaultToken": 500,
     "maxToken": 3000
   }
-],
-"QAModel": { // QA 拆分模型
-  "model": "gpt-3.5-turbo-16k",
-  "name": "GPT35-16k",
-  "maxToken": 16000,
-  "price": 0
-},
-"ExtractModel": { // 内容提取模型
-  "model": "gpt-3.5-turbo-16k",
-  "functionCall": true, // 是否使用 functionCall
-  "name": "GPT35-16k",
-  "maxToken": 16000,
-  "price": 0,
-  "prompt": ""
-},
-"CQModel": { // Classify Question: 问题分类模型
-  "model": "gpt-3.5-turbo-16k",
-  "functionCall": true,
-  "name": "GPT35-16k",
-  "maxToken": 16000,
-  "price": 0,
-  "prompt": ""
-},
-"QGModel": { // Question Generation: 生成下一步指引模型
-  "model": "gpt-3.5-turbo",
-  "name": "GPT35-4k",
-  "maxToken": 4000,
-  "price": 0,
-  "prompt": "",
-  "functionCall": false
-}
+]
 }
 ```
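Editor's aside, not part of the commit: the hunks above rename `contextMaxToken` to `maxToken` and `defaultSystem` to `defaultSystemChatPrompt`, and turn the QA/extract/classify/question-guide model settings into arrays. A TypeScript sketch of the resulting entry shapes, matching the field lists that the `defaultChatModels`/`defaultCQModels` constants later in this diff use; the type names themselves are illustrative, not the project's actual type file:

```ts
// v4.5 config.json entry shapes, inferred from this diff (type names illustrative)
type ChatModelEntry = {
  model: string; // model id sent to the API / One API
  name: string; // display name
  maxToken: number; // renamed from contextMaxToken
  quoteMaxToken: number;
  maxTemperature: number;
  price: number;
  defaultSystemChatPrompt: string; // renamed from defaultSystem
};

type FunctionModelEntry = {
  model: string;
  name: string;
  maxToken: number;
  price: number;
  functionCall: boolean; // whether the model supports function call
  functionPrompt: string; // fallback prompt for non function-call models
};

// v4.5 groups: ChatModels: ChatModelEntry[]; QAModels/QGModels use the subset
// { model, name, maxToken, price }; ExtractModels/CQModels: FunctionModelEntry[].
```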
@@ -139,6 +139,21 @@ docker-compose 端口定义为:`映射端口:运行端口`。

 (自行补习 docker 基本知识)

+### relation "modeldata" does not exist
+
+PG 数据库没有连接上/初始化失败,可以查看日志。FastGPT 会在每次连接上 PG 时进行表初始化,如果报错会有对应日志。
+
+1. 检查数据库容器是否正常启动
+2. 非 docker 部署的,需要手动安装 pg vector 插件
+3. 查看 fastgpt 日志,有没有相关报错
+
+### Operation `auth_codes.findOne()` buffering timed out after 10000ms
+
+mongo连接失败,检查
+1. mongo 服务有没有起来(有些 cpu 不支持 AVX,无法用 mongo5,需要换成 mongo4.x,可以dockerhub找个最新的4.x,修改镜像版本,重新运行)
+2. 环境变量(账号密码,注意host和port)
+
+
 ### 错误排查方式

 遇到问题先按下面方式排查。
@@ -99,12 +99,12 @@ CHAT_API_KEY=sk-xxxxxx
 {
   "model": "ERNIE-Bot", // 这里的模型需要对应 One API 的模型
   "name": "文心一言", // 对外展示的名称
-  "contextMaxToken": 4000, // 最大长下文 token,无论什么模型都按 GPT35 的计算。GPT 外的模型需要自行大致计算下这个值。可以调用官方接口去比对 Token 的倍率,然后在这里粗略计算。
+  "maxToken": 4000, // 最大长下文 token,无论什么模型都按 GPT35 的计算。GPT 外的模型需要自行大致计算下这个值。可以调用官方接口去比对 Token 的倍率,然后在这里粗略计算。
   // 例如:文心一言的中英文 token 基本是 1:1,而 GPT 的中文 Token 是 2:1,如果文心一言官方最大 Token 是 4000,那么这里就可以填 8000,保险点就填 7000.
+  "price": 0, // 1个token 价格 => 1.5 / 100000 * 1000 = 0.015元/1k token
   "quoteMaxToken": 2000, // 引用知识库的最大 Token
   "maxTemperature": 1, // 最大温度
-  "price": 0, // 1个token 价格 => 1.5 / 100000 * 1000 = 0.015元/1k token
-  "defaultSystem": "" // 默认的系统提示词
+  "defaultSystemChatPrompt": "" // 默认的系统提示词
 }
 ...
 ],
docSite/content/docs/installation/upgrading/45.md (new file, 84 lines)
@@ -0,0 +1,84 @@
+---
+title: 'V4.5(需进行较为复杂更新)'
+description: 'FastGPT V4.5 更新'
+icon: 'upgrade'
+draft: false
+toc: true
+weight: 839
+---
+
+FastGPT V4.5 引入 PgVector0.5 版本的 HNSW 索引,极大的提高了知识库检索的速度,比起`IVFFlat`索引大致有3~10倍的性能提升,可轻松实现百万数据毫秒级搜索。缺点在于构建索引的速度非常慢,4c16g 500w 组数据使用`并行构建`大约花了 48 小时。具体参数配置可参考 [PgVector官方](https://github.com/pgvector/pgvector)
+
+下面需要对数据库进行一些操作升级:
+
+## PgVector升级:Sealos 部署方案
+
+1. 点击[Sealos桌面](https://cloud.sealos.io)的数据库应用。
+2. 点击【pg】数据库的详情。
+3. 点击右上角的重启,等待重启完成。
+4. 点击左侧的一键链接,等待打开 Terminal。
+5. 依次输入下方 sql 命令
+
+```sql
+-- 升级插件名
+ALTER EXTENSION vector UPDATE;
+-- 插件是否升级成功,成功的话,vector插件版本为 0.5.0,旧版的为 0.4.1
+\dx
+
+-- 下面两个语句会设置 pg 在构建索引时可用的内存大小,需根据自身的数据库规格来动态配置,可配置为 1/4 的内存大小
+alter system set maintenance_work_mem = '2400MB';
+select pg_reload_conf();
+
+-- 开始构建索引,该索引构建时间非常久,直接点击右上角的叉,退出 Terminal 即可
+CREATE INDEX CONCURRENTLY vector_index ON modeldata USING hnsw (vector vector_ip_ops) WITH (m = 16, ef_construction = 64);
+-- 可以再次点击一键链接,进入 Terminal,输入下方命令,如果看到 "vector_index" hnsw (vector vector_ip_ops) WITH (m='16', ef_construction='64') 则代表构建完成(注意,后面没有 INVALID)
+\d modeldata
+```
+
+| | |
+| --------------------- | --------------------- |
+|  |  |
+|  |  |
+
+## PgVector升级:Docker-compose.yml 部署方案
+
+下面的命令是基于给的 docker-compose 模板,如果数据库账号密码更换了,请自行调整。
+
+1. 修改 `docker-compose.yml` 中pg的镜像版本,改成 `ankane/pgvector:v0.5.0` 或 `registry.cn-hangzhou.aliyuncs.com/fastgpt/pgvector:v0.5.0`
+2. 重启 pg 容器(docker-compose pull && docker-compose up -d),等待重启完成。
+3. 进入容器: `docker exec -it pg bash`
+4. 连接数据库: `psql 'postgresql://username:password@localhost:5432/postgres'`
+5. 执行下面 sql 命令
+
+```sql
+-- 升级插件名
+ALTER EXTENSION vector UPDATE;
+-- 插件是否升级成功,成功的话,vector插件版本为 0.5.0,旧版的为 0.4.2
+\dx
+
+-- 下面两个语句会设置 pg 在构建索引时可用的内存大小,需根据自身的数据库规格来动态配置,可配置为 1/4 的内存大小
+alter system set maintenance_work_mem = '2400MB';
+select pg_reload_conf();
+
+-- 开始构建索引,该索引构建时间非常久,直接关掉终端即可,不要使用 ctrl+c 关闭
+CREATE INDEX CONCURRENTLY vector_index ON modeldata USING hnsw (vector vector_ip_ops) WITH (m = 16, ef_construction = 64);
+-- 可以再次连接数据库,输入下方命令。如果看到 "vector_index" hnsw (vector vector_ip_ops) WITH (m='16', ef_construction='64') 则代表构建完成(注意,后面没有 INVALID)
+\d modeldata
+```
+
+## 版本新功能介绍
+
+### Fast GPT V4.5
+
+1. 新增 - 升级 PgVector 插件,引入 HNSW 索引,极大加快的知识库搜索速度。
+2. 新增 - AI对话模块,增加【返回AI内容】选项,可控制 AI 的内容不直接返回浏览器。
+3. 新增 - 支持问题分类选择模型
+4. 优化 - TextSplitter,采用递归拆解法。
+5. 优化 - 高级编排 UX 性能
+6. 修复 - 分享链接鉴权问题
+
+## 该版本需要修改 `config.json` 文件
+
+最新配置可参考: [V45版本最新 config.json](/docs/development/configuration)
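Editor's aside, not part of the commit: the `vector_ip_ops` HNSW index built above serves inner-product nearest-neighbour queries. A minimal sketch of the kind of search it accelerates, using the node `pg` client; the table and column names (`modeldata`, `vector`) come from the SQL above, while the `id` column and connection-string env var are illustrative assumptions.

```ts
import { Pool } from 'pg';

const pool = new Pool({ connectionString: process.env.PG_URL }); // assumed env var

// pgvector's <#> operator returns the negative inner product, so ordering
// ascending by it yields the most similar rows first; with the HNSW index
// above, this runs as an approximate index scan instead of a sequential scan.
async function searchSimilar(embedding: number[], limit = 10) {
  const { rows } = await pool.query(
    'SELECT id FROM modeldata ORDER BY vector <#> $1::vector LIMIT $2',
    [`[${embedding.join(',')}]`, limit]
  );
  return rows;
}
```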
docSite/content/docs/use-cases/ai_settings.md (new file, 94 lines)
@@ -0,0 +1,94 @@
+---
+title: "AI 高级配置说明"
+description: "FastGPT AI 高级配置说明"
+icon: "sign_language"
+draft: false
+toc: true
+weight: 310
+---
+
+在 FastGPT 的 AI 对话模块中,有一个 AI 高级配置,里面包含了 AI 模型的参数配置,本文详细介绍这些配置的含义。
+
+# 返回AI内容
+
+这是一个开关,打开的时候,当 AI 对话模块运行时,会将其输出的内容返回到浏览器(API响应);如果关闭,AI 输出的内容不会返回到浏览器,但是生成的内容仍可以通过【AI回复】进行输出。你可以将【AI回复】连接到其他模块中。
+
+# 温度
+
+可选范围0-10,越大代表生成的内容越自由扩散,越小代表越严谨。调节能力有限,知识库问答场景通常设置为0。
+
+# 回复上限
+
+控制 AI 回复的最大 Tokens,较小的值可以一定程度上减少 AI 的废话,但也可能导致 AI 回复不完整。
+
+# 引用模板 & 引用提示词
+
+这两个参数与知识库问答场景相关,可以控制知识库相关的提示词。
+
+## AI 对话消息组成
+
+要用明白这两个变量,首先要了解传递给 AI 模型的消息格式。它是一个数组,FastGPT 中这个数组的组成形式为:
+
+```json
+[
+内置提示词(config.json 配置,一般为空)
+系统提示词 (用户输入的提示词)
+历史记录
+问题(由引用提示词、引用模板和用户问题组成)
+]
+```
+
+{{% alert icon="🍅" context="success" %}}
+Tips: 可以通过点击上下文按键查看完整的上下文
+{{% /alert %}}
+
+## 引用模板和提示词设计
+
+引用模板和引用提示词通常是成对出现,引用提示词依赖引用模板。
+
+FastGPT 知识库采用 QA 对(不一定都是问答格式,仅代表两个变量)的格式存储,在转义成字符串时候会根据**引用模板**来进行格式化。知识库包含 5 个变量: q, a, file_id, index, source,可以通过 {{q}} {{a}} {{file_id}} {{index}} {{source}} 按需引入。下面一个模板例子:
+
+**引用模板**
+
+```
+{instruction:"{{q}}",output:"{{a}}",source:"{{source}}"}
+```
+
+搜索到的知识库,会自动将 q,a,source 替换成对应的内容。每条搜索到的内容,会通过 `\n` 隔开。例如:
+```
+{instruction:"电影《铃芽之旅》的导演是谁?",output:"电影《铃芽之旅》的导演是新海诚。",source:"手动输入"}
+{instruction:"本作的主人公是谁?",output:"本作的主人公是名叫铃芽的少女。",source:""}
+{instruction:"电影《铃芽之旅》男主角是谁?",output:"电影《铃芽之旅》男主角是宗像草太,由松村北斗配音。",source:""}
+{instruction:"电影《铃芽之旅》的编剧是谁?22",output:"新海诚是本片的编剧。",source:"手动输入"}
+```
+
+**引用提示词**
+
+引用模板需要和引用提示词一起使用,提示词中可以写引用模板的格式说明以及对话的要求等。可以使用 {{quote}} 来使用 **引用模板**,使用 {{question}} 来引入问题。例如:
+
+```
+你的背景知识:
+"""
+{{quote}}
+"""
+对话要求:
+1. 背景知识是最新的,其中 instruction 是相关介绍,output 是预期回答或补充。
+2. 使用背景知识回答问题。
+3. 背景知识无法回答问题时,你可以礼貌地回答用户问题。
+我的问题是:"{{question}}"
+```
+
+转义后则为:
+```
+你的背景知识:
+"""
+{instruction:"电影《铃芽之旅》的导演是谁?",output:"电影《铃芽之旅》的导演是新海诚。",source:"手动输入"}
+{instruction:"本作的主人公是谁?",output:"本作的主人公是名叫铃芽的少女。",source:""}
+{instruction:"电影《铃芽之旅》男主角是谁?",output:"电影《铃芽之旅》男主角是宗像草太,由松村北斗配音。",source:""}
+"""
+对话要求:
+1. 背景知识是最新的,其中 instruction 是相关介绍,output 是预期回答或补充。
+2. 使用背景知识回答问题。
+3. 背景知识无法回答问题时,你可以礼貌地回答用户问题。
+我的问题是:"{{question}}"
+```
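An illustrative aside, not part of the commit: the page above says each knowledge-base hit is formatted through the quote template, the hits are joined with `\n`, and the result replaces {{quote}} in the quote prompt. A minimal sketch of that substitution, assuming plain string replacement; the helper names are hypothetical.

```ts
type QuoteItem = { q: string; a: string; source: string; file_id: string; index: number };

// format one hit through the quote template, e.g.
// '{instruction:"{{q}}",output:"{{a}}",source:"{{source}}"}'
function renderQuoteItem(template: string, item: QuoteItem): string {
  return template
    .replace(/\{\{q\}\}/g, item.q)
    .replace(/\{\{a\}\}/g, item.a)
    .replace(/\{\{source\}\}/g, item.source)
    .replace(/\{\{file_id\}\}/g, item.file_id)
    .replace(/\{\{index\}\}/g, String(item.index));
}

// hits joined by '\n' fill {{quote}}; the user question fills {{question}}
function renderQuotePrompt(
  prompt: string,
  template: string,
  items: QuoteItem[],
  question: string
): string {
  const quote = items.map((item) => renderQuoteItem(template, item)).join('\n');
  return prompt.replace(/\{\{quote\}\}/g, quote).replace(/\{\{question\}\}/g, question);
}
```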
@@ -1,109 +0,0 @@
----
-title: "提示词 & 引用提示词"
-description: "FastGPT 提示词 & 引用提示词说明"
-icon: "sign_language"
-draft: false
-toc: true
-weight: 310
----
-
-限定词从 V4.4.3 版本后去除,被“引用提示词”和“引用模板”替代。
-
-# AI 对话消息组成
-
-传递给 AI 模型的消息是一个数组,FastGPT 中这个数组的组成形式为:
-
-```json
-[
-内置提示词(config.json 配置,一般为空)
-提示词 (用户输入的提示词)
-历史记录
-问题(会由输入的问题、引用提示词和引用模板来决定)
-]
-```
-
-{{% alert icon="🍅" context="success" %}}
-Tips: 可以通过点击上下文按键查看完整的
-{{% /alert %}}
-
-# 引用模板和提示词设计
-
-知识库采用 QA 对的格式存储,在转义成字符串时候会根据**引用模板**来进行格式化。知识库包含 3 个变量: q,a 和 source,可以通过 {{q}} {{a}} {{source}} 按需引入。下面一个模板例子:
-
-**引用模板**
-
-```
-{instruction:"{{q}}",output:"{{a}}",source:"{{source}}"}
-```
-
-搜索到的知识库,会自动将 q,a,source 替换成对应的内容。每条搜索到的内容,会通过 `\n` 隔开。例如:
-```
-{instruction:"电影《铃芽之旅》的导演是谁?",output:"电影《铃芽之旅》的导演是新海诚。",source:"手动输入"}
-{instruction:"本作的主人公是谁?",output:"本作的主人公是名叫铃芽的少女。",source:""}
-{instruction:"电影《铃芽之旅》男主角是谁?",output:"电影《铃芽之旅》男主角是宗像草太,由松村北斗配音。",source:""}
-{instruction:"电影《铃芽之旅》的编剧是谁?22",output:"新海诚是本片的编剧。",source:"手动输入"}
-```
-
-**引用提示词**
-
-引用模板需要和引用提示词一起使用,提示词中可以写引用模板的格式说明以及对话的要求等。可以使用 {{quote}} 来使用 **引用模板**,使用 {{question}} 来引入问题。例如:
-
-```
-你的背景知识:
-"""
-{{quote}}
-"""
-对话要求:
-1. 背景知识是最新的,其中 instruction 是相关介绍,output 是预期回答或补充。
-2. 使用背景知识回答问题。
-3. 背景知识无法回答问题时,你可以礼貌的的回答用户问题。
-我的问题是:"{{question}}"
-```
-
-# 提示词案例
-
-## 仅回复知识库里的内容
-
-**引用提示词**里添加:
-```
-你的背景知识:
-"""
-{{quote}}
-"""
-对话要求:
-1. 回答前,请先判断背景知识是否足够回答问题,如果无法回答,请直接回复:“对不起,我无法回答你的问题~”。
-2. 背景知识是最新的,其中 instruction 是相关介绍,output 是预期回答或补充。
-3. 使用背景知识回答问题。
-我的问题是:"{{question}}"
-```
-
-## 说明引用来源
-
-**引用模板:**
-
-```
-{instruction:"{{q}}",output:"{{a}}",source:"{{source}}"}
-```
-
-**引用提示词:**
-
-```
-你的背景知识:
-"""
-{{quote}}
-"""
-对话要求:
-1. 背景知识是最新的,其中 instruction 是相关介绍,output 是预期回答或补充,source是背景来源。
-2. 使用背景知识回答问题。
-3. 在回答问题后,你需要给出本次回答对应的背景来源,来源展示格式如下:
-
-“
-这是AI作答。本次知识来源:
-1. source1
-2. source2
-......
-”
-
-我的问题是:"{{question}}"
-```
@@ -232,7 +232,7 @@ weight: 142
 "outputs": [
   {
     "key": "answerText",
-    "label": "模型回复",
+    "label": "AI回复",
     "description": "将在 stream 回复完毕后触发",
     "valueType": "string",
     "type": "source",
@@ -432,7 +432,7 @@ export default async function (ctx: FunctionContext) {
 "outputs": [
   {
     "key": "answerText",
-    "label": "模型回复",
+    "label": "AI回复",
     "description": "直接响应,无需配置",
     "type": "hidden",
     "targets": []
@@ -751,7 +751,7 @@ HTTP 模块允许你调用任意 POST 类型的 HTTP 接口,从而实验一些
 "outputs": [
   {
     "key": "answerText",
-    "label": "模型回复",
+    "label": "AI回复",
     "description": "将在 stream 回复完毕后触发",
     "valueType": "string",
     "type": "source",
@@ -313,7 +313,7 @@ weight: 144
 "outputs": [
   {
     "key": "answerText",
-    "label": "模型回复",
+    "label": "AI回复",
     "description": "将在 stream 回复完毕后触发",
     "valueType": "string",
     "type": "source",
@@ -745,7 +745,7 @@ PS2:配置中的问题分类还包含着“联网搜索”,这个是另一
 "outputs": [
   {
     "key": "answerText",
-    "label": "模型回复",
+    "label": "AI回复",
     "description": "将在 stream 回复完毕后触发",
     "valueType": "string",
     "type": "source",
@@ -903,7 +903,7 @@ PS2:配置中的问题分类还包含着“联网搜索”,这个是另一
 "outputs": [
   {
     "key": "answerText",
-    "label": "模型回复",
+    "label": "AI回复",
     "description": "将在 stream 回复完毕后触发",
     "valueType": "string",
     "type": "source",
@@ -1117,7 +1117,7 @@ PS2:配置中的问题分类还包含着“联网搜索”,这个是另一
 "outputs": [
   {
     "key": "answerText",
-    "label": "模型回复",
+    "label": "AI回复",
     "description": "将在 stream 回复完毕后触发",
     "valueType": "string",
     "type": "source",
@@ -1484,7 +1484,7 @@ PS2:配置中的问题分类还包含着“联网搜索”,这个是另一
 "outputs": [
   {
     "key": "answerText",
-    "label": "模型回复",
+    "label": "AI回复",
     "description": "将在 stream 回复完毕后触发",
     "valueType": "string",
     "type": "source",
@@ -29,7 +29,9 @@ export async function connectMongo({
   bufferCommands: true,
   maxConnecting: Number(process.env.DB_MAX_LINK || 5),
   maxPoolSize: Number(process.env.DB_MAX_LINK || 5),
-  minPoolSize: 2
+  minPoolSize: 2,
+  connectTimeoutMS: 20000,
+  waitQueueTimeoutMS: 20000
 });

 console.log('mongo connected');
@@ -5,7 +5,9 @@
   "mongoose": "^7.0.2",
   "winston": "^3.10.0",
   "winston-mongodb": "^5.1.1",
-  "axios": "^1.5.1"
+  "axios": "^1.5.1",
+  "nextjs-cors": "^2.1.2",
+  "next": "13.5.2"
 },
 "devDependencies": {
   "@types/node": "^20.8.5"
packages/common/tools/nextjs.ts (new file, 19 lines)
@@ -0,0 +1,19 @@
+import type { NextApiResponse, NextApiHandler, NextApiRequest } from 'next';
+import NextCors from 'nextjs-cors';
+
+export function withNextCors(handler: NextApiHandler): NextApiHandler {
+  return async function nextApiHandlerWrappedWithNextCors(
+    req: NextApiRequest,
+    res: NextApiResponse
+  ) {
+    const methods = ['GET', 'eHEAD', 'PUT', 'PATCH', 'POST', 'DELETE'];
+    const origin = req.headers.origin;
+    await NextCors(req, res, {
+      methods,
+      origin: origin,
+      optionsSuccessStatus: 200
+    });
+
+    return handler(req, res);
+  };
+}
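A usage sketch for the new wrapper (illustration only, not in the commit): `withNextCors` wraps a standard Next.js API handler so the CORS preflight is answered and the caller's Origin is reflected before the handler runs. The import path assumes the `@fastgpt/common` package exposes the module at `tools/nextjs`; the endpoint body is a placeholder.

```ts
import type { NextApiRequest, NextApiResponse } from 'next';
import { withNextCors } from '@fastgpt/common/tools/nextjs'; // assumed import path

// placeholder endpoint: CORS headers are applied by the wrapper before this runs
async function handler(req: NextApiRequest, res: NextApiResponse) {
  res.status(200).json({ ok: true });
}

export default withNextCors(handler);
```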
@@ -13,20 +13,10 @@ export const hashStr = (psw: string) => {
 /* simple text, remove chinese space and extra \n */
 export const simpleText = (text: string) => {
   text = text.replace(/([\u4e00-\u9fa5])[\s&&[^\n]]+([\u4e00-\u9fa5])/g, '$1$2');
-  text = text.replace(/\n{2,}/g, '\n');
+  text = text.replace(/\n{3,}/g, '\n\n');
   text = text.replace(/[\s&&[^\n]]{2,}/g, ' ');
   text = text.replace(/[\x00-\x08]/g, ' ');
+  text = text.replace(/\r\n|\r/g, '\n');

-  // replace empty \n
-  let newText = '';
-  let lastChar = '';
-  for (let i = 0; i < text.length; i++) {
-    const currentChar = text[i];
-    if (currentChar === '\n' && !/[。?!;.?!;]/g.test(lastChar)) {
-    } else {
-      newText += currentChar;
-    }
-    lastChar = currentChar;
-  }
-  return newText;
+  return text;
 };
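Editor's note, not part of the commit: the rewritten `simpleText` keeps paragraph breaks instead of flattening them. It now collapses runs of three or more newlines to a single blank line, normalizes `\r\n`/`\r` to `\n`, and drops the old character-by-character loop that removed any newline not preceded by sentence-ending punctuation. A small illustration; the import path is an assumption.

```ts
import { simpleText } from './str'; // assumed module path

const raw = 'line one\n\n\n\n\nline two';

simpleText(raw);
// v4.5:   'line one\n\nline two'  (one blank line survives)
// v4.4.7: the removed loop also dropped the remaining newline, because 'e'
//         is not sentence-ending punctuation, yielding 'line oneline two'
```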
@@ -11,6 +11,7 @@ export const getAIApi = (props?: UserModelSchema['openaiAccount'], timeout = 600
   apiKey: props?.key || systemAIChatKey,
   baseURL: props?.baseUrl || baseUrl,
   httpAgent: global.httpsAgent,
-  timeout
+  timeout,
+  maxRetries: 2
 });
 };
packages/core/ai/type.d.ts (vendored)
@@ -4,3 +4,9 @@ export type ChatCompletion = OpenAI.Chat.ChatCompletion;
 export type CreateChatCompletionRequest = OpenAI.Chat.ChatCompletionCreateParams;

 export type StreamChatType = Stream<OpenAI.Chat.ChatCompletionChunk>;
+
+export type PromptTemplateItem = {
+  title: string;
+  desc: string;
+  value: string;
+};
@@ -5,7 +5,7 @@
   "@fastgpt/common": "workspace:*",
   "@fastgpt/support": "workspace:*",
   "encoding": "^0.1.13",
-  "openai": "^4.11.1",
+  "openai": "^4.12.1",
   "tunnel": "^0.0.6"
 },
 "devDependencies": {
@@ -63,5 +63,6 @@ export type AuthShareChatInitProps = {
 };

 export function authShareChatInit(data: AuthShareChatInitProps) {
+  if (!global.feConfigs?.isPlus) return;
   return POST('/support/outLink/authShareChatInit', data);
 }
@@ -5,7 +5,8 @@
   "@fastgpt/common": "workspace:*",
   "cookie": "^0.5.0",
   "jsonwebtoken": "^9.0.2",
-  "axios": "^1.5.1"
+  "axios": "^1.5.1",
+  "next": "13.5.2"
 },
 "devDependencies": {
   "@types/cookie": "^0.5.2",
@@ -1,8 +1,8 @@
+import type { NextApiResponse, NextApiRequest } from 'next';
 import Cookie from 'cookie';
-import { authJWT } from './tools';
+import jwt from 'jsonwebtoken';
 import { authOpenApiKey } from '../openapi/auth';
 import { authOutLinkId } from '../outLink/auth';
-
 import { MongoUser } from './schema';
 import type { UserModelSchema } from './type.d';
 import { ERROR_ENUM } from '@fastgpt/common/constant/errorCode';
@@ -39,7 +39,7 @@ export const authUser = async ({
   authBalance = false,
   authOutLink
 }: {
-  req: any;
+  req: NextApiRequest;
   authToken?: boolean;
   authRoot?: boolean;
   authApiKey?: boolean;
@@ -165,3 +165,42 @@ export const authUser = async ({
     apikey: openApiKey
   };
 };
+
+/* 生成 token */
+export function generateToken(userId: string) {
+  const key = process.env.TOKEN_KEY as string;
+  const token = jwt.sign(
+    {
+      userId,
+      exp: Math.floor(Date.now() / 1000) + 60 * 60 * 24 * 7
+    },
+    key
+  );
+  return token;
+}
+// auth token
+export function authJWT(token: string) {
+  return new Promise<string>((resolve, reject) => {
+    const key = process.env.TOKEN_KEY as string;
+
+    jwt.verify(token, key, function (err, decoded: any) {
+      if (err || !decoded?.userId) {
+        reject(ERROR_ENUM.unAuthorization);
+        return;
+      }
+      resolve(decoded.userId);
+    });
+  });
+}
+
+/* set cookie */
+export const setCookie = (res: NextApiResponse, token: string) => {
+  res.setHeader(
+    'Set-Cookie',
+    `token=${token}; Path=/; HttpOnly; Max-Age=604800; Samesite=None; Secure;`
+  );
+};
+/* clear cookie */
+export const clearCookie = (res: NextApiResponse) => {
+  res.setHeader('Set-Cookie', 'token=; Path=/; Max-Age=0');
+};
@@ -1,28 +0,0 @@
-import jwt from 'jsonwebtoken';
-import { ERROR_ENUM } from '@fastgpt/common/constant/errorCode';
-
-/* 生成 token */
-export const generateToken = (userId: string) => {
-  const key = process.env.TOKEN_KEY as string;
-  const token = jwt.sign(
-    {
-      userId,
-      exp: Math.floor(Date.now() / 1000) + 60 * 60 * 24 * 7
-    },
-    key
-  );
-  return token;
-};
-// auth token
-export const authJWT = (token: string) =>
-  new Promise<string>((resolve, reject) => {
-    const key = process.env.TOKEN_KEY as string;
-
-    jwt.verify(token, key, function (err, decoded: any) {
-      if (err || !decoded?.userId) {
-        reject(ERROR_ENUM.unAuthorization);
-        return;
-      }
-      resolve(decoded.userId);
-    });
-  });
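An illustrative sketch, not in the commit: the token helpers moved from the deleted `tools` module into the user auth module above. This shows how they combine in a login endpoint; the import path and response shape are assumptions.

```ts
import type { NextApiRequest, NextApiResponse } from 'next';
import { generateToken, setCookie } from '@fastgpt/support/user/auth'; // assumed path

export default async function login(req: NextApiRequest, res: NextApiResponse) {
  const userId = '...'; // resolved from credentials; elided here
  const token = generateToken(userId); // JWT signed with TOKEN_KEY, 7-day expiry
  setCookie(res, token); // HttpOnly cookie, Max-Age 604800 (7 days)
  res.status(200).json({ token });
}
```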
pnpm-lock.yaml (generated)
@@ -35,6 +35,12 @@ importers:
       mongoose:
         specifier: ^7.0.2
         version: registry.npmmirror.com/mongoose@7.0.2
+      next:
+        specifier: 13.5.2
+        version: registry.npmmirror.com/next@13.5.2(@babel/core@7.23.2)(react-dom@18.2.0)(react@18.2.0)(sass@1.58.3)
+      nextjs-cors:
+        specifier: ^2.1.2
+        version: registry.npmmirror.com/nextjs-cors@2.1.2(next@13.5.2)
       winston:
         specifier: ^3.10.0
         version: registry.npmmirror.com/winston@3.10.0
@@ -58,8 +64,8 @@ importers:
         specifier: ^0.1.13
         version: registry.npmmirror.com/encoding@0.1.13
       openai:
-        specifier: ^4.11.1
-        version: registry.npmmirror.com/openai@4.11.1(encoding@0.1.13)
+        specifier: ^4.12.1
+        version: registry.npmmirror.com/openai@4.12.1(encoding@0.1.13)
       tunnel:
         specifier: ^0.0.6
         version: registry.npmmirror.com/tunnel@0.0.6
@@ -82,6 +88,9 @@ importers:
       jsonwebtoken:
         specifier: ^9.0.2
         version: registry.npmmirror.com/jsonwebtoken@9.0.2
+      next:
+        specifier: 13.5.2
+        version: registry.npmmirror.com/next@13.5.2(@babel/core@7.23.2)(react-dom@18.2.0)(react@18.2.0)(sass@1.58.3)
     devDependencies:
       '@types/cookie':
         specifier: ^0.5.2
@@ -200,9 +209,6 @@ importers:
       next-i18next:
        specifier: ^14.0.0
         version: registry.npmmirror.com/next-i18next@14.0.3(i18next@23.5.1)(next@13.5.2)(react-i18next@13.2.2)(react@18.2.0)
-      nextjs-cors:
-        specifier: ^2.1.2
-        version: registry.npmmirror.com/nextjs-cors@2.1.2(next@13.5.2)
       nprogress:
         specifier: ^0.2.0
         version: registry.npmmirror.com/nprogress@0.2.0
@@ -288,6 +294,9 @@ importers:
       '@types/multer':
         specifier: ^1.4.7
         version: registry.npmmirror.com/@types/multer@1.4.7
+      '@types/node':
+        specifier: ^20.8.5
+        version: registry.npmmirror.com/@types/node@20.8.5
       '@types/papaparse':
         specifier: ^5.3.7
         version: registry.npmmirror.com/@types/papaparse@5.3.7
@@ -9581,11 +9590,11 @@ packages:
       mimic-fn: registry.npmmirror.com/mimic-fn@4.0.0
     dev: true

-  registry.npmmirror.com/openai@4.11.1(encoding@0.1.13):
-    resolution: {integrity: sha512-GU0HQWbejXuVAQlDjxIE8pohqnjptFDIm32aPlNT1H9ucMz1VJJD0DaTJRQsagNaJ97awWjjVLEG7zCM6sm4SA==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/openai/-/openai-4.11.1.tgz}
-    id: registry.npmmirror.com/openai/4.11.1
+  registry.npmmirror.com/openai@4.12.1(encoding@0.1.13):
+    resolution: {integrity: sha512-EAoUwm4dtiWvFwBhOCK/VfF8sj1ZU8+aAIJnfT4NyeTfrt1DM/6Gdd6fOZWTjBYryTAqu9Vpb5+9Wu6JMtm/gA==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/openai/-/openai-4.12.1.tgz}
+    id: registry.npmmirror.com/openai/4.12.1
     name: openai
-    version: 4.11.1
+    version: 4.12.1
     hasBin: true
     dependencies:
       '@types/node': registry.npmmirror.com/@types/node@18.18.5
@@ -8,68 +8,85 @@
   {
     "model": "gpt-3.5-turbo",
     "name": "GPT35-4k",
-    "contextMaxToken": 4000,
+    "price": 0,
+    "maxToken": 4000,
     "quoteMaxToken": 2000,
     "maxTemperature": 1.2,
-    "price": 0,
+    "censor": false,
-    "defaultSystem": ""
+    "defaultSystemChatPrompt": ""
   },
   {
     "model": "gpt-3.5-turbo-16k",
     "name": "GPT35-16k",
-    "contextMaxToken": 16000,
+    "maxToken": 16000,
+    "price": 0,
     "quoteMaxToken": 8000,
     "maxTemperature": 1.2,
-    "price": 0,
+    "censor": false,
-    "defaultSystem": ""
+    "defaultSystemChatPrompt": ""
   },
   {
     "model": "gpt-4",
     "name": "GPT4-8k",
-    "contextMaxToken": 8000,
+    "maxToken": 8000,
+    "price": 0,
     "quoteMaxToken": 4000,
     "maxTemperature": 1.2,
+    "censor": false,
+    "defaultSystemChatPrompt": ""
+  }
+],
+"QAModels": [
+  {
+    "model": "gpt-3.5-turbo-16k",
+    "name": "GPT35-16k",
+    "maxToken": 16000,
+    "price": 0
+  }
+],
+"CQModels": [
+  {
+    "model": "gpt-3.5-turbo-16k",
+    "name": "GPT35-16k",
+    "maxToken": 16000,
     "price": 0,
-    "defaultSystem": ""
+    "functionCall": true,
+    "functionPrompt": ""
+  },
+  {
+    "model": "gpt-4",
+    "name": "GPT4-8k",
+    "maxToken": 8000,
+    "price": 0,
+    "functionCall": true,
+    "functionPrompt": ""
+  }
+],
+"ExtractModels": [
+  {
+    "model": "gpt-3.5-turbo-16k",
+    "name": "GPT35-16k",
+    "maxToken": 16000,
+    "price": 0,
+    "functionCall": true,
+    "functionPrompt": ""
+  }
+],
+"QGModels": [
+  {
+    "model": "gpt-3.5-turbo",
+    "name": "GPT35-4K",
+    "maxToken": 4000,
+    "price": 0
   }
 ],
 "VectorModels": [
   {
     "model": "text-embedding-ada-002",
     "name": "Embedding-2",
-    "price": 0,
+    "price": 0.2,
-    "defaultToken": 500,
+    "defaultToken": 700,
     "maxToken": 3000
   }
-],
-"QAModel": {
-  "model": "gpt-3.5-turbo-16k",
-  "name": "GPT35-16k",
-  "maxToken": 16000,
-  "price": 0
-},
-"ExtractModel": {
-  "model": "gpt-3.5-turbo-16k",
-  "functionCall": true,
-  "name": "GPT35-16k",
-  "maxToken": 16000,
-  "price": 0,
-  "prompt": ""
-},
-"CQModel": {
-  "model": "gpt-3.5-turbo-16k",
-  "functionCall": true,
-  "name": "GPT35-16k",
-  "maxToken": 16000,
-  "price": 0,
-  "prompt": ""
-},
-"QGModel": {
-  "model": "gpt-3.5-turbo",
-  "name": "GPT35-4k",
-  "maxToken": 4000,
-  "price": 0,
-  "prompt": "",
-  "functionCall": false
-}
+]
 }
@@ -1,6 +1,6 @@
 {
   "name": "app",
-  "version": "4.4.7",
+  "version": "4.5.0",
   "private": false,
   "scripts": {
     "dev": "next dev",
@@ -31,6 +31,7 @@
   "formidable": "^2.1.1",
   "framer-motion": "^9.0.6",
   "hyperdown": "^2.4.29",
+  "i18next": "^23.2.11",
   "immer": "^9.0.19",
   "js-cookie": "^3.0.5",
   "js-tiktoken": "^1.0.7",
@@ -43,7 +44,7 @@
   "multer": "1.4.5-lts.1",
   "nanoid": "^4.0.1",
   "next": "13.5.2",
-  "nextjs-cors": "^2.1.2",
+  "next-i18next": "^14.0.0",
   "nprogress": "^0.2.0",
   "papaparse": "^5.4.1",
   "pg": "^8.10.0",
@@ -52,6 +53,7 @@
   "react-day-picker": "^8.7.1",
   "react-dom": "18.2.0",
   "react-hook-form": "^7.43.1",
+  "react-i18next": "^13.0.2",
   "react-markdown": "^8.0.7",
   "react-syntax-highlighter": "^15.5.0",
   "reactflow": "^11.7.4",
@@ -62,10 +64,7 @@
   "request-ip": "^3.3.0",
   "sass": "^1.58.3",
   "timezones-list": "^3.0.2",
-  "zustand": "^4.3.5",
-  "i18next": "^23.2.11",
-  "react-i18next": "^13.0.2",
-  "next-i18next": "^14.0.0"
+  "zustand": "^4.3.5"
 },
 "devDependencies": {
   "@svgr/webpack": "^6.5.1",
@@ -76,6 +75,7 @@
   "@types/jsonwebtoken": "^9.0.3",
   "@types/lodash": "^4.14.191",
   "@types/multer": "^1.4.7",
+  "@types/node": "^20.8.5",
   "@types/papaparse": "^5.3.7",
   "@types/pg": "^8.6.6",
   "@types/react": "18.0.28",
@@ -1,6 +1,10 @@
-### Fast GPT V4.4.7
+### Fast GPT V4.5.0

-1. 优化数据集管理,区分手动录入和标注,可追数据至某个文件,保留链接读取的原始链接。
-2. [使用文档](https://doc.fastgpt.run/docs/intro/)
-3. [点击查看高级编排介绍文档](https://doc.fastgpt.run/docs/workflow)
-4. [点击查看商业版](https://doc.fastgpt.run/docs/commercial/)
+1. 新增 - 升级 PgVector 插件,引入 HNSW 索引,极大加快的知识库搜索速度。
+2. 新增 - AI对话模块,增加【返回AI内容】选项,可控制 AI 的内容不直接返回浏览器。
+3. 优化 - TextSplitter,采用递归拆解法。
+4. 优化 - 高级编排 UX 性能
+5. 优化数据集管理,区分手动录入和标注,可追数据至某个文件,保留链接读取的原始链接。
+6. [使用文档](https://doc.fastgpt.run/docs/intro/)
+7. [点击查看高级编排介绍文档](https://doc.fastgpt.run/docs/workflow)
+8. [点击查看商业版](https://doc.fastgpt.run/docs/commercial/)
@@ -39,7 +39,7 @@
 "My Apps": "My Apps",
 "Output Field Settings": "Output Field Settings",
 "Paste Config": "Paste Config",
-"Quote Prompt Settings": "Quote Prompt Settings",
+"AI Settings": "AI Settings",
 "Variable Key Repeat Tip": "Variable Key Repeat",
 "module": {
   "Custom Title Tip": "The title name is displayed during the conversation"
@@ -39,7 +39,7 @@
 "My Apps": "我的应用",
 "Output Field Settings": "输出字段编辑",
 "Paste Config": "粘贴配置",
-"Quote Prompt Settings": "引用提示词配置",
+"AI Settings": "AI 高级配置",
 "Variable Key Repeat Tip": "变量 key 重复",
 "module": {
   "Custom Title Tip": "该标题名字会展示在对话过程中"
@@ -1,6 +1,5 @@
 import { SystemInputEnum } from '@/constants/app';
 import { FlowModuleTypeEnum } from '@/constants/flow';
-import { getChatModel } from '@/service/utils/data';
 import { AppModuleItemType, VariableItemType } from '@/types/app';

 export const getGuideModule = (modules: AppModuleItemType[]) =>
@@ -23,11 +22,3 @@ export const splitGuideModule = (guideModules?: AppModuleItemType) => {
     questionGuide
   };
 };
-export const getChatModelNameList = (modules: AppModuleItemType[]): string[] => {
-  const chatModules = modules.filter((item) => item.flowType === FlowModuleTypeEnum.chatNode);
-  return chatModules
-    .map(
-      (item) => getChatModel(item.inputs.find((input) => input.key === 'model')?.value)?.name || ''
-    )
-    .filter((item) => item);
-};
@@ -62,7 +62,9 @@ const Markdown = ({ source, isChatting = false }: { source: string; isChatting?:
     []
   );

-  const formatSource = source.replace(/\\n/g, '\n ');
+  const formatSource = source
+    .replace(/\\n/g, '\n ')
+    .replace(/(http[s]?:\/\/[^\s,。]+)([。,])/g, '$1 $2');

   return (
     <ReactMarkdown
@@ -35,8 +35,6 @@ const MyModal = ({
 >
   <ModalOverlay />
   <ModalContent
-    display={'flex'}
-    flexDirection={'column'}
     w={w}
     minW={['90vw', '400px']}
     maxW={maxW}
@@ -46,7 +44,7 @@ const MyModal = ({
 >
   {!!title && <ModalHeader>{title}</ModalHeader>}
   {onClose && <ModalCloseButton />}
-  <Box overflow={'overlay'} h={'100%'}>
+  <Box overflow={'overlay'} h={'100%'} display={'flex'} flexDirection={'column'}>
     {children}
   </Box>
 </ModalContent>
projects/app/src/components/PromptTemplate/index.tsx (new file, 64 lines)
@@ -0,0 +1,64 @@
+import React, { useState } from 'react';
+import MyModal from '../MyModal';
+import { Box, Button, Grid, useTheme } from '@chakra-ui/react';
+import { PromptTemplateItem } from '@fastgpt/core/ai/type';
+import { ModalBody, ModalFooter } from '@chakra-ui/react';
+
+const PromptTemplate = ({
+  title,
+  templates,
+  onClose,
+  onSuccess
+}: {
+  title: string;
+  templates: PromptTemplateItem[];
+  onClose: () => void;
+  onSuccess: (e: string) => void;
+}) => {
+  const theme = useTheme();
+  const [selectTemplateTitle, setSelectTemplateTitle] = useState<PromptTemplateItem>();
+
+  return (
+    <MyModal isOpen title={title} onClose={onClose}>
+      <ModalBody w={'600px'}>
+        <Grid gridTemplateColumns={['1fr', '1fr 1fr']} gridGap={4}>
+          {templates.map((item) => (
+            <Box
+              key={item.title}
+              border={theme.borders.base}
+              py={2}
+              px={2}
+              borderRadius={'md'}
+              cursor={'pointer'}
+              {...(item.title === selectTemplateTitle?.title
+                ? {
+                    bg: 'myBlue.100'
+                  }
+                : {})}
+              onClick={() => setSelectTemplateTitle(item)}
+            >
+              <Box>{item.title}</Box>
+              <Box color={'myGray.600'} fontSize={'sm'} whiteSpace={'pre-wrap'}>
+                {item.value}
+              </Box>
+            </Box>
+          ))}
+        </Grid>
+      </ModalBody>
+      <ModalFooter>
+        <Button
+          disabled={!selectTemplateTitle}
+          onClick={() => {
+            if (!selectTemplateTitle) return;
+            onSuccess(selectTemplateTitle.value);
+            onClose();
+          }}
+        >
+          确认选择
+        </Button>
+      </ModalFooter>
+    </MyModal>
+  );
+};
+
+export default PromptTemplate;
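A brief usage sketch for the new modal (illustration only, not in the commit): it takes a list of `PromptTemplateItem`s and hands back the chosen template's `value` through `onSuccess`. The import alias and demo template are assumptions.

```tsx
import React, { useState } from 'react';
import PromptTemplate from '@/components/PromptTemplate'; // assumed alias path

const demoTemplates = [
  { title: 'Default quote', desc: '', value: '{instruction:"{{q}}",output:"{{a}}"}' }
];

export function QuoteTemplatePicker() {
  const [open, setOpen] = useState(true);
  const [template, setTemplate] = useState('');

  return open ? (
    <PromptTemplate
      title="选择引用模板"
      templates={demoTemplates}
      onClose={() => setOpen(false)}
      onSuccess={(value) => setTemplate(value)} // receives the chosen template's value
    />
  ) : (
    <pre>{template}</pre>
  );
}
```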
@@ -5,7 +5,8 @@ export enum SystemInputEnum {
   'switch' = 'switch', // a trigger switch
   'history' = 'history',
   'userChatInput' = 'userChatInput',
-  'questionGuide' = 'questionGuide'
+  'questionGuide' = 'questionGuide',
+  isResponseAnswerText = 'isResponseAnswerText'
 }
 export enum SystemOutputEnum {
   finish = 'finish'
@@ -9,7 +9,7 @@ import {
 } from './index';
 import type { AppItemType } from '@/types/app';
 import type { FlowModuleTemplateType } from '@/types/core/app/flow';
-import { chatModelList } from '@/web/common/store/static';
+import { chatModelList, cqModelList } from '@/web/common/store/static';
 import {
   Input_Template_History,
   Input_Template_TFSwitch,
@@ -136,14 +136,14 @@ export const ChatModule: FlowModuleTemplateType = {
       key: 'model',
       type: FlowInputItemTypeEnum.selectChatModel,
       label: '对话模型',
-      value: chatModelList[0]?.model,
+      value: chatModelList?.[0]?.model,
-      list: chatModelList.map((item) => ({ label: item.name, value: item.model })),
+      customData: () => chatModelList,
       required: true,
       valueCheck: (val) => !!val
     },
     {
       key: 'temperature',
-      type: FlowInputItemTypeEnum.slider,
+      type: FlowInputItemTypeEnum.hidden,
       label: '温度',
       value: 0,
       min: 0,
@@ -156,20 +156,26 @@ export const ChatModule: FlowModuleTemplateType = {
     },
     {
       key: 'maxToken',
-      type: FlowInputItemTypeEnum.maxToken,
+      type: FlowInputItemTypeEnum.hidden,
       label: '回复上限',
-      value: chatModelList[0] ? chatModelList[0].contextMaxToken / 2 : 2000,
+      value: chatModelList?.[0] ? chatModelList[0].maxToken / 2 : 2000,
       min: 100,
-      max: chatModelList[0]?.contextMaxToken || 4000,
+      max: chatModelList?.[0]?.maxToken || 4000,
       step: 50,
       markList: [
         { label: '100', value: 100 },
         {
-          label: `${chatModelList[0]?.contextMaxToken || 4000}`,
+          label: `${chatModelList?.[0]?.maxToken || 4000}`,
-          value: chatModelList[0]?.contextMaxToken || 4000
+          value: chatModelList?.[0]?.maxToken || 4000
         }
       ]
     },
+    {
+      key: 'aiSettings',
+      type: FlowInputItemTypeEnum.aiSettings,
+      label: '',
+      connected: false
+    },
     {
       key: 'systemPrompt',
       type: FlowInputItemTypeEnum.textarea,
@@ -180,6 +186,13 @@ export const ChatModule: FlowModuleTemplateType = {
       placeholder: ChatModelSystemTip,
       value: ''
     },
+    {
+      key: SystemInputEnum.isResponseAnswerText,
+      type: FlowInputItemTypeEnum.hidden,
+      label: '返回AI内容',
+      valueType: FlowValueTypeEnum.boolean,
+      value: true
+    },
     {
       key: 'quoteTemplate',
       type: FlowInputItemTypeEnum.hidden,
@@ -196,7 +209,7 @@ export const ChatModule: FlowModuleTemplateType = {
     },
     {
       key: 'quoteQA',
-      type: FlowInputItemTypeEnum.quoteList,
+      type: FlowInputItemTypeEnum.target,
       label: '引用内容',
       description: "对象数组格式,结构:\n [{q:'问题',a:'回答'}]",
       valueType: FlowValueTypeEnum.kbQuote,
@@ -216,7 +229,7 @@ export const ChatModule: FlowModuleTemplateType = {
     },
     {
       key: TaskResponseKeyEnum.answerText,
-      label: '模型回复',
+      label: 'AI回复',
       description: '将在 stream 回复完毕后触发',
       valueType: FlowValueTypeEnum.string,
       type: FlowOutputItemTypeEnum.source,
@@ -330,12 +343,21 @@ export const ClassifyQuestionModule: FlowModuleTemplateType = {
   showStatus: true,
   inputs: [
     Input_Template_TFSwitch,
+    {
+      key: 'model',
+      type: FlowInputItemTypeEnum.selectChatModel,
+      label: '分类模型',
+      value: cqModelList?.[0]?.model,
+      customData: () => cqModelList,
+      required: true,
+      valueCheck: (val) => !!val
+    },
     {
       key: 'systemPrompt',
       type: FlowInputItemTypeEnum.textarea,
       valueType: FlowValueTypeEnum.string,
       value: '',
-      label: '系统提示词',
+      label: '背景知识',
       description:
         '你可以添加一些特定内容的介绍,从而更好的识别用户的问题类型。这个内容通常是给模型介绍一个它不知道的内容。',
       placeholder: '例如: \n1. Laf 是一个云函数开发平台……\n2. Sealos 是一个集群操作系统'
@@ -504,7 +526,7 @@ export const AppModule: FlowModuleTemplateType = {
     },
     {
       key: TaskResponseKeyEnum.answerText,
-      label: '模型回复',
+      label: 'AI回复',
       description: '将在应用完全结束后触发',
       valueType: FlowValueTypeEnum.string,
       type: FlowOutputItemTypeEnum.source,
@@ -757,7 +779,7 @@ export const appTemplates: (AppItemType & {
       outputs: [
         {
           key: 'answerText',
-          label: '模型回复',
+          label: 'AI回复',
           description: '直接响应,无需配置',
           type: 'hidden',
           targets: []
@@ -1094,7 +1116,7 @@ export const appTemplates: (AppItemType & {
       outputs: [
         {
           key: 'answerText',
-          label: '模型回复',
+          label: 'AI回复',
           description: '直接响应,无需配置',
           type: 'hidden',
           targets: []
@@ -1401,7 +1423,7 @@ export const appTemplates: (AppItemType & {
       outputs: [
         {
           key: 'answerText',
-          label: '模型回复',
+          label: 'AI回复',
           description: '将在 stream 回复完毕后触发',
           valueType: 'string',
           type: 'source',
@@ -1863,7 +1885,7 @@ export const appTemplates: (AppItemType & {
       outputs: [
         {
           key: 'answerText',
-          label: '模型回复',
+          label: 'AI回复',
           description: '将在 stream 回复完毕后触发',
           valueType: 'string',
           type: 'source',
@@ -13,7 +13,7 @@ export enum FlowInputItemTypeEnum {
   chatInput = 'chatInput',
   selectApp = 'selectApp',
   // chat special input
-  quoteList = 'quoteList',
+  aiSettings = 'aiSettings',
   maxToken = 'maxToken',
   selectChatModel = 'selectChatModel',
   // dataset special input
@ -1,5 +1,98 @@
import type { AppSchema } from '@/types/mongoSchema';
import type { OutLinkEditType } from '@fastgpt/support/outLink/type.d';
+ import type {
+   LLMModelItemType,
+   ChatModelItemType,
+   FunctionModelItemType,
+   VectorModelItemType
+ } from '@/types/model';
+
+ export const defaultChatModels: ChatModelItemType[] = [
+   {
+     model: 'gpt-3.5-turbo',
+     name: 'GPT35-4k',
+     price: 0,
+     maxToken: 4000,
+     quoteMaxToken: 2000,
+     maxTemperature: 1.2,
+     censor: false,
+     defaultSystemChatPrompt: ''
+   },
+   {
+     model: 'gpt-3.5-turbo-16k',
+     name: 'GPT35-16k',
+     maxToken: 16000,
+     price: 0,
+     quoteMaxToken: 8000,
+     maxTemperature: 1.2,
+     censor: false,
+     defaultSystemChatPrompt: ''
+   },
+   {
+     model: 'gpt-4',
+     name: 'GPT4-8k',
+     maxToken: 8000,
+     price: 0,
+     quoteMaxToken: 4000,
+     maxTemperature: 1.2,
+     censor: false,
+     defaultSystemChatPrompt: ''
+   }
+ ];
+ export const defaultQAModels: LLMModelItemType[] = [
+   {
+     model: 'gpt-3.5-turbo-16k',
+     name: 'GPT35-16k',
+     maxToken: 16000,
+     price: 0
+   }
+ ];
+ export const defaultCQModels: FunctionModelItemType[] = [
+   {
+     model: 'gpt-3.5-turbo-16k',
+     name: 'GPT35-16k',
+     maxToken: 16000,
+     price: 0,
+     functionCall: true,
+     functionPrompt: ''
+   },
+   {
+     model: 'gpt-4',
+     name: 'GPT4-8k',
+     maxToken: 8000,
+     price: 0,
+     functionCall: true,
+     functionPrompt: ''
+   }
+ ];
+ export const defaultExtractModels: FunctionModelItemType[] = [
+   {
+     model: 'gpt-3.5-turbo-16k',
+     name: 'GPT35-16k',
+     maxToken: 16000,
+     price: 0,
+     functionCall: true,
+     functionPrompt: ''
+   }
+ ];
+ export const defaultQGModels: LLMModelItemType[] = [
+   {
+     model: 'gpt-3.5-turbo',
+     name: 'GPT35-4K',
+     maxToken: 4000,
+     price: 0
+   }
+ ];
+
+ export const defaultVectorModels: VectorModelItemType[] = [
+   {
+     model: 'text-embedding-ada-002',
+     name: 'Embedding-2',
+     price: 0,
+     defaultToken: 500,
+     maxToken: 3000
+   }
+ ];
+
export const defaultApp: AppSchema = {
  _id: '',
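These per-capability default lists (chat, QA, classify, extract, question-guide, vector) replace the old single-model globals; the first entry of each list acts as the runtime fallback. A minimal sketch of that selection pattern (the `pickModel` helper is illustrative, not part of the codebase):

```ts
// Minimal sketch: resolve a model by id from a configured list,
// falling back to the first entry when the id is missing or unknown.
type ModelItem = { model: string; name: string; price: number; maxToken: number };

function pickModel(list: ModelItem[], model?: string): ModelItem {
  return list.find((item) => item.model === model) ?? list[0];
}

const qaModels: ModelItem[] = [
  { model: 'gpt-3.5-turbo-16k', name: 'GPT35-16k', maxToken: 16000, price: 0 }
];
console.log(pickModel(qaModels, 'unknown-model').name); // 'GPT35-16k'
```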
@ -1,14 +1,17 @@
- import {
-   type QAModelItemType,
-   type ChatModelItemType,
-   type VectorModelItemType,
-   FunctionModelItemType
+ import type {
+   ChatModelItemType,
+   FunctionModelItemType,
+   LLMModelItemType,
+   VectorModelItemType
} from '@/types/model';
import type { FeConfigsType } from '@fastgpt/common/type/index.d';

export type InitDateResponse = {
  chatModels: ChatModelItemType[];
- qaModel: QAModelItemType;
+ qaModels: LLMModelItemType[];
+ cqModels: FunctionModelItemType[];
+ extractModels: FunctionModelItemType[];
+ qgModels: LLMModelItemType[];
  vectorModels: VectorModelItemType[];
  feConfigs: FeConfigsType;
  priceMd: string;
@ -1,5 +1,23 @@
- export const defaultQuoteTemplate = `{instruction:"{{q}}",output:"{{a}}"}`;
- export const defaultQuotePrompt = `你的背景知识:
+ import { PromptTemplateItem } from '@fastgpt/core/ai/type.d';
+
+ export const Prompt_QuoteTemplateList: PromptTemplateItem[] = [
+   {
+     title: '标准模板',
+     desc: '包含 q 和 a 两个变量的标准模板',
+     value: `{instruction:"{{q}}",output:"{{a}}"}`
+   },
+   {
+     title: '全部变量',
+     desc: '包含 q 和 a 两个变量的标准模板',
+     value: `{instruction:"{{q}}",output:"{{a}}",source:"{{source}}",file_id:"{{file_id}}",index:"{{index}}"}`
+   }
+ ];
+
+ export const Prompt_QuotePromptList: PromptTemplateItem[] = [
+   {
+     title: '标准模式',
+     desc: '',
+     value: `你的背景知识:
"""
{{quote}}
"""

@ -7,4 +25,19 @@ export const defaultQuotePrompt = `你的背景知识:
1. 背景知识是最新的,其中 instruction 是相关介绍,output 是预期回答或补充。
2. 使用背景知识回答问题。
3. 背景知识无法满足问题时,你需严谨的回答问题。
- 我的问题是:"{{question}}"`;
+ 我的问题是:"{{question}}"`
+   },
+   {
+     title: '严格模式',
+     desc: '',
+     value: `你的背景知识:
+ """
+ {{quote}}
+ """
+ 对话要求:
+ 1. 背景知识是最新的,其中 instruction 是相关介绍,output 是预期回答或补充。
+ 2. 使用背景知识回答问题。
+ 3. 背景知识无法满足问题时,你需要回答:我不清楚关于xxx的内容。
+ 我的问题是:"{{question}}"`
+   }
+ ];
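The `{{q}}`, `{{a}}`, `{{quote}}`, and `{{question}}` placeholders in these templates are filled by simple string substitution. A minimal sketch, assuming a plain global replace rather than the project's actual renderer:

```ts
// Minimal sketch: substitute {{key}} placeholders in a prompt template.
function renderTemplate(template: string, vars: Record<string, string>): string {
  return template.replace(/\{\{(\w+)\}\}/g, (_, key: string) => vars[key] ?? '');
}

const quoteTemplate = '{instruction:"{{q}}",output:"{{a}}"}';
console.log(
  renderTemplate(quoteTemplate, { q: 'What is FastGPT?', a: 'A knowledge-base QA platform.' })
);
// {instruction:"What is FastGPT?",output:"A knowledge-base QA platform."}
```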
@ -32,8 +32,6 @@ function Error() {
}

export async function getServerSideProps(context: any) {
- console.log('[render error]: ', context);
-
  return {
    props: { ...(await serviceSideProps(context)) }
  };
@ -3,7 +3,7 @@ import { connectToDatabase } from '@/service/mongo';
import { authUser } from '@fastgpt/support/user/auth';
import { sseErrRes } from '@/service/response';
import { sseResponseEventEnum } from '@/constants/chat';
- import { sseResponse } from '@/service/utils/tools';
+ import { responseWrite } from '@fastgpt/common/tools/stream';
import { AppModuleItemType } from '@/types/app';
import { dispatchModules } from '@/pages/api/v1/chat/completions';
import { pushChatBill } from '@/service/common/bill/push';

@ -59,12 +59,12 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
    detail: true
  });

- sseResponse({
+ responseWrite({
    res,
    event: sseResponseEventEnum.answer,
    data: '[DONE]'
  });
- sseResponse({
+ responseWrite({
    res,
    event: sseResponseEventEnum.appStreamResponse,
    data: JSON.stringify(responseData)
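`responseWrite` in `@fastgpt/common/tools/stream` takes over the SSE writes previously done by the app-local `sseResponse`. Its implementation is not shown in this diff; a minimal sketch of what such a helper typically does, assuming Node's `ServerResponse` (the name `responseWriteSketch` is illustrative):

```ts
import type { ServerResponse } from 'http';

// Minimal sketch of an SSE write helper: emit an optional `event:` line
// followed by the `data:` payload and the blank line that ends an SSE message.
export function responseWriteSketch({
  res,
  event,
  data
}: {
  res: ServerResponse;
  event?: string;
  data: string;
}) {
  if (event) res.write(`event: ${event}\n`);
  res.write(`data: ${data}\n\n`);
}
```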
@ -6,7 +6,8 @@ import { authUser } from '@fastgpt/support/user/auth';
import { ChatItemType } from '@/types/chat';
import { authApp } from '@/service/utils/auth';
import type { ChatSchema } from '@/types/mongoSchema';
- import { getChatModelNameList, getGuideModule } from '@/components/ChatBox/utils';
+ import { getGuideModule } from '@/components/ChatBox/utils';
+ import { getChatModelNameListByModules } from '@/service/core/app/module';
import { TaskResponseKeyEnum } from '@/constants/chat';

/* 初始化我的聊天框,需要身份验证 */

@ -83,7 +84,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
  appId,
  app: {
    userGuideModule: getGuideModule(app.modules),
-   chatModels: getChatModelNameList(app.modules),
+   chatModels: getChatModelNameListByModules(app.modules),
    name: app.name,
    avatar: app.avatar,
    intro: app.intro,
@ -12,6 +12,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)

  const { userId } = await authUser({ req, authToken: true, authApiKey: true });

+ const qaModel = global.qaModels[0];
+
  const { _id } = await Bill.create({
    userId,
    appName: name,

@ -25,7 +27,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
  },
  {
    moduleName: 'QA 拆分',
-   model: global.qaModel.name,
+   model: qaModel?.name,
    amount: 0,
    tokenLen: 0
  }
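Note the switch from `global.qaModel.name` to `qaModel?.name`: with model lists, an empty `QAModels` array makes `global.qaModels[0]` undefined, and the optional chaining keeps bill creation from throwing. A one-line illustration:

```ts
const qaModels: { name: string }[] = [];
const qaModel = qaModels[0]; // undefined when the list is empty
console.log(qaModel?.name); // undefined, instead of a TypeError from qaModel.name
```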
@ -4,7 +4,6 @@ import { connectToDatabase } from '@/service/mongo';
import { authUser } from '@fastgpt/support/user/auth';
import type { CreateQuestionGuideParams } from '@/global/core/api/aiReq.d';
import { pushQuestionGuideBill } from '@/service/common/bill/push';
- import { defaultQGModel } from '@/pages/api/system/getInitData';
import { createQuestionGuide } from '@fastgpt/core/ai/functions/createQuestionGuide';

export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {

@ -23,9 +22,11 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
    throw new Error('user not found');
  }

+ const qgModel = global.qgModels[0];
+
  const { result, tokens } = await createQuestionGuide({
    messages,
-   model: (global.qgModel || defaultQGModel).model
+   model: qgModel.model
  });

  jsonRes(res, {
@ -3,7 +3,7 @@ import { jsonRes } from '@/service/response';
import { connectToDatabase } from '@/service/mongo';
import { MongoDataset } from '@fastgpt/core/dataset/schema';
import { authUser } from '@fastgpt/support/user/auth';
- import { getVectorModel } from '@/service/utils/data';
+ import { getVectorModel } from '@/service/core/ai/model';
import type { DatasetsItemType } from '@/types/core/dataset';

export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {

@ -2,7 +2,7 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authUser } from '@fastgpt/support/user/auth';
import { PgClient } from '@/service/pg';
- import { withNextCors } from '@/service/utils/tools';
+ import { withNextCors } from '@fastgpt/common/tools/nextjs';
import { PgDatasetTableName } from '@/constants/plugin';
import { connectToDatabase } from '@/service/mongo';

@ -8,7 +8,7 @@ import { findAllChildrenIds } from '../delete';
import QueryStream from 'pg-query-stream';
import { PgClient } from '@/service/pg';
import { addLog } from '@/service/utils/tools';
- import { responseWriteController } from '@/service/common/stream';
+ import { responseWriteController } from '@fastgpt/common/tools/stream';

export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
  try {

@ -7,10 +7,10 @@ import { jsonRes } from '@/service/response';
import { connectToDatabase } from '@/service/mongo';
import { authDataset } from '@/service/utils/auth';
import { authUser } from '@fastgpt/support/user/auth';
- import { withNextCors } from '@/service/utils/tools';
+ import { withNextCors } from '@fastgpt/common/tools/nextjs';
import { PgDatasetTableName } from '@/constants/plugin';
import { insertData2Dataset, PgClient } from '@/service/pg';
- import { getVectorModel } from '@/service/utils/data';
+ import { getVectorModel } from '@/service/core/ai/model';
import { getVector } from '@/pages/api/openapi/plugin/vector';
import { DatasetDataItemType } from '@/types/core/dataset/data';
import { countPromptTokens } from '@/utils/common/tiktoken';

@ -5,15 +5,15 @@ import { connectToDatabase, TrainingData } from '@/service/mongo';
import { MongoDataset } from '@fastgpt/core/dataset/schema';
import { authUser } from '@fastgpt/support/user/auth';
import { authDataset } from '@/service/utils/auth';
- import { withNextCors } from '@/service/utils/tools';
+ import { withNextCors } from '@fastgpt/common/tools/nextjs';
import { TrainingModeEnum } from '@/constants/plugin';
import { startQueue } from '@/service/utils/tools';
- import { getVectorModel } from '@/service/utils/data';
import { DatasetDataItemType } from '@/types/core/dataset/data';
import { countPromptTokens } from '@/utils/common/tiktoken';
import type { PushDataResponse } from '@/global/core/api/datasetRes.d';
import type { PushDataProps } from '@/global/core/api/datasetReq.d';
import { authFileIdValid } from '@/service/dataset/auth';
+ import { getVectorModel } from '@/service/core/ai/model';

const modeMap = {
  [TrainingModeEnum.index]: true,

@ -71,7 +71,7 @@ export async function pushDataToKb({
  if (mode === TrainingModeEnum.index) {
    const vectorModel = (await MongoDataset.findById(kbId, 'vectorModel'))?.vectorModel;

-   return getVectorModel(vectorModel || global.vectorModels[0].model);
+   return getVectorModel(vectorModel);
  }
  return global.vectorModels[0];
})()

@ -79,7 +79,7 @@ export async function pushDataToKb({

const modeMaxToken = {
  [TrainingModeEnum.index]: vectorModel.maxToken * 1.5,
- [TrainingModeEnum.qa]: global.qaModel.maxToken * 0.8
+ [TrainingModeEnum.qa]: global.qaModels[0].maxToken * 0.8
};

// filter repeat or equal content

@ -2,7 +2,7 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authUser } from '@fastgpt/support/user/auth';
import { PgClient } from '@/service/pg';
- import { withNextCors } from '@/service/utils/tools';
+ import { withNextCors } from '@fastgpt/common/tools/nextjs';
import { connectToDatabase } from '@/service/mongo';
import { MongoDataset } from '@fastgpt/core/dataset/schema';
import { getVector } from '@/pages/api/openapi/plugin/vector';

@ -2,7 +2,7 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase } from '@/service/mongo';
import { authUser } from '@fastgpt/support/user/auth';
- import { getVectorModel } from '@/service/utils/data';
+ import { getVectorModel } from '@/service/core/ai/model';
import { MongoDataset } from '@fastgpt/core/dataset/schema';

export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {

@ -2,7 +2,7 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase } from '@/service/mongo';
import { authUser } from '@fastgpt/support/user/auth';
- import { getVectorModel } from '@/service/utils/data';
+ import { getVectorModel } from '@/service/core/ai/model';
import type { DatasetsItemType } from '@/types/core/dataset';
import { DatasetTypeEnum } from '@fastgpt/core/dataset/constant';
import { MongoDataset } from '@fastgpt/core/dataset/schema';

@ -2,7 +2,7 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authUser } from '@fastgpt/support/user/auth';
import { PgClient } from '@/service/pg';
- import { withNextCors } from '@/service/utils/tools';
+ import { withNextCors } from '@fastgpt/common/tools/nextjs';
import { getVector } from '../../openapi/plugin/vector';
import { PgDatasetTableName } from '@/constants/plugin';
import { MongoDataset } from '@fastgpt/core/dataset/schema';

@ -1,7 +1,7 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authBalanceByUid, authUser } from '@fastgpt/support/user/auth';
- import { withNextCors } from '@/service/utils/tools';
+ import { withNextCors } from '@fastgpt/common/tools/nextjs';
import { getAIApi } from '@fastgpt/core/ai/config';
import { pushGenerateVectorBill } from '@/service/common/bill/push';
import { connectToDatabase } from '@/service/mongo';

@ -1,5 +1,5 @@
import type { NextApiRequest, NextApiResponse } from 'next';
- import { withNextCors } from '@/service/utils/tools';
+ import { withNextCors } from '@fastgpt/common/tools/nextjs';
import ChatCompletion from '@/pages/api/v1/chat/completions';

export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse) {

@ -6,8 +6,9 @@ import { MongoUser } from '@fastgpt/support/user/schema';
import type { InitShareChatResponse } from '@/global/support/api/outLinkRes.d';
import { authApp } from '@/service/utils/auth';
import { HUMAN_ICON } from '@/constants/chat';
- import { getChatModelNameList, getGuideModule } from '@/components/ChatBox/utils';
+ import { getGuideModule } from '@/components/ChatBox/utils';
import { authShareChatInit } from '@fastgpt/support/outLink/auth';
+ import { getChatModelNameListByModules } from '@/service/core/app/module';

/* init share chat window */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {

@ -51,7 +52,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
  userAvatar: user?.avatar || HUMAN_ICON,
  app: {
    userGuideModule: getGuideModule(app.modules),
-   chatModels: getChatModelNameList(app.modules),
+   chatModels: getChatModelNameListByModules(app.modules),
    name: app.name,
    avatar: app.avatar,
    intro: app.intro
@ -4,10 +4,23 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { readFileSync } from 'fs';
import type { InitDateResponse } from '@/global/common/api/systemRes';
- import type { VectorModelItemType, FunctionModelItemType } from '@/types/model';
import { formatPrice } from '@fastgpt/common/bill';
import { getTikTokenEnc } from '@/utils/common/tiktoken';
import { initHttpAgent } from '@fastgpt/core/init';
+ import {
+   defaultChatModels,
+   defaultQAModels,
+   defaultCQModels,
+   defaultExtractModels,
+   defaultQGModels,
+   defaultVectorModels
+ } from '@/constants/model';
+ import {
+   ChatModelItemType,
+   FunctionModelItemType,
+   LLMModelItemType,
+   VectorModelItemType
+ } from '@/types/model';

export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  getInitConfig();

@ -17,7 +30,10 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
  data: {
    feConfigs: global.feConfigs,
    chatModels: global.chatModels,
-   qaModel: global.qaModel,
+   qaModels: global.qaModels,
+   cqModels: global.cqModels,
+   extractModels: global.extractModels,
+   qgModels: global.qgModels,
    vectorModels: global.vectorModels,
    priceMd: global.priceMd,
    systemVersion: global.systemVersion || '0.0.0'
@ -42,72 +58,6 @@ const defaultFeConfigs: FeConfigsType = {
  },
  scripts: []
};
- const defaultChatModels = [
-   {
-     model: 'gpt-3.5-turbo',
-     name: 'GPT35-4k',
-     contextMaxToken: 4000,
-     quoteMaxToken: 2400,
-     maxTemperature: 1.2,
-     price: 0
-   },
-   {
-     model: 'gpt-3.5-turbo-16k',
-     name: 'GPT35-16k',
-     contextMaxToken: 16000,
-     quoteMaxToken: 8000,
-     maxTemperature: 1.2,
-     price: 0
-   },
-   {
-     model: 'gpt-4',
-     name: 'GPT4-8k',
-     contextMaxToken: 8000,
-     quoteMaxToken: 4000,
-     maxTemperature: 1.2,
-     price: 0
-   }
- ];
- const defaultQAModel = {
-   model: 'gpt-3.5-turbo-16k',
-   name: 'GPT35-16k',
-   maxToken: 16000,
-   price: 0
- };
- export const defaultExtractModel: FunctionModelItemType = {
-   model: 'gpt-3.5-turbo-16k',
-   name: 'GPT35-16k',
-   maxToken: 16000,
-   price: 0,
-   prompt: '',
-   functionCall: true
- };
- export const defaultCQModel: FunctionModelItemType = {
-   model: 'gpt-3.5-turbo-16k',
-   name: 'GPT35-16k',
-   maxToken: 16000,
-   price: 0,
-   prompt: '',
-   functionCall: true
- };
- export const defaultQGModel: FunctionModelItemType = {
-   model: 'gpt-3.5-turbo',
-   name: 'FastAI-4k',
-   maxToken: 4000,
-   price: 1.5,
-   prompt: '',
-   functionCall: false
- };
-
- const defaultVectorModels: VectorModelItemType[] = [
-   {
-     model: 'text-embedding-ada-002',
-     name: 'Embedding-2',
-     price: 0,
-     defaultToken: 500,
-     maxToken: 3000
-   }
- ];
-
export function initGlobal() {
  // init tikToken
@ -127,7 +77,16 @@ export function getInitConfig() {

  const filename =
    process.env.NODE_ENV === 'development' ? 'data/config.local.json' : '/app/data/config.json';
- const res = JSON.parse(readFileSync(filename, 'utf-8'));
+ const res = JSON.parse(readFileSync(filename, 'utf-8')) as {
+   FeConfig: FeConfigsType;
+   SystemParams: SystemEnvType;
+   ChatModels: ChatModelItemType[];
+   QAModels: LLMModelItemType[];
+   CQModels: FunctionModelItemType[];
+   ExtractModels: FunctionModelItemType[];
+   QGModels: LLMModelItemType[];
+   VectorModels: VectorModelItemType[];
+ };

  console.log(`System Version: ${global.systemVersion}`);

@ -137,11 +96,13 @@ export function getInitConfig() {
    ? { ...defaultSystemEnv, ...res.SystemParams }
    : defaultSystemEnv;
  global.feConfigs = res.FeConfig ? { ...defaultFeConfigs, ...res.FeConfig } : defaultFeConfigs;
+
  global.chatModels = res.ChatModels || defaultChatModels;
- global.qaModel = res.QAModel || defaultQAModel;
- global.extractModel = res.ExtractModel || defaultExtractModel;
- global.cqModel = res.CQModel || defaultCQModel;
- global.qgModel = res.QGModel || defaultQGModel;
+ global.qaModels = res.QAModels || defaultQAModels;
+ global.cqModels = res.CQModels || defaultCQModels;
+ global.extractModels = res.ExtractModels || defaultExtractModels;
+ global.qgModels = res.QGModels || defaultQGModels;
+
  global.vectorModels = res.VectorModels || defaultVectorModels;
} catch (error) {
  setDefaultData();

@ -152,13 +113,27 @@ export function getInitConfig() {
export function setDefaultData() {
  global.systemEnv = defaultSystemEnv;
  global.feConfigs = defaultFeConfigs;
+
  global.chatModels = defaultChatModels;
- global.qaModel = defaultQAModel;
+ global.qaModels = defaultQAModels;
+ global.cqModels = defaultCQModels;
+ global.extractModels = defaultExtractModels;
+ global.qgModels = defaultQGModels;
+
  global.vectorModels = defaultVectorModels;
- global.extractModel = defaultExtractModel;
- global.cqModel = defaultCQModel;
- global.qgModel = defaultQGModel;
  global.priceMd = '';
+
+ console.log('use default config');
+ console.log({
+   feConfigs: defaultFeConfigs,
+   systemEnv: defaultSystemEnv,
+   chatModels: defaultChatModels,
+   qaModels: defaultQAModels,
+   cqModels: defaultCQModels,
+   extractModels: defaultExtractModels,
+   qgModels: defaultQGModels,
+   vectorModels: defaultVectorModels
+ });
}

export function getSystemVersion() {
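`getInitConfig` now casts the parsed JSON to a typed shape and falls back to the defaults field by field, so a partial `config.json` still boots. A minimal sketch of the same load-with-fallback pattern, reduced to a hypothetical two-field config (types and file path are illustrative):

```ts
import { readFileSync } from 'fs';

// Minimal sketch: parse a JSON config and fall back per field,
// mirroring the ChatModels/QAModels keys read above.
type ModelItem = { model: string; name: string; price: number; maxToken: number };
type ConfigFile = { ChatModels?: ModelItem[]; QAModels?: ModelItem[] };

const defaults: Required<ConfigFile> = {
  ChatModels: [{ model: 'gpt-3.5-turbo', name: 'GPT35-4k', price: 0, maxToken: 4000 }],
  QAModels: [{ model: 'gpt-3.5-turbo-16k', name: 'GPT35-16k', price: 0, maxToken: 16000 }]
};

function loadConfig(path: string): Required<ConfigFile> {
  try {
    const res = JSON.parse(readFileSync(path, 'utf-8')) as ConfigFile;
    return {
      ChatModels: res.ChatModels || defaults.ChatModels,
      QAModels: res.QAModels || defaults.QAModels
    };
  } catch {
    return defaults; // missing or invalid file: behave like setDefaultData()
  }
}

console.log(loadConfig('data/config.local.json').ChatModels[0].name);
```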
@ -187,10 +162,18 @@ ${global.vectorModels
${global.chatModels
  ?.map((item) => `| 对话-${item.name} | ${formatPrice(item.price, 1000)} |`)
  .join('\n')}
- | 文件QA拆分 | ${formatPrice(global.qaModel?.price, 1000)} |
- | 高级编排 - 问题分类 | ${formatPrice(global.cqModel?.price, 1000)} |
- | 高级编排 - 内容提取 | ${formatPrice(global.extractModel?.price, 1000)} |
- | 下一步指引 | ${formatPrice(global.qgModel?.price, 1000)} |
+ ${global.qaModels
+   ?.map((item) => `| 文件QA拆分-${item.name} | ${formatPrice(item.price, 1000)} |`)
+   .join('\n')}
+ ${global.cqModels
+   ?.map((item) => `| 问题分类-${item.name} | ${formatPrice(item.price, 1000)} |`)
+   .join('\n')}
+ ${global.extractModels
+   ?.map((item) => `| 内容提取-${item.name} | ${formatPrice(item.price, 1000)} |`)
+   .join('\n')}
+ ${global.qgModels
+   ?.map((item) => `| 下一步指引-${item.name} | ${formatPrice(item.price, 1000)} |`)
+   .join('\n')}
`;
console.log(global.priceMd);
}
@ -2,8 +2,8 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { MongoUser } from '@fastgpt/support/user/schema';
- import { setCookie } from '@/service/utils/tools';
- import { generateToken } from '@fastgpt/support/user/tools';
+ import { setCookie } from '@fastgpt/support/user/auth';
+ import { generateToken } from '@fastgpt/support/user/auth';
import { connectToDatabase } from '@/service/mongo';

export default async function handler(req: NextApiRequest, res: NextApiResponse) {

@ -1,7 +1,7 @@
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
- import { clearCookie } from '@/service/utils/tools';
+ import { clearCookie } from '@fastgpt/support/user/auth';

export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
  try {
@ -3,7 +3,8 @@ import { authApp } from '@/service/utils/auth';
import { authUser } from '@fastgpt/support/user/auth';
import { AuthUserTypeEnum } from '@fastgpt/support/user/auth';
import { sseErrRes, jsonRes } from '@/service/response';
- import { addLog, withNextCors } from '@/service/utils/tools';
+ import { addLog } from '@/service/utils/tools';
+ import { withNextCors } from '@fastgpt/common/tools/nextjs';
import { ChatRoleEnum, ChatSourceEnum, sseResponseEventEnum } from '@/constants/chat';
import {
  dispatchHistory,

@ -21,7 +22,7 @@ import type { MessageItemType } from '@/types/core/chat/type';
import { gptMessage2ChatType, textAdaptGptResponse } from '@/utils/adapt';
import { getChatHistory } from './getHistory';
import { saveChat } from '@/service/utils/chat/saveChat';
- import { sseResponse } from '@/service/utils/tools';
+ import { responseWrite } from '@fastgpt/common/tools/stream';
import { TaskResponseKeyEnum } from '@/constants/chat';
import { FlowModuleTypeEnum, initModuleType } from '@/constants/flow';
import { AppModuleItemType, RunningModuleItemType } from '@/types/app';

@ -217,7 +218,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
  const feResponseData = isOwner ? responseData : selectShareResponse({ responseData });

  if (stream) {
-   sseResponse({
+   responseWrite({
      res,
      event: detail ? sseResponseEventEnum.answer : undefined,
      data: textAdaptGptResponse({

@ -225,14 +226,14 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
        finish_reason: 'stop'
      })
    });
-   sseResponse({
+   responseWrite({
      res,
      event: detail ? sseResponseEventEnum.answer : undefined,
      data: '[DONE]'
    });

    if (responseDetail && detail) {
-     sseResponse({
+     responseWrite({
        res,
        event: sseResponseEventEnum.appStreamResponse,
        data: JSON.stringify(feResponseData)

@ -323,13 +324,16 @@ export async function dispatchModules({
  let chatAnswerText = ''; // AI answer
  let runningTime = Date.now();

- function pushStore({
-   answerText = '',
-   responseData
- }: {
-   answerText?: string;
-   responseData?: ChatHistoryItemResType | ChatHistoryItemResType[];
- }) {
+ function pushStore(
+   { inputs = [] }: RunningModuleItemType,
+   {
+     answerText = '',
+     responseData
+   }: {
+     answerText?: string;
+     responseData?: ChatHistoryItemResType | ChatHistoryItemResType[];
+   }
+ ) {
    const time = Date.now();
    if (responseData) {
      if (Array.isArray(responseData)) {

@ -342,7 +346,12 @@ export async function dispatchModules({
      }
    }
    runningTime = time;
-   chatAnswerText += answerText;
+
+   const isResponseAnswerText =
+     inputs.find((item) => item.key === SystemInputEnum.isResponseAnswerText)?.value ?? true;
+   if (isResponseAnswerText) {
+     chatAnswerText += answerText;
+   }
  }
  function moduleInput(
    module: RunningModuleItemType,

@ -376,7 +385,7 @@ export async function dispatchModules({
  module: RunningModuleItemType,
  result: Record<string, any> = {}
): Promise<any> {
- pushStore(result);
+ pushStore(module, result);
  return Promise.all(
    module.outputs.map((outputItem) => {
      if (result[outputItem.key] === undefined) return;

@ -505,7 +514,7 @@ export function responseStatus({
  name?: string;
}) {
  if (!name) return;
- sseResponse({
+ responseWrite({
    res,
    event: sseResponseEventEnum.moduleStatus,
    data: JSON.stringify({
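The `?? true` default on `isResponseAnswerText` matters here: modules created before the new switch have no such input and should keep answering, while a module explicitly set to `false` must stay silent. `||` would get the `false` case wrong, as a short illustration shows:

```ts
// `??` only falls back on null/undefined, so an explicit `false` survives.
const legacyValue: boolean | undefined = undefined;
const switchedOff: boolean | undefined = false;

console.log(legacyValue ?? true); // true  - old modules keep streaming answers
console.log(switchedOff ?? true); // false - the new "返回AI内容" switch can silence a module
console.log(switchedOff || true); // true  - `||` would incorrectly re-enable it
```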
@ -1,4 +1,4 @@
- import React from 'react';
+ import React, { useMemo, useState } from 'react';
import MyModal from '@/components/MyModal';
import { useTranslation } from 'react-i18next';
import { EditFormType } from '@/utils/app';

@ -11,43 +11,65 @@ import {
  Link,
  ModalBody,
  ModalFooter,
+ Switch,
  Textarea
} from '@chakra-ui/react';
import MyTooltip from '@/components/MyTooltip';
import { QuestionOutlineIcon } from '@chakra-ui/icons';
- import { defaultQuotePrompt, defaultQuoteTemplate } from '@/global/core/prompt/AIChat';
+ import { Prompt_QuotePromptList, Prompt_QuoteTemplateList } from '@/global/core/prompt/AIChat';
- import { feConfigs } from '@/web/common/store/static';
+ import { chatModelList, feConfigs } from '@/web/common/store/static';
+ import MySlider from '@/components/Slider';
+ import { SystemInputEnum } from '@/constants/app';
+ import dynamic from 'next/dynamic';
+ import { PromptTemplateItem } from '@fastgpt/core/ai/type';
+
+ const PromptTemplate = dynamic(() => import('@/components/PromptTemplate'));

const AIChatSettingsModal = ({
+ isAdEdit,
  onClose,
  onSuccess,
  defaultData
}: {
+ isAdEdit?: boolean;
  onClose: () => void;
  onSuccess: (e: EditFormType['chatModel']) => void;
  defaultData: EditFormType['chatModel'];
}) => {
  const { t } = useTranslation();
+ const [refresh, setRefresh] = useState(false);
+
- const { register, handleSubmit } = useForm({
+ const { register, handleSubmit, getValues, setValue } = useForm({
    defaultValues: defaultData
  });
+
+ const [selectTemplateData, setSelectTemplateData] = useState<{
+   title: string;
+   key: 'quoteTemplate' | 'quotePrompt';
+   templates: PromptTemplateItem[];
+ }>();
+
+ const tokenLimit = useMemo(() => {
+   return chatModelList.find((item) => item.model === getValues('model'))?.maxToken || 4000;
+ }, [getValues, refresh]);
+
  const LabelStyles: BoxProps = {
-   fontWeight: 'bold',
-   mb: 1,
    fontSize: ['sm', 'md']
  };
+ const selectTemplateBtn: BoxProps = {
+   color: 'myBlue.600',
+   cursor: 'pointer'
+ };
+
  return (
    <MyModal
      isOpen
      title={
        <Flex alignItems={'flex-end'}>
-         {t('app.Quote Prompt Settings')}
+         {t('app.AI Settings')}
          {feConfigs?.show_doc && (
            <Link
-             href={'https://doc.fastgpt.run/docs/use-cases/prompt/'}
+             href={'https://doc.fastgpt.run/docs/use-cases/ai_settings/'}
              target={'_blank'}
              ml={1}
              textDecoration={'underline'}

@ -59,39 +81,134 @@ const AIChatSettingsModal = ({
        )}
      </Flex>
    }
+   isCentered
    w={'700px'}
+   h={['90vh', 'auto']}
  >
-   <ModalBody>
+   <ModalBody flex={['1 0 0', 'auto']} overflowY={'auto'}>
+     {isAdEdit && (
+       <Flex alignItems={'center'}>
+         <Box {...LabelStyles} w={'80px'}>
+           返回AI内容
+         </Box>
+         <Box flex={1} ml={'10px'}>
+           <Switch
+             isChecked={getValues(SystemInputEnum.isResponseAnswerText)}
+             size={'lg'}
+             onChange={(e) => {
+               const value = e.target.checked;
+               setValue(SystemInputEnum.isResponseAnswerText, value);
+               setRefresh((state) => !state);
+             }}
+           />
+         </Box>
+       </Flex>
+     )}
+     <Flex alignItems={'center'} mb={10} mt={isAdEdit ? 8 : 5}>
+       <Box {...LabelStyles} mr={2} w={'80px'}>
+         温度
+       </Box>
+       <Box flex={1} ml={'10px'}>
+         <MySlider
+           markList={[
+             { label: '严谨', value: 0 },
+             { label: '发散', value: 10 }
+           ]}
+           width={'95%'}
+           min={0}
+           max={10}
+           value={getValues('temperature')}
+           onChange={(e) => {
+             setValue('temperature', e);
+             setRefresh(!refresh);
+           }}
+         />
+       </Box>
+     </Flex>
+     <Flex alignItems={'center'} mt={12} mb={10}>
+       <Box {...LabelStyles} mr={2} w={'80px'}>
+         回复上限
+       </Box>
+       <Box flex={1} ml={'10px'}>
+         <MySlider
+           markList={[
+             { label: '100', value: 100 },
+             { label: `${tokenLimit}`, value: tokenLimit }
+           ]}
+           width={'95%'}
+           min={100}
+           max={tokenLimit}
+           step={50}
+           value={getValues('maxToken')}
+           onChange={(val) => {
+             setValue('maxToken', val);
+             setRefresh(!refresh);
+           }}
+         />
+       </Box>
+     </Flex>
      <Box>
-       <Box {...LabelStyles}>
+       <Flex {...LabelStyles} mb={1}>
          引用内容模板
          <MyTooltip
-           label={t('template.Quote Content Tip', { default: defaultQuoteTemplate })}
+           label={t('template.Quote Content Tip', {
+             default: Prompt_QuoteTemplateList[0].value
+           })}
            forceShow
          >
            <QuestionOutlineIcon display={['none', 'inline']} ml={1} />
          </MyTooltip>
-       </Box>
+       <Box flex={1} />
+       <Box
+         {...selectTemplateBtn}
+         onClick={() =>
+           setSelectTemplateData({
+             title: '选择引用内容模板',
+             key: 'quoteTemplate',
+             templates: Prompt_QuoteTemplateList
+           })
+         }
+       >
+         选择模板
+       </Box>
+     </Flex>
      <Textarea
-       rows={4}
+       rows={6}
-       placeholder={t('template.Quote Content Tip', { default: defaultQuoteTemplate }) || ''}
+       placeholder={
+         t('template.Quote Content Tip', { default: Prompt_QuoteTemplateList[0].value }) || ''
+       }
        borderColor={'myGray.100'}
        {...register('quoteTemplate')}
      />
    </Box>
    <Box mt={4}>
-     <Box {...LabelStyles}>
+     <Flex {...LabelStyles} mb={1}>
        引用内容提示词
        <MyTooltip
-         label={t('template.Quote Prompt Tip', { default: defaultQuotePrompt })}
+         label={t('template.Quote Prompt Tip', { default: Prompt_QuotePromptList[0].value })}
          forceShow
        >
          <QuestionOutlineIcon display={['none', 'inline']} ml={1} />
        </MyTooltip>
-     </Box>
+     <Box flex={1} />
+     <Box
+       {...selectTemplateBtn}
+       onClick={() =>
+         setSelectTemplateData({
+           title: '选择引用提示词模板',
+           key: 'quotePrompt',
+           templates: Prompt_QuotePromptList
+         })
+       }
+     >
+       选择模板
+     </Box>
+   </Flex>
    <Textarea
-     rows={6}
+     rows={11}
-     placeholder={t('template.Quote Prompt Tip', { default: defaultQuotePrompt }) || ''}
+     placeholder={
+       t('template.Quote Prompt Tip', { default: Prompt_QuotePromptList[0].value }) || ''
+     }
      borderColor={'myGray.100'}
      {...register('quotePrompt')}
    />

@ -105,6 +222,14 @@ const AIChatSettingsModal = ({
      {t('Confirm')}
    </Button>
  </ModalFooter>
+ {!!selectTemplateData && (
+   <PromptTemplate
+     title={selectTemplateData.title}
+     templates={selectTemplateData.templates}
+     onClose={() => setSelectTemplateData(undefined)}
+     onSuccess={(e) => setValue(selectTemplateData.key, e)}
+   />
+ )}
</MyModal>
);
};
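The modal reads slider and switch values with `getValues` instead of subscribing via `watch`, so it toggles a `refresh` flag to force a re-render after each `setValue`. A minimal sketch of the pattern, assuming react-hook-form (the hook name is illustrative):

```ts
import { useState } from 'react';
import { useForm } from 'react-hook-form';

// Minimal sketch: getValues() is not reactive, so pair setValue with a
// state toggle to make the component re-render and show the new value.
function useNonReactiveForm(defaultValues: { temperature: number }) {
  const [, setRefresh] = useState(false);
  const { getValues, setValue } = useForm({ defaultValues });

  const update = (temperature: number) => {
    setValue('temperature', temperature);
    setRefresh((state) => !state); // force re-render; getValues() alone won't
  };
  return { value: () => getValues('temperature'), update };
}
```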
@ -0,0 +1,229 @@
|
|||||||
|
import React, { useCallback, useRef, useState } from 'react';
|
||||||
|
import { Box, Flex, IconButton, useTheme, useDisclosure } from '@chakra-ui/react';
|
||||||
|
import { SmallCloseIcon } from '@chakra-ui/icons';
|
||||||
|
import { FlowInputItemTypeEnum } from '@/constants/flow';
|
||||||
|
import { FlowOutputTargetItemType } from '@/types/core/app/flow';
|
||||||
|
import { AppModuleItemType } from '@/types/app';
|
||||||
|
import { useRequest } from '@/web/common/hooks/useRequest';
|
||||||
|
import type { AppSchema } from '@/types/mongoSchema';
|
||||||
|
import { useUserStore } from '@/web/support/store/user';
|
||||||
|
import { useTranslation } from 'next-i18next';
|
||||||
|
import { useCopyData } from '@/web/common/hooks/useCopyData';
|
||||||
|
import { AppTypeEnum } from '@/constants/app';
|
||||||
|
import dynamic from 'next/dynamic';
|
||||||
|
|
||||||
|
import MyIcon from '@/components/Icon';
|
||||||
|
import MyTooltip from '@/components/MyTooltip';
|
||||||
|
import ChatTest, { type ChatTestComponentRef } from './ChatTest';
|
||||||
|
import { useFlowStore } from './Provider';
|
||||||
|
|
||||||
|
const ImportSettings = dynamic(() => import('./ImportSettings'));
|
||||||
|
|
||||||
|
type Props = { app: AppSchema; onCloseSettings: () => void };
|
||||||
|
|
||||||
|
const RenderHeaderContainer = React.memo(function RenderHeaderContainer({
|
||||||
|
app,
|
||||||
|
ChatTestRef,
|
||||||
|
testModules,
|
||||||
|
setTestModules,
|
||||||
|
onCloseSettings
|
||||||
|
}: Props & {
|
||||||
|
ChatTestRef: React.RefObject<ChatTestComponentRef>;
|
||||||
|
testModules?: AppModuleItemType[];
|
||||||
|
setTestModules: React.Dispatch<AppModuleItemType[] | undefined>;
|
||||||
|
}) {
|
||||||
|
const theme = useTheme();
|
||||||
|
const { t } = useTranslation();
|
||||||
|
const { copyData } = useCopyData();
|
||||||
|
const { isOpen: isOpenImport, onOpen: onOpenImport, onClose: onCloseImport } = useDisclosure();
|
||||||
|
const { updateAppDetail } = useUserStore();
|
||||||
|
|
||||||
|
const { nodes, edges, onFixView } = useFlowStore();
|
||||||
|
|
||||||
|
+  const flow2AppModules = useCallback(() => {
+    const modules: AppModuleItemType[] = nodes.map((item) => ({
+      moduleId: item.data.moduleId,
+      name: item.data.name,
+      flowType: item.data.flowType,
+      showStatus: item.data.showStatus,
+      position: item.position,
+      inputs: item.data.inputs.map((item) => ({
+        ...item,
+        connected: item.connected ?? item.type !== FlowInputItemTypeEnum.target
+      })),
+      outputs: item.data.outputs.map((item) => ({
+        ...item,
+        targets: [] as FlowOutputTargetItemType[]
+      }))
+    }));
+
+    // update inputs and outputs
+    modules.forEach((module) => {
+      module.inputs.forEach((input) => {
+        input.connected =
+          input.connected ||
+          !!edges.find(
+            (edge) => edge.target === module.moduleId && edge.targetHandle === input.key
+          );
+      });
+      module.outputs.forEach((output) => {
+        output.targets = edges
+          .filter(
+            (edge) =>
+              edge.source === module.moduleId &&
+              edge.sourceHandle === output.key &&
+              edge.targetHandle
+          )
+          .map((edge) => ({
+            moduleId: edge.target,
+            key: edge.targetHandle || ''
+          }));
+      });
+    });
+    return modules;
+  }, [edges, nodes]);
+
+  const { mutate: onclickSave, isLoading } = useRequest({
+    mutationFn: () => {
+      const modules = flow2AppModules();
+      // check required connect
+      for (let i = 0; i < modules.length; i++) {
+        const item = modules[i];
+        if (item.inputs.find((input) => input.required && !input.connected)) {
+          return Promise.reject(`【${item.name}】存在未连接的必填输入`);
+        }
+        if (item.inputs.find((input) => input.valueCheck && !input.valueCheck(input.value))) {
+          return Promise.reject(`【${item.name}】存在为填写的必填项`);
+        }
+      }
+
+      return updateAppDetail(app._id, {
+        modules: modules,
+        type: AppTypeEnum.advanced
+      });
+    },
+    successToast: '保存配置成功',
+    errorToast: '保存配置异常',
+    onSuccess() {
+      ChatTestRef.current?.resetChatTest();
+    }
+  });
+
+  return (
+    <>
+      <Flex
+        py={3}
+        px={[2, 5, 8]}
+        borderBottom={theme.borders.base}
+        alignItems={'center'}
+        userSelect={'none'}
+      >
+        <MyTooltip label={'返回'} offset={[10, 10]}>
+          <IconButton
+            size={'sm'}
+            icon={<MyIcon name={'back'} w={'14px'} />}
+            borderRadius={'md'}
+            borderColor={'myGray.300'}
+            variant={'base'}
+            aria-label={''}
+            onClick={() => {
+              onCloseSettings();
+              onFixView();
+            }}
+          />
+        </MyTooltip>
+        <Box ml={[3, 6]} fontSize={['md', '2xl']} flex={1}>
+          {app.name}
+        </Box>
+
+        <MyTooltip label={t('app.Import Configs')}>
+          <IconButton
+            mr={[3, 6]}
+            icon={<MyIcon name={'importLight'} w={['14px', '16px']} />}
+            borderRadius={'lg'}
+            variant={'base'}
+            aria-label={'save'}
+            onClick={onOpenImport}
+          />
+        </MyTooltip>
+        <MyTooltip label={t('app.Export Configs')}>
+          <IconButton
+            mr={[3, 6]}
+            icon={<MyIcon name={'export'} w={['14px', '16px']} />}
+            borderRadius={'lg'}
+            variant={'base'}
+            aria-label={'save'}
+            onClick={() =>
+              copyData(
+                JSON.stringify(flow2AppModules(), null, 2),
+                t('app.Export Config Successful')
+              )
+            }
+          />
+        </MyTooltip>
+
+        {testModules ? (
+          <IconButton
+            mr={[3, 6]}
+            icon={<SmallCloseIcon fontSize={'25px'} />}
+            variant={'base'}
+            color={'myGray.600'}
+            borderRadius={'lg'}
+            aria-label={''}
+            onClick={() => setTestModules(undefined)}
+          />
+        ) : (
+          <MyTooltip label={'测试对话'}>
+            <IconButton
+              mr={[3, 6]}
+              icon={<MyIcon name={'chat'} w={['14px', '16px']} />}
+              borderRadius={'lg'}
+              aria-label={'save'}
+              variant={'base'}
+              onClick={() => {
+                setTestModules(flow2AppModules());
+              }}
+            />
+          </MyTooltip>
+        )}
+
+        <MyTooltip label={'保存配置'}>
+          <IconButton
+            icon={<MyIcon name={'save'} w={['14px', '16px']} />}
+            borderRadius={'lg'}
+            isLoading={isLoading}
+            aria-label={'save'}
+            onClick={onclickSave}
+          />
+        </MyTooltip>
+      </Flex>
+      {isOpenImport && <ImportSettings onClose={onCloseImport} />}
+    </>
+  );
+});
+
+const Header = (props: Props) => {
+  const { app } = props;
+  const ChatTestRef = useRef<ChatTestComponentRef>(null);
+
+  const [testModules, setTestModules] = useState<AppModuleItemType[]>();
+
+  return (
+    <>
+      <RenderHeaderContainer
+        {...props}
+        ChatTestRef={ChatTestRef}
+        testModules={testModules}
+        setTestModules={setTestModules}
+      />
+      <ChatTest
+        ref={ChatTestRef}
+        modules={testModules}
+        app={app}
+        onClose={() => setTestModules(undefined)}
+      />
+    </>
+  );
+};
+
+export default React.memo(Header);
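For reference, a minimal, self-contained sketch of the connected-input rule that flow2AppModules applies above. The types here are simplified stand-ins for the real ones in @/types/core/app/flow and reactflow: an input counts as connected once any edge ends on its handle.

// Simplified stand-in types; the real ones live in @/types/core/app/flow.
type SketchEdge = { source: string; sourceHandle: string; target: string; targetHandle: string };
type SketchInput = { key: string; connected: boolean };
type SketchModule = { moduleId: string; inputs: SketchInput[] };

function markConnectedInputs(modules: SketchModule[], edges: SketchEdge[]): SketchModule[] {
  return modules.map((module) => ({
    ...module,
    inputs: module.inputs.map((input) => ({
      ...input,
      // an input is connected if it already was, or if any edge ends on its handle
      connected:
        input.connected ||
        edges.some((edge) => edge.target === module.moduleId && edge.targetHandle === input.key)
    }))
  }));
}

// Example: one edge feeding the chat node's userChatInput handle.
const sketchModules: SketchModule[] = [
  { moduleId: 'chat', inputs: [{ key: 'userChatInput', connected: false }] }
];
const sketchEdges: SketchEdge[] = [
  { source: 'start', sourceHandle: 'out', target: 'chat', targetHandle: 'userChatInput' }
];
console.log(markConnectedInputs(sketchModules, sketchEdges)[0].inputs[0].connected); // true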
@@ -1,4 +1,4 @@
-import React, { useMemo } from 'react';
+import React from 'react';
 import { NodeProps } from 'reactflow';
 import NodeCard from '../modules/NodeCard';
 import { FlowModuleItemType } from '@/types/core/app/flow';
@@ -7,11 +7,8 @@ import Container from '../modules/Container';
 import RenderInput from '../render/RenderInput';
 import RenderOutput from '../render/RenderOutput';

-import { useFlowStore } from '../Provider';
-
 const NodeChat = ({ data }: NodeProps<FlowModuleItemType>) => {
   const { moduleId, inputs, outputs } = data;
-  const { onChangeNode } = useFlowStore();

   return (
     <NodeCard minW={'400px'} {...data}>
@@ -5,14 +5,11 @@ import {
   type EdgeChange,
   useNodesState,
   useEdgesState,
-  XYPosition,
-  useViewport,
   Connection,
   addEdge
 } from 'reactflow';
 import type {
   FlowModuleItemType,
-  FlowModuleTemplateType,
   FlowOutputTargetItemType,
   FlowModuleItemChangeProps
 } from '@/types/core/app/flow';
@@ -44,7 +41,6 @@ export type useFlowStoreType = {
   setEdges: Dispatch<SetStateAction<Edge<any>[]>>;
   onEdgesChange: OnChange<EdgeChange>;
   onFixView: () => void;
-  onAddNode: (e: { template: FlowModuleTemplateType; position: XYPosition }) => void;
   onDelNode: (nodeId: string) => void;
   onChangeNode: (e: FlowModuleItemChangeProps) => void;
   onCopyNode: (nodeId: string) => void;
@@ -80,9 +76,7 @@ const StateContext = createContext<useFlowStoreType>({
   onFixView: function (): void {
     return;
   },
-  onAddNode: function (e: { template: FlowModuleTemplateType; position: XYPosition }): void {
-    return;
-  },
   onDelNode: function (nodeId: string): void {
     return;
   },
@@ -117,7 +111,6 @@ export const FlowProvider = ({ appId, children }: { appId: string; children: Rea
   const { toast } = useToast();
   const [nodes = [], setNodes, onNodesChange] = useNodesState<FlowModuleItemType>([]);
   const [edges, setEdges, onEdgesChange] = useEdgesState([]);
-  const { x, y, zoom } = useViewport();

   const onFixView = useCallback(() => {
     const btn = document.querySelector('.react-flow__controls-fitview') as HTMLButtonElement;
@@ -205,27 +198,6 @@ export const FlowProvider = ({ appId, children }: { appId: string; children: Rea
     [nodes, onDelConnect, setEdges, t, toast]
   );

-  const onAddNode = useCallback(
-    ({ template, position }: { template: FlowModuleTemplateType; position: XYPosition }) => {
-      if (!reactFlowWrapper.current) return;
-      const reactFlowBounds = reactFlowWrapper.current.getBoundingClientRect();
-      const mouseX = (position.x - reactFlowBounds.left - x) / zoom - 100;
-      const mouseY = (position.y - reactFlowBounds.top - y) / zoom;
-      setNodes((state) =>
-        state.concat(
-          appModule2FlowNode({
-            item: {
-              ...template,
-              moduleId: nanoid(),
-              position: { x: mouseX, y: mouseY }
-            }
-          })
-        )
-      );
-    },
-    [setNodes, x, y, zoom]
-  );
-
   const onDelNode = useCallback(
     (nodeId: string) => {
       setNodes((state) => state.filter((item) => item.id !== nodeId));
@@ -338,7 +310,6 @@ export const FlowProvider = ({ appId, children }: { appId: string; children: Rea
       setEdges,
       onEdgesChange,
       onFixView,
-      onAddNode,
       onDelNode,
       onChangeNode,
       onCopyNode,
@@ -1,24 +1,20 @@
-import React, { useMemo } from 'react';
+import React, { useCallback, useMemo } from 'react';
 import { Box, Flex } from '@chakra-ui/react';
 import { ModuleTemplates } from '@/constants/flow/ModuleTemplate';
-import { FlowModuleItemType, FlowModuleTemplateType } from '@/types/core/app/flow';
-import type { Node } from 'reactflow';
+import { FlowModuleTemplateType } from '@/types/core/app/flow';
+import { useViewport, XYPosition } from 'reactflow';
 import { useGlobalStore } from '@/web/common/store/global';
 import Avatar from '@/components/Avatar';
 import { FlowModuleTypeEnum } from '@/constants/flow';
 import { useFlowStore } from './Provider';
+import { customAlphabet } from 'nanoid';
+import { appModule2FlowNode } from '@/utils/adapt';
+const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 6);

-const ModuleTemplateList = ({
-  nodes,
-  isOpen,
-  onClose
-}: {
-  nodes?: Node<FlowModuleItemType>[];
-  isOpen: boolean;
-  onClose: () => void;
-}) => {
-  const { onAddNode } = useFlowStore();
+const ModuleTemplateList = ({ isOpen, onClose }: { isOpen: boolean; onClose: () => void }) => {
+  const { nodes, setNodes, reactFlowWrapper } = useFlowStore();
   const { isPc } = useGlobalStore();
+  const { x, y, zoom } = useViewport();

   const filterTemplates = useMemo(() => {
     const guideModulesIndex = ModuleTemplates.findIndex((item) => item.label === '引导模块');
@@ -47,6 +43,28 @@ const ModuleTemplateList = ({
     ];
   }, [nodes]);

+  const onAddNode = useCallback(
+    ({ template, position }: { template: FlowModuleTemplateType; position: XYPosition }) => {
+      if (!reactFlowWrapper?.current) return;
+
+      const reactFlowBounds = reactFlowWrapper.current.getBoundingClientRect();
+      const mouseX = (position.x - reactFlowBounds.left - x) / zoom - 100;
+      const mouseY = (position.y - reactFlowBounds.top - y) / zoom;
+      setNodes((state) =>
+        state.concat(
+          appModule2FlowNode({
+            item: {
+              ...template,
+              moduleId: nanoid(),
+              position: { x: mouseX, y: mouseY }
+            }
+          })
+        )
+      );
+    },
+    [reactFlowWrapper, setNodes, x, y, zoom]
+  );
+
   return (
     <>
       <Box
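A worked example of the screen-to-canvas conversion the relocated onAddNode performs above, as a sketch with made-up numbers: subtract the wrapper's offset and the viewport pan, undo the zoom, then shift x by 100 so the node lands left of the cursor.

// Sketch of the drop-position math; the numbers are illustrative only.
type Viewport = { x: number; y: number; zoom: number };

function screenToCanvas(
  screen: { x: number; y: number },
  bounds: { left: number; top: number },
  { x, y, zoom }: Viewport
) {
  return {
    // wrapper offset -> pan offset -> un-zoom -> nudge left of the cursor
    x: (screen.x - bounds.left - x) / zoom - 100,
    y: (screen.y - bounds.top - y) / zoom
  };
}

console.log(screenToCanvas({ x: 500, y: 300 }, { left: 100, top: 50 }, { x: 40, y: 20, zoom: 2 }));
// { x: 80, y: 115 }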
@@ -32,6 +32,7 @@ import { formatPrice } from '@fastgpt/common/bill';
 import { useDatasetStore } from '@/web/core/store/dataset';
 import { SelectedDatasetType } from '@/types/core/dataset';
 import { useQuery } from '@tanstack/react-query';
+import { LLMModelItemType } from '@/types/model';

 const SetInputFieldModal = dynamic(() => import('../modules/SetInputFieldModal'));
 const SelectAppModal = dynamic(() => import('../../../SelectAppModal'));
@@ -186,8 +187,8 @@ const RenderInput = ({
       {item.type === FlowInputItemTypeEnum.selectApp && (
         <SelectAppRender item={item} moduleId={moduleId} />
       )}
-      {item.type === FlowInputItemTypeEnum.quoteList && (
-        <QuoteListRender inputs={sortInputs} item={item} moduleId={moduleId} />
+      {item.type === FlowInputItemTypeEnum.aiSettings && (
+        <AISetting inputs={sortInputs} item={item} moduleId={moduleId} />
       )}
       {item.type === FlowInputItemTypeEnum.maxToken && (
         <MaxTokenRender inputs={sortInputs} item={item} moduleId={moduleId} />
@@ -343,7 +344,7 @@ var SliderRender = React.memo(function SliderRender({ item, moduleId }: RenderPr
   );
 });

-var QuoteListRender = React.memo(function QuoteListRender({ inputs = [], moduleId }: RenderProps) {
+var AISetting = React.memo(function AISetting({ inputs = [], moduleId }: RenderProps) {
   const { onChangeNode } = useFlowStore();
   const { t } = useTranslation();
   const chatModulesData = useMemo(() => {
@@ -367,10 +368,11 @@ var QuoteListRender = React.memo(function QuoteListRender({ inputs = [], moduleI
         leftIcon={<MyIcon name={'settingLight'} w={'14px'} />}
         onClick={onOpenAIChatSetting}
       >
-        {t('app.Quote Prompt Settings')}
+        {t('app.AI Settings')}
       </Button>
       {isOpenAIChatSetting && (
         <AIChatSettingsModal
+          isAdEdit
           onClose={onCloseAIChatSetting}
           onSuccess={(e) => {
             for (let key in e) {
@@ -404,7 +406,7 @@ var MaxTokenRender = React.memo(function MaxTokenRender({
   const { onChangeNode } = useFlowStore();
   const model = inputs.find((item) => item.key === 'model')?.value;
   const modelData = chatModelList.find((item) => item.model === model);
-  const maxToken = modelData ? modelData.contextMaxToken : 4000;
+  const maxToken = modelData ? modelData.maxToken : 4000;
   const markList = [
     { label: '100', value: 100 },
     { label: `${maxToken}`, value: maxToken }
@@ -441,8 +443,42 @@ var SelectChatModelRender = React.memo(function SelectChatModelRender({
   moduleId
 }: RenderProps) {
   const { onChangeNode } = useFlowStore();
+  const modelList = (item.customData?.() as LLMModelItemType[]) || chatModelList || [];

-  const list = chatModelList.map((item) => {
+  function onChangeModel(e: string) {
+    {
+      onChangeNode({
+        moduleId,
+        type: 'inputs',
+        key: item.key,
+        value: {
+          ...item,
+          value: e
+        }
+      });
+
+      // update max tokens
+      const model = modelList.find((item) => item.model === e) || modelList[0];
+      if (!model) return;
+
+      onChangeNode({
+        moduleId,
+        type: 'inputs',
+        key: 'maxToken',
+        value: {
+          ...inputs.find((input) => input.key === 'maxToken'),
+          markList: [
+            { label: '100', value: 100 },
+            { label: `${model.maxToken}`, value: model.maxToken }
+          ],
+          max: model.maxToken,
+          value: model.maxToken / 2
+        }
+      });
+    }
+  }
+
+  const list = modelList.map((item) => {
     const priceStr = `(${formatPrice(item.price, 1000)}元/1k Tokens)`;

     return {
@@ -451,43 +487,11 @@ var SelectChatModelRender = React.memo(function SelectChatModelRender({
     };
   });

-  return (
-    <MySelect
-      width={'100%'}
-      value={item.value}
-      list={list}
-      onchange={(e) => {
-        onChangeNode({
-          moduleId,
-          type: 'inputs',
-          key: item.key,
-          value: {
-            ...item,
-            value: e
-          }
-        });
-
-        // update max tokens
-        const model = chatModelList.find((item) => item.model === e) || chatModelList[0];
-        if (!model) return;
-
-        onChangeNode({
-          moduleId,
-          type: 'inputs',
-          key: 'maxToken',
-          value: {
-            ...inputs.find((input) => input.key === 'maxToken'),
-            markList: [
-              { label: '100', value: 100 },
-              { label: `${model.contextMaxToken}`, value: model.contextMaxToken }
-            ],
-            max: model.contextMaxToken,
-            value: model.contextMaxToken / 2
-          }
-        });
-      }}
-    />
-  );
+  if (!item.value && list.length > 0) {
+    onChangeModel(list[0].value);
+  }
+
+  return <MySelect width={'100%'} value={item.value} list={list} onchange={onChangeModel} />;
 });

 var SelectDatasetRender = React.memo(function SelectDatasetRender({ item, moduleId }: RenderProps) {
@@ -25,6 +25,7 @@ import MyTooltip from '@/components/MyTooltip';
 import TemplateList from './components/TemplateList';
 import ChatTest, { type ChatTestComponentRef } from './components/ChatTest';
 import FlowProvider, { useFlowStore } from './components/Provider';
+import Header from './components/Header';

 const ImportSettings = dynamic(() => import('./components/ImportSettings'));
 const NodeChat = dynamic(() => import('./components/Nodes/NodeChat'));
@@ -62,187 +63,7 @@ const edgeTypes = {
 };
 type Props = { app: AppSchema; onCloseSettings: () => void };

-function FlowHeader({ app, onCloseSettings }: Props & {}) {
-  const theme = useTheme();
-  const { t } = useTranslation();
-  const { copyData } = useCopyData();
-  const ChatTestRef = useRef<ChatTestComponentRef>(null);
-  const { isOpen: isOpenImport, onOpen: onOpenImport, onClose: onCloseImport } = useDisclosure();
-  const { updateAppDetail } = useUserStore();
-  const { nodes, edges, onFixView } = useFlowStore();
-
-  const [testModules, setTestModules] = useState<AppModuleItemType[]>();
-
-  const flow2AppModules = useCallback(() => {
-    const modules: AppModuleItemType[] = nodes.map((item) => ({
-      moduleId: item.data.moduleId,
-      name: item.data.name,
-      flowType: item.data.flowType,
-      showStatus: item.data.showStatus,
-      position: item.position,
-      inputs: item.data.inputs.map((item) => ({
-        ...item,
-        connected: item.connected ?? item.type !== FlowInputItemTypeEnum.target
-      })),
-      outputs: item.data.outputs.map((item) => ({
-        ...item,
-        targets: [] as FlowOutputTargetItemType[]
-      }))
-    }));
-
-    // update inputs and outputs
-    modules.forEach((module) => {
-      module.inputs.forEach((input) => {
-        input.connected =
-          input.connected ||
-          !!edges.find(
-            (edge) => edge.target === module.moduleId && edge.targetHandle === input.key
-          );
-      });
-      module.outputs.forEach((output) => {
-        output.targets = edges
-          .filter(
-            (edge) =>
-              edge.source === module.moduleId &&
-              edge.sourceHandle === output.key &&
-              edge.targetHandle
-          )
-          .map((edge) => ({
-            moduleId: edge.target,
-            key: edge.targetHandle || ''
-          }));
-      });
-    });
-    return modules;
-  }, [edges, nodes]);
-
-  const { mutate: onclickSave, isLoading } = useRequest({
-    mutationFn: () => {
-      const modules = flow2AppModules();
-      // check required connect
-      for (let i = 0; i < modules.length; i++) {
-        const item = modules[i];
-        if (item.inputs.find((input) => input.required && !input.connected)) {
-          return Promise.reject(`【${item.name}】存在未连接的必填输入`);
-        }
-        if (item.inputs.find((input) => input.valueCheck && !input.valueCheck(input.value))) {
-          return Promise.reject(`【${item.name}】存在为填写的必填项`);
-        }
-      }
-
-      return updateAppDetail(app._id, {
-        modules: modules,
-        type: AppTypeEnum.advanced
-      });
-    },
-    successToast: '保存配置成功',
-    errorToast: '保存配置异常',
-    onSuccess() {
-      ChatTestRef.current?.resetChatTest();
-    }
-  });
-
-  return (
-    <>
-      <Flex
-        py={3}
-        px={[2, 5, 8]}
-        borderBottom={theme.borders.base}
-        alignItems={'center'}
-        userSelect={'none'}
-      >
-        <MyTooltip label={'返回'} offset={[10, 10]}>
-          <IconButton
-            size={'sm'}
-            icon={<MyIcon name={'back'} w={'14px'} />}
-            borderRadius={'md'}
-            borderColor={'myGray.300'}
-            variant={'base'}
-            aria-label={''}
-            onClick={() => {
-              onCloseSettings();
-              onFixView();
-            }}
-          />
-        </MyTooltip>
-        <Box ml={[3, 6]} fontSize={['md', '2xl']} flex={1}>
-          {app.name}
-        </Box>
-
-        <MyTooltip label={t('app.Import Configs')}>
-          <IconButton
-            mr={[3, 6]}
-            icon={<MyIcon name={'importLight'} w={['14px', '16px']} />}
-            borderRadius={'lg'}
-            variant={'base'}
-            aria-label={'save'}
-            onClick={onOpenImport}
-          />
-        </MyTooltip>
-        <MyTooltip label={t('app.Export Configs')}>
-          <IconButton
-            mr={[3, 6]}
-            icon={<MyIcon name={'export'} w={['14px', '16px']} />}
-            borderRadius={'lg'}
-            variant={'base'}
-            aria-label={'save'}
-            onClick={() =>
-              copyData(
-                JSON.stringify(flow2AppModules(), null, 2),
-                t('app.Export Config Successful')
-              )
-            }
-          />
-        </MyTooltip>
-
-        {testModules ? (
-          <IconButton
-            mr={[3, 6]}
-            icon={<SmallCloseIcon fontSize={'25px'} />}
-            variant={'base'}
-            color={'myGray.600'}
-            borderRadius={'lg'}
-            aria-label={''}
-            onClick={() => setTestModules(undefined)}
-          />
-        ) : (
-          <MyTooltip label={'测试对话'}>
-            <IconButton
-              mr={[3, 6]}
-              icon={<MyIcon name={'chat'} w={['14px', '16px']} />}
-              borderRadius={'lg'}
-              aria-label={'save'}
-              variant={'base'}
-              onClick={() => {
-                setTestModules(flow2AppModules());
-              }}
-            />
-          </MyTooltip>
-        )}
-
-        <MyTooltip label={'保存配置'}>
-          <IconButton
-            icon={<MyIcon name={'save'} w={['14px', '16px']} />}
-            borderRadius={'lg'}
-            isLoading={isLoading}
-            aria-label={'save'}
-            onClick={onclickSave}
-          />
-        </MyTooltip>
-      </Flex>
-      {isOpenImport && <ImportSettings onClose={onCloseImport} />}
-      <ChatTest
-        ref={ChatTestRef}
-        modules={testModules}
-        app={app}
-        onClose={() => setTestModules(undefined)}
-      />
-    </>
-  );
-}
-const Header = React.memo(FlowHeader);
-
-const AppEdit = (props: Props) => {
+const AppEdit = React.memo(function AppEdit(props: Props) {
   const { app } = props;

   const {
@@ -261,7 +82,7 @@ const AppEdit = (props: Props) => {
   return (
     <>
       {/* header */}
-      <Header {...props} />
+      <Header app={app} onCloseSettings={props.onCloseSettings} />
       <Box
         minH={'400px'}
         flex={'1 0 0'}
@@ -318,11 +139,11 @@ const AppEdit = (props: Props) => {
         <Controls position={'bottom-right'} style={{ display: 'flex' }} showInteractive={false} />
       </ReactFlow>

-      <TemplateList isOpen={isOpenTemplate} nodes={nodes} onClose={onCloseTemplate} />
+      <TemplateList isOpen={isOpenTemplate} onClose={onCloseTemplate} />
     </Box>
   </>
   );
-};
+});

 const Flow = (data: Props) => {
   return (
@@ -34,7 +34,6 @@ import { chatModelList } from '@/web/common/store/static';
 import { formatPrice } from '@fastgpt/common/bill/index';
 import {
   ChatModelSystemTip,
-  ChatModelLimitTip,
   welcomeTextTip,
   questionGuideTip
 } from '@/constants/flow/ModuleTemplate';
@@ -128,12 +127,7 @@ const Settings = ({ appId }: { appId: string }) => {
       label: `${item.name} (${formatPrice(item.price, 1000)} 元/1k tokens)`
     }));
   }, [refresh]);
-  const tokenLimit = useMemo(() => {
-    return (
-      chatModelList.find((item) => item.model === getValues('chatModel.model'))?.contextMaxToken ||
-      4000
-    );
-  }, [getValues, refresh]);
   const selectedKbList = useMemo(
     () => allDatasets.filter((item) => kbList.find((kb) => kb.kbId === item._id)),
     [allDatasets, kbList]
@@ -411,6 +405,10 @@ const Settings = ({ appId }: { appId: string }) => {
           <Box ml={2} flex={1}>
             AI 配置
           </Box>
+          <Flex {...BoxBtnStyles} onClick={onOpenAIChatSetting}>
+            <MyIcon mr={1} name={'settingLight'} w={'14px'} />
+            高级配置
+          </Flex>
         </Flex>

         <Flex alignItems={'center'} mt={5}>
@@ -424,7 +422,7 @@ const Settings = ({ appId }: { appId: string }) => {
               setValue('chatModel.model', val);
               const maxToken =
                 chatModelList.find((item) => item.model === getValues('chatModel.model'))
-                  ?.contextMaxToken || 4000;
+                  ?.maxToken || 4000;
               const token = maxToken / 2;
               setValue('chatModel.maxToken', token);
               setRefresh(!refresh);
@@ -432,45 +430,6 @@ const Settings = ({ appId }: { appId: string }) => {
             />
           </Box>
         </Flex>
-        <Flex alignItems={'center'} my={10}>
-          <Box {...LabelStyles}>温度</Box>
-          <Box flex={1} ml={'10px'}>
-            <MySlider
-              markList={[
-                { label: '严谨', value: 0 },
-                { label: '发散', value: 10 }
-              ]}
-              width={'95%'}
-              min={0}
-              max={10}
-              value={getValues('chatModel.temperature')}
-              onChange={(e) => {
-                setValue('chatModel.temperature', e);
-                setRefresh(!refresh);
-              }}
-            />
-          </Box>
-        </Flex>
-        <Flex alignItems={'center'} mt={12} mb={10}>
-          <Box {...LabelStyles}>回复上限</Box>
-          <Box flex={1} ml={'10px'}>
-            <MySlider
-              markList={[
-                { label: '100', value: 100 },
-                { label: `${tokenLimit}`, value: tokenLimit }
-              ]}
-              width={'95%'}
-              min={100}
-              max={tokenLimit}
-              step={50}
-              value={getValues('chatModel.maxToken')}
-              onChange={(val) => {
-                setValue('chatModel.maxToken', val);
-                setRefresh(!refresh);
-              }}
-            />
-          </Box>
-        </Flex>
         <Flex mt={10} alignItems={'flex-start'}>
           <Box {...LabelStyles}>
             提示词
@@ -502,10 +461,6 @@ const Settings = ({ appId }: { appId: string }) => {
             <MyIcon name={'edit'} w={'14px'} mr={1} />
             参数
           </Flex>
-          <Flex {...BoxBtnStyles} onClick={onOpenAIChatSetting}>
-            <MyIcon mr={1} name={'settingLight'} w={'14px'} />
-            提示词
-          </Flex>
         </Flex>
         <Flex mt={1} color={'myGray.600'} fontSize={['sm', 'md']}>
           相似度: {getValues('kb.searchSimilarity')}, 单次搜索数量: {getValues('kb.searchLimit')},
@@ -6,7 +6,7 @@ import { useMutation } from '@tanstack/react-query';
 import { splitText2Chunks } from '@/utils/file';
 import { getErrText } from '@/utils/tools';
 import { formatPrice } from '@fastgpt/common/bill/index';
-import { qaModel } from '@/web/common/store/static';
+import { qaModelList } from '@/web/common/store/static';
 import MyIcon from '@/components/Icon';
 import CloseIcon from '@/components/Icon/close';
 import DeleteIcon, { hoverDeleteStyles } from '@/components/Icon/delete';
@@ -23,8 +23,9 @@ import { chunksUpload } from '@/web/core/utils/dataset';
 const fileExtension = '.txt, .doc, .docx, .pdf, .md';

 const QAImport = ({ kbId }: { kbId: string }) => {
-  const unitPrice = qaModel.price || 3;
-  const chunkLen = qaModel.maxToken * 0.45;
+  const qaModel = qaModelList[0];
+  const unitPrice = qaModel?.price || 3;
+  const chunkLen = qaModel?.maxToken * 0.45;
   const theme = useTheme();
   const router = useRouter();
   const { toast } = useToast();
@@ -13,9 +13,9 @@ import MyTooltip from '@/components/MyTooltip';
 import MyModal from '@/components/MyModal';
 import { postCreateDataset } from '@/web/core/api/dataset';
 import type { CreateDatasetParams } from '@/global/core/api/datasetReq.d';
-import { vectorModelList } from '@/web/common/store/static';
 import MySelect from '@/components/Select';
 import { QuestionOutlineIcon } from '@chakra-ui/icons';
+import { vectorModelList } from '@/web/common/store/static';
 import Tag from '@/components/Tag';

 const CreateModal = ({ onClose, parentId }: { onClose: () => void; parentId?: string }) => {
@@ -1,12 +1,12 @@
 import { Bill } from '@/service/mongo';
 import { MongoUser } from '@fastgpt/support/user/schema';
 import { BillSourceEnum } from '@/constants/user';
-import { getModel } from '@/service/utils/data';
+import { getModelMap, ModelTypeEnum } from '@/service/core/ai/model';
 import { ChatHistoryItemResType } from '@/types/chat';
 import { formatPrice } from '@fastgpt/common/bill/index';
 import { addLog } from '@/service/utils/tools';
 import type { CreateBillType } from '@/types/common/bill';
-import { defaultQGModel } from '@/pages/api/system/getInitData';
+import { defaultQGModels } from '@/constants/model';

 async function createBill(data: CreateBillType) {
   try {
@@ -106,7 +106,7 @@ export const pushQABill = async ({
   addLog.info('splitData generate success', { totalTokens });

   // 获取模型单价格, 都是用 gpt35 拆分
-  const unitPrice = global.qaModel.price || 3;
+  const unitPrice = global.qaModels?.[0]?.price || 3;
   // 计算价格
   const total = unitPrice * totalTokens;

@@ -158,7 +158,7 @@ export const pushGenerateVectorBill = async ({
       {
         moduleName: '索引生成',
         amount: total,
-        model: vectorModel.model,
+        model: vectorModel.name,
         tokenLen
       }
     ]
@@ -167,14 +167,22 @@ export const pushGenerateVectorBill = async ({
   return { total };
 };

-export const countModelPrice = ({ model, tokens }: { model: string; tokens: number }) => {
-  const modelData = getModel(model);
+export const countModelPrice = ({
+  model,
+  tokens,
+  type
+}: {
+  model: string;
+  tokens: number;
+  type: `${ModelTypeEnum}`;
+}) => {
+  const modelData = getModelMap?.[type]?.(model);
   if (!modelData) return 0;
   return modelData.price * tokens;
 };

 export const pushQuestionGuideBill = ({ tokens, userId }: { tokens: number; userId: string }) => {
-  const qgModel = global.qgModel || defaultQGModel;
+  const qgModel = global.qgModels?.[0] || defaultQGModels[0];
   const total = qgModel.price * tokens;
   createBill({
     userId,
68  projects/app/src/service/core/ai/model.ts  Normal file
@@ -0,0 +1,68 @@
+import {
+  defaultChatModels,
+  defaultCQModels,
+  defaultExtractModels,
+  defaultQAModels,
+  defaultQGModels,
+  defaultVectorModels
+} from '@/constants/model';
+
+export const getChatModel = (model?: string) => {
+  return (
+    (global.chatModels || defaultChatModels).find((item) => item.model === model) ||
+    defaultChatModels[0]
+  );
+};
+export const getQAModel = (model?: string) => {
+  return (
+    (global.qaModels || defaultQAModels).find((item) => item.model === model) ||
+    global.qaModels?.[0] ||
+    defaultQAModels[0]
+  );
+};
+export const getCQModel = (model?: string) => {
+  return (
+    (global.cqModels || defaultCQModels).find((item) => item.model === model) ||
+    global.cqModels?.[0] ||
+    defaultCQModels[0]
+  );
+};
+export const getExtractModel = (model?: string) => {
+  return (
+    (global.extractModels || defaultExtractModels).find((item) => item.model === model) ||
+    global.extractModels?.[0] ||
+    defaultExtractModels[0]
+  );
+};
+export const getQGModel = (model?: string) => {
+  return (
+    (global.qgModels || defaultQGModels).find((item) => item.model === model) ||
+    global.qgModels?.[0] ||
+    defaultQGModels[0]
+  );
+};
+
+export const getVectorModel = (model?: string) => {
+  return (
+    global.vectorModels.find((item) => item.model === model) ||
+    global.vectorModels?.[0] ||
+    defaultVectorModels[0]
+  );
+};
+
+export enum ModelTypeEnum {
+  chat = 'chat',
+  qa = 'qa',
+  cq = 'cq',
+  extract = 'extract',
+  qg = 'qg',
+  vector = 'vector'
+}
+export const getModelMap = {
+  [ModelTypeEnum.chat]: getChatModel,
+  [ModelTypeEnum.qa]: getQAModel,
+  [ModelTypeEnum.cq]: getCQModel,
+  [ModelTypeEnum.extract]: getExtractModel,
+  [ModelTypeEnum.qg]: getQGModel,
+  [ModelTypeEnum.vector]: getVectorModel
+};
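The pattern this new file introduces, one getter per model type plus a lookup table, is what lets the reworked countModelPrice above resolve a price by type. A self-contained sketch of the same idea (the model names and prices below are made up, not the project's real config):

// Getter-map sketch: resolve a model entry by type, with a default fallback.
type ModelItem = { model: string; name: string; price: number };

enum ModelType {
  chat = 'chat',
  vector = 'vector'
}

const chatModels: ModelItem[] = [{ model: 'gpt-3.5-turbo', name: 'GPT-3.5', price: 0.02 }];
const vectorModels: ModelItem[] = [{ model: 'text-embedding-ada-002', name: 'Embedding', price: 0.002 }];

const getterMap: Record<ModelType, (model?: string) => ModelItem> = {
  [ModelType.chat]: (model) => chatModels.find((m) => m.model === model) || chatModels[0],
  [ModelType.vector]: (model) => vectorModels.find((m) => m.model === model) || vectorModels[0]
};

// countModelPrice-style usage: resolve by type, then bill by token count.
function countPrice(model: string, tokens: number, type: ModelType) {
  const data = getterMap[type]?.(model);
  return data ? data.price * tokens : 0;
}

console.log(countPrice('gpt-3.5-turbo', 1000, ModelType.chat)); // 20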
12  projects/app/src/service/core/app/module.ts  Normal file
@@ -0,0 +1,12 @@
+import { FlowModuleTypeEnum } from '@/constants/flow';
+import { AppModuleItemType } from '@/types/app';
+
+export const getChatModelNameListByModules = (modules: AppModuleItemType[]): string[] => {
+  const chatModules = modules.filter((item) => item.flowType === FlowModuleTypeEnum.chatNode);
+  return chatModules
+    .map((item) => {
+      const model = item.inputs.find((input) => input.key === 'model')?.value;
+      return global.chatModels.find((item) => item.model === model)?.name || '';
+    })
+    .filter((item) => item);
+};
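And a hypothetical call of getChatModelNameListByModules, with the module shape reduced to just the fields the helper reads (a chat node's model input is looked up against the configured chat models):

// Reduced shapes; the real AppModuleItemType carries much more.
type NameInput = { key: string; value?: string };
type NameModule = { flowType: string; inputs: NameInput[] };

const configuredChatModels = [{ model: 'gpt-3.5-turbo', name: 'GPT-3.5' }]; // stand-in for global.chatModels

function chatModelNames(modules: NameModule[]): string[] {
  return modules
    .filter((m) => m.flowType === 'chatNode') // FlowModuleTypeEnum.chatNode in the real code
    .map((m) => {
      const model = m.inputs.find((input) => input.key === 'model')?.value;
      return configuredChatModels.find((c) => c.model === model)?.name || '';
    })
    .filter((name) => name);
}

console.log(chatModelNames([{ flowType: 'chatNode', inputs: [{ key: 'model', value: 'gpt-3.5-turbo' }] }]));
// [ 'GPT-3.5' ]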
@@ -73,7 +73,7 @@ export async function generateQA(): Promise<any> {
   ];
   const ai = getAIApi(undefined, 480000);
   const chatResponse = await ai.chat.completions.create({
-    model: global.qaModel.model,
+    model: global.qaModels[0].model,
     temperature: 0.01,
     messages,
     stream: false
@@ -10,9 +10,11 @@ import { FlowModuleTypeEnum } from '@/constants/flow';
 import type { ModuleDispatchProps } from '@/types/core/chat/type';
 import { replaceVariable } from '@/utils/common/tools/text';
 import { Prompt_CQJson } from '@/global/core/prompt/agent';
-import { defaultCQModel } from '@/pages/api/system/getInitData';
+import { FunctionModelItemType } from '@/types/model';
+import { getCQModel } from '@/service/core/ai/model';

 type Props = ModuleDispatchProps<{
+  model: string;
   systemPrompt?: string;
   history?: ChatItemType[];
   [SystemInputEnum.userChatInput]: string;
@@ -30,20 +32,26 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse
   const {
     moduleName,
     user,
-    inputs: { agents, userChatInput }
+    inputs: { model, agents, userChatInput }
   } = props as Props;

   if (!userChatInput) {
     return Promise.reject('Input is empty');
   }

-  const cqModel = global.cqModel || defaultCQModel;
+  const cqModel = getCQModel(model);

   const { arg, tokens } = await (async () => {
     if (cqModel.functionCall) {
-      return functionCall(props);
+      return functionCall({
+        ...props,
+        cqModel
+      });
     }
-    return completions(props);
+    return completions({
+      ...props,
+      cqModel
+    });
   })();

   const result = agents.find((item) => item.key === arg?.type) || agents[agents.length - 1];
@@ -64,45 +72,45 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse

 async function functionCall({
   user,
+  cqModel,
   inputs: { agents, systemPrompt, history = [], userChatInput }
-}: Props) {
-  const cqModel = global.cqModel;
-
+}: Props & { cqModel: FunctionModelItemType }) {
   const messages: ChatItemType[] = [
-    ...(systemPrompt
-      ? [
-          {
-            obj: ChatRoleEnum.System,
-            value: systemPrompt
-          }
-        ]
-      : []),
     ...history,
     {
       obj: ChatRoleEnum.Human,
-      value: userChatInput
+      value: systemPrompt
+        ? `补充的背景知识:
+"""
+${systemPrompt}
+"""
+我的问题: ${userChatInput}
+`
+        : userChatInput
     }
   ];
   const filterMessages = ChatContextFilter({
     messages,
     maxTokens: cqModel.maxToken
   });
   const adaptMessages = adaptChat2GptMessages({ messages: filterMessages, reserveId: false });

   // function body
   const agentFunction = {
     name: agentFunName,
-    description: '判断用户问题的类型属于哪方面,返回对应的字段',
+    description: '请根据对话记录及补充的背景知识,判断用户的问题类型,并返回对应的字段',
     parameters: {
       type: 'object',
       properties: {
         type: {
           type: 'string',
-          description: agents.map((item) => `${item.value},返回:'${item.key}'`).join(';'),
+          description: `判断用户的问题类型,并返回对应的字段。下面是几种问题类型: ${agents
+            .map((item) => `${item.value},返回:'${item.key}'`)
+            .join(';')}`,
           enum: agents.map((item) => item.key)
         }
-      },
-      required: ['type']
+      }
     }
   };
   const ai = getAIApi(user.openaiAccount, 48000);
@@ -133,15 +141,14 @@ async function functionCall({
 }

 async function completions({
+  cqModel,
   user,
   inputs: { agents, systemPrompt = '', history = [], userChatInput }
-}: Props) {
-  const extractModel = global.extractModel;
-
+}: Props & { cqModel: FunctionModelItemType }) {
   const messages: ChatItemType[] = [
     {
       obj: ChatRoleEnum.Human,
-      value: replaceVariable(extractModel.prompt || Prompt_CQJson, {
+      value: replaceVariable(cqModel.functionPrompt || Prompt_CQJson, {
         systemPrompt,
         typeList: agents.map((item) => `ID: "${item.key}", 问题类型:${item.value}`).join('\n'),
         text: `${history.map((item) => `${item.obj}:${item.value}`).join('\n')}
@@ -153,7 +160,7 @@ Human:${userChatInput}`
   const ai = getAIApi(user.openaiAccount, 480000);

   const data = await ai.chat.completions.create({
-    model: extractModel.model,
+    model: cqModel.model,
     temperature: 0.01,
     messages: adaptChat2GptMessages({ messages, reserveId: false }),
     stream: false
@@ -9,7 +9,7 @@ import { FlowModuleTypeEnum } from '@/constants/flow';
 import type { ModuleDispatchProps } from '@/types/core/chat/type';
 import { Prompt_ExtractJson } from '@/global/core/prompt/agent';
 import { replaceVariable } from '@/utils/common/tools/text';
-import { defaultExtractModel } from '@/pages/api/system/getInitData';
+import { FunctionModelItemType } from '@/types/model';

 type Props = ModuleDispatchProps<{
   history?: ChatItemType[];
@@ -37,13 +37,19 @@ export async function dispatchContentExtract(props: Props): Promise<Response> {
     return Promise.reject('Input is empty');
   }

-  const extractModel = global.extractModel || defaultExtractModel;
+  const extractModel = global.extractModels[0];

   const { arg, tokens } = await (async () => {
     if (extractModel.functionCall) {
-      return functionCall(props);
+      return functionCall({
+        ...props,
+        extractModel
+      });
     }
-    return completions(props);
+    return completions({
+      ...props,
+      extractModel
+    });
   })();

   // remove invalid key
@@ -83,11 +89,10 @@ export async function dispatchContentExtract(props: Props): Promise<Response> {
 }

 async function functionCall({
+  extractModel,
   user,
   inputs: { history = [], content, extractKeys, description }
-}: Props) {
-  const extractModel = global.extractModel;
-
+}: Props & { extractModel: FunctionModelItemType }) {
   const messages: ChatItemType[] = [
     ...history,
     {
@@ -152,15 +157,14 @@ async function functionCall({
 }

 async function completions({
+  extractModel,
   user,
   inputs: { history = [], content, extractKeys, description }
-}: Props) {
-  const extractModel = global.extractModel;
-
+}: Props & { extractModel: FunctionModelItemType }) {
   const messages: ChatItemType[] = [
     {
       obj: ChatRoleEnum.Human,
-      value: replaceVariable(extractModel.prompt || Prompt_ExtractJson, {
+      value: replaceVariable(extractModel.functionPrompt || Prompt_ExtractJson, {
         description,
         json: extractKeys
           .map(
@ -7,7 +7,6 @@ import { textAdaptGptResponse } from '@/utils/adapt';
|
|||||||
import { getAIApi } from '@fastgpt/core/ai/config';
|
import { getAIApi } from '@fastgpt/core/ai/config';
|
||||||
import type { ChatCompletion, StreamChatType } from '@fastgpt/core/ai/type';
|
import type { ChatCompletion, StreamChatType } from '@fastgpt/core/ai/type';
|
||||||
import { TaskResponseKeyEnum } from '@/constants/chat';
|
import { TaskResponseKeyEnum } from '@/constants/chat';
|
||||||
import { getChatModel } from '@/service/utils/data';
|
|
||||||
import { countModelPrice } from '@/service/common/bill/push';
|
import { countModelPrice } from '@/service/common/bill/push';
|
||||||
import { ChatModelItemType } from '@/types/model';
|
import { ChatModelItemType } from '@/types/model';
|
||||||
import { postTextCensor } from '@fastgpt/common/plusApi/censor';
|
import { postTextCensor } from '@fastgpt/common/plusApi/censor';
|
||||||
@ -15,12 +14,13 @@ import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/core/ai/constant'
|
|||||||
import { AppModuleItemType } from '@/types/app';
|
import { AppModuleItemType } from '@/types/app';
|
||||||
import { countMessagesTokens, sliceMessagesTB } from '@/utils/common/tiktoken';
|
import { countMessagesTokens, sliceMessagesTB } from '@/utils/common/tiktoken';
|
||||||
import { adaptChat2GptMessages } from '@/utils/common/adapt/message';
|
import { adaptChat2GptMessages } from '@/utils/common/adapt/message';
|
||||||
import { defaultQuotePrompt, defaultQuoteTemplate } from '@/global/core/prompt/AIChat';
|
import { Prompt_QuotePromptList, Prompt_QuoteTemplateList } from '@/global/core/prompt/AIChat';
|
||||||
import type { AIChatProps } from '@/types/core/aiChat';
|
import type { AIChatProps } from '@/types/core/aiChat';
|
||||||
import { replaceVariable } from '@/utils/common/tools/text';
|
import { replaceVariable } from '@/utils/common/tools/text';
|
||||||
import { FlowModuleTypeEnum } from '@/constants/flow';
|
import { FlowModuleTypeEnum } from '@/constants/flow';
|
||||||
import type { ModuleDispatchProps } from '@/types/core/chat/type';
|
import type { ModuleDispatchProps } from '@/types/core/chat/type';
|
||||||
import { responseWrite, responseWriteController } from '@/service/common/stream';
|
import { responseWrite, responseWriteController } from '@fastgpt/common/tools/stream';
|
||||||
|
import { getChatModel, ModelTypeEnum } from '@/service/core/ai/model';
|
||||||
|
|
||||||
export type ChatProps = ModuleDispatchProps<
|
export type ChatProps = ModuleDispatchProps<
|
||||||
AIChatProps & {
|
AIChatProps & {
|
||||||
@ -47,12 +47,13 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
|
|||||||
user,
|
user,
|
||||||
outputs,
|
outputs,
|
||||||
inputs: {
|
inputs: {
|
||||||
model = global.chatModels[0]?.model,
|
model,
|
||||||
temperature = 0,
|
temperature = 0,
|
||||||
maxToken = 4000,
|
maxToken = 4000,
|
||||||
history = [],
|
history = [],
|
||||||
quoteQA = [],
|
quoteQA = [],
|
||||||
userChatInput,
|
userChatInput,
|
||||||
|
isResponseAnswerText = true,
|
||||||
systemPrompt = '',
|
systemPrompt = '',
|
||||||
limitPrompt,
|
limitPrompt,
|
||||||
quoteTemplate,
|
quoteTemplate,
|
||||||
@ -63,6 +64,8 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
|
|||||||
return Promise.reject('Question is empty');
|
return Promise.reject('Question is empty');
|
||||||
}
|
}
|
||||||
|
|
||||||
|
stream = stream && isResponseAnswerText;
|
||||||
|
|
||||||
// temperature adapt
|
// temperature adapt
|
||||||
const modelConstantsData = getChatModel(model);
|
const modelConstantsData = getChatModel(model);
|
||||||
|
|
||||||
@ -110,18 +113,18 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
|
|||||||
model,
|
model,
|
||||||
temperature,
|
temperature,
|
||||||
max_tokens,
|
max_tokens,
|
||||||
|
stream,
|
||||||
messages: [
|
messages: [
|
||||||
...(modelConstantsData.defaultSystem
|
...(modelConstantsData.defaultSystemChatPrompt
|
||||||
? [
|
? [
|
||||||
{
|
{
|
||||||
role: ChatCompletionRequestMessageRoleEnum.System,
|
role: ChatCompletionRequestMessageRoleEnum.System,
|
||||||
content: modelConstantsData.defaultSystem
|
content: modelConstantsData.defaultSystemChatPrompt
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
: []),
|
: []),
|
||||||
...messages
|
...messages
|
||||||
],
|
]
|
||||||
stream
|
|
||||||
});
|
});
|
||||||
|
|
||||||
const { answerText, totalTokens, completeMessages } = await (async () => {
|
const { answerText, totalTokens, completeMessages } = await (async () => {
|
||||||
@ -172,7 +175,9 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
|
|||||||
[TaskResponseKeyEnum.responseData]: {
|
[TaskResponseKeyEnum.responseData]: {
|
||||||
moduleType: FlowModuleTypeEnum.chatNode,
|
moduleType: FlowModuleTypeEnum.chatNode,
|
||||||
moduleName,
|
moduleName,
|
||||||
price: user.openaiAccount?.key ? 0 : countModelPrice({ model, tokens: totalTokens }),
|
price: user.openaiAccount?.key
|
||||||
|
? 0
|
||||||
|
: countModelPrice({ model, tokens: totalTokens, type: ModelTypeEnum.chat }),
|
||||||
model: modelConstantsData.name,
|
model: modelConstantsData.name,
|
||||||
tokens: totalTokens,
|
tokens: totalTokens,
|
||||||
question: userChatInput,
|
question: userChatInput,
|
||||||
@ -198,7 +203,7 @@ function filterQuote({
|
|||||||
maxTokens: model.quoteMaxToken,
|
maxTokens: model.quoteMaxToken,
|
||||||
messages: quoteQA.map((item, index) => ({
|
messages: quoteQA.map((item, index) => ({
|
||||||
obj: ChatRoleEnum.System,
|
obj: ChatRoleEnum.System,
|
||||||
value: replaceVariable(quoteTemplate || defaultQuoteTemplate, {
|
value: replaceVariable(quoteTemplate || Prompt_QuoteTemplateList[0].value, {
|
||||||
...item,
|
...item,
|
||||||
index: index + 1
|
index: index + 1
|
||||||
})
|
})
|
||||||
@ -212,7 +217,7 @@ function filterQuote({
|
|||||||
filterQuoteQA.length > 0
|
filterQuoteQA.length > 0
|
||||||
? `${filterQuoteQA
|
? `${filterQuoteQA
|
||||||
.map((item, index) =>
|
.map((item, index) =>
|
||||||
replaceVariable(quoteTemplate || defaultQuoteTemplate, {
|
replaceVariable(quoteTemplate || Prompt_QuoteTemplateList[0].value, {
|
||||||
...item,
|
...item,
|
||||||
index: `${index + 1}`
|
index: `${index + 1}`
|
||||||
})
|
})
|
||||||
@@ -243,7 +248,7 @@ function getChatMessages({
   model: ChatModelItemType;
 }) {
   const question = quoteText
-    ? replaceVariable(quotePrompt || defaultQuotePrompt, {
+    ? replaceVariable(quotePrompt || Prompt_QuotePromptList[0].value, {
         quote: quoteText,
         question: userChatInput
       })
@ -275,7 +280,7 @@ function getChatMessages({
|
|||||||
|
|
||||||
const filterMessages = ChatContextFilter({
|
const filterMessages = ChatContextFilter({
|
||||||
messages,
|
messages,
|
||||||
maxTokens: Math.ceil(model.contextMaxToken - 300) // filter token. not response maxToken
|
maxTokens: Math.ceil(model.maxToken - 300) // filter token. not response maxToken
|
||||||
});
|
});
|
||||||
|
|
||||||
const adaptMessages = adaptChat2GptMessages({ messages: filterMessages, reserveId: false });
|
const adaptMessages = adaptChat2GptMessages({ messages: filterMessages, reserveId: false });
|
||||||
@ -294,7 +299,7 @@ function getMaxTokens({
|
|||||||
model: ChatModelItemType;
|
model: ChatModelItemType;
|
||||||
filterMessages: ChatProps['inputs']['history'];
|
filterMessages: ChatProps['inputs']['history'];
|
||||||
}) {
|
}) {
|
||||||
const tokensLimit = model.contextMaxToken;
|
const tokensLimit = model.maxToken;
|
||||||
/* count response max token */
|
/* count response max token */
|
||||||
|
|
||||||
const promptsToken = countMessagesTokens({
|
const promptsToken = countMessagesTokens({
|
||||||
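With `contextMaxToken` renamed to `maxToken`, the budgeting idea is unchanged: keep roughly 300 tokens of headroom when filtering history, and never promise more completion tokens than the window still holds. A sketch of that arithmetic, treating the token counter as a given:

```ts
// Sketch of the budget math implied by getMaxTokens; promptsToken is
// assumed to come from something like countMessagesTokens().
function getResponseMaxTokens({
  modelMaxToken, // model.maxToken (renamed from contextMaxToken)
  requestedMaxTokens,
  promptsToken
}: {
  modelMaxToken: number;
  requestedMaxTokens: number;
  promptsToken: number;
}): number {
  const tokensLimit = modelMaxToken;
  // clamp the completion allowance to what the context window leaves over
  return Math.min(requestedMaxTokens, Math.max(tokensLimit - promptsToken, 0));
}

console.log(getResponseMaxTokens({ modelMaxToken: 4000, requestedMaxTokens: 2000, promptsToken: 3500 })); // 500
```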
@@ -349,7 +354,7 @@ async function streamResponse({
       stream.controller?.abort();
       break;
     }
-    const content = part.choices[0]?.delta?.content || '';
+    const content = part.choices?.[0]?.delta?.content || '';
     answer += content;
 
     responseWrite({
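The added optional chaining guards against stream chunks that arrive without a `choices` array, which some OpenAI-compatible providers emit as keep-alive or error frames. A tiny sketch of the difference:

```ts
// Before: `part.choices[0]` throws on a chunk with no choices array.
// After: the whole path is optional-chained and falls back to ''.
type StreamChunk = { choices?: { delta?: { content?: string } }[] };

function readDelta(part: StreamChunk): string {
  return part.choices?.[0]?.delta?.content || '';
}

console.log(readDelta({})); // ''
console.log(readDelta({ choices: [{ delta: { content: 'hi' } }] })); // 'hi'
```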
@@ -8,6 +8,7 @@ import type { QuoteItemType } from '@/types/chat';
 import { PgDatasetTableName } from '@/constants/plugin';
 import { FlowModuleTypeEnum } from '@/constants/flow';
 import type { ModuleDispatchProps } from '@/types/core/chat/type';
+import { ModelTypeEnum } from '@/service/core/ai/model';
 type KBSearchProps = ModuleDispatchProps<{
   kbList: SelectedDatasetType;
   similarity: number;
@@ -66,7 +67,11 @@ export async function dispatchKBSearch(props: Record<string, any>): Promise<KBSe
     responseData: {
       moduleType: FlowModuleTypeEnum.kbSearchNode,
       moduleName,
-      price: countModelPrice({ model: vectorModel.model, tokens: tokenLen }),
+      price: countModelPrice({
+        model: vectorModel.model,
+        tokens: tokenLen,
+        type: ModelTypeEnum.vector
+      }),
       model: vectorModel.name,
       tokens: tokenLen,
       similarity,
@@ -1,5 +1,5 @@
 import { sseResponseEventEnum, TaskResponseKeyEnum } from '@/constants/chat';
-import { sseResponse } from '@/service/utils/tools';
+import { responseWrite } from '@fastgpt/common/tools/stream';
 import { textAdaptGptResponse } from '@/utils/adapt';
 import type { ModuleDispatchProps } from '@/types/core/chat/type';
 export type AnswerProps = ModuleDispatchProps<{
@@ -21,7 +21,7 @@ export const dispatchAnswer = (props: Record<string, any>): AnswerResponse => {
   const formatText = typeof text === 'string' ? text : JSON.stringify(text, null, 2);
 
   if (stream) {
-    sseResponse({
+    responseWrite({
       res,
       event: detail ? sseResponseEventEnum.answer : undefined,
       data: textAdaptGptResponse({
@@ -3,7 +3,7 @@ import type { ModuleDispatchProps } from '@/types/core/chat/type';
 import { SelectAppItemType } from '@/types/core/app/flow';
 import { dispatchModules } from '@/pages/api/v1/chat/completions';
 import { App } from '@/service/mongo';
-import { responseWrite } from '@/service/common/stream';
+import { responseWrite } from '@fastgpt/common/tools/stream';
 import { ChatRoleEnum, TaskResponseKeyEnum, sseResponseEventEnum } from '@/constants/chat';
 import { textAdaptGptResponse } from '@/utils/adapt';
 
@@ -232,6 +232,6 @@ export async function initPg() {
     `);
     console.log('init pg successful');
   } catch (error) {
-    addLog.error('init pg error', error);
+    console.log('init pg error', error);
   }
 }
@@ -1,7 +1,9 @@
 import { sseResponseEventEnum } from '@/constants/chat';
 import { NextApiResponse } from 'next';
 import { proxyError, ERROR_RESPONSE, ERROR_ENUM } from '@fastgpt/common/constant/errorCode';
-import { clearCookie, sseResponse, addLog } from './utils/tools';
+import { addLog } from './utils/tools';
+import { clearCookie } from '@fastgpt/support/user/auth';
+import { responseWrite } from '@fastgpt/common/tools/stream';
 
 export interface ResponseType<T = any> {
   code: number;
@@ -66,7 +68,7 @@ export const sseErrRes = (res: NextApiResponse, error: any) => {
     clearCookie(res);
   }
 
-  return sseResponse({
+  return responseWrite({
     res,
     event: sseResponseEventEnum.error,
     data: JSON.stringify(ERROR_RESPONSE[errResponseKey])
@@ -86,7 +88,7 @@ export const sseErrRes = (res: NextApiResponse, error: any) => {
 
   addLog.error(`sse error: ${msg}`, error);
 
-  sseResponse({
+  responseWrite({
     res,
     event: sseResponseEventEnum.error,
     data: JSON.stringify({ message: msg })
@@ -1,24 +0,0 @@
-export const getChatModel = (model?: string) => {
-  return global.chatModels.find((item) => item.model === model);
-};
-export const getVectorModel = (model?: string) => {
-  return (
-    global.vectorModels.find((item) => item.model === model) || {
-      model: 'UnKnow',
-      name: 'UnKnow',
-      defaultToken: 500,
-      price: 0,
-      maxToken: 3000
-    }
-  );
-};
-
-export const getModel = (model?: string) => {
-  return [
-    ...global.chatModels,
-    ...global.vectorModels,
-    global.qaModel,
-    global.extractModel,
-    global.cqModel
-  ].find((item) => item.model === model);
-};
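These getters appear to be superseded by `@/service/core/ai/model` (the kbSearch hunk above already imports `ModelTypeEnum` from there). A hypothetical reconstruction of an equivalent consolidated lookup; the factory name is invented:

```ts
// Hypothetical sketch: one generic lookup over all model lists,
// mirroring the removed getModel helper.
type ModelItem = { model: string; name: string };

function makeGetModel<T extends ModelItem>(lists: T[][]) {
  return (model?: string) => lists.flat().find((item) => item.model === model);
}

// usage sketch: const getModel = makeGetModel([chatModels, vectorModels, qaModels]);
```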
@@ -1,37 +1,7 @@
-import type { NextApiResponse, NextApiHandler, NextApiRequest } from 'next';
-import NextCors from 'nextjs-cors';
+import type { NextApiResponse } from 'next';
 import { generateQA } from '../events/generateQA';
 import { generateVector } from '../events/generateVector';
 
-/* set cookie */
-export const setCookie = (res: NextApiResponse, token: string) => {
-  res.setHeader(
-    'Set-Cookie',
-    `token=${token}; Path=/; HttpOnly; Max-Age=604800; Samesite=None; Secure;`
-  );
-};
-/* clear cookie */
-export const clearCookie = (res: NextApiResponse) => {
-  res.setHeader('Set-Cookie', 'token=; Path=/; Max-Age=0');
-};
-
-export function withNextCors(handler: NextApiHandler): NextApiHandler {
-  return async function nextApiHandlerWrappedWithNextCors(
-    req: NextApiRequest,
-    res: NextApiResponse
-  ) {
-    const methods = ['GET', 'eHEAD', 'PUT', 'PATCH', 'POST', 'DELETE'];
-    const origin = req.headers.origin;
-    await NextCors(req, res, {
-      methods,
-      origin: origin,
-      optionsSuccessStatus: 200
-    });
-
-    return handler(req, res);
-  };
-}
-
 /* start task */
 export const startQueue = () => {
   if (!global.systemEnv) return;
@@ -43,20 +13,6 @@ export const startQueue = () => {
   }
 };
 
-export const sseResponse = ({
-  res,
-  event,
-  data
-}: {
-  res: NextApiResponse;
-  event?: string;
-  data: string;
-}) => {
-  if (res.closed) return;
-  event && res.write(`event: ${event}\n`);
-  res.write(`data: ${data}\n\n`);
-};
-
 /* add logger */
 export const addLog = {
   info: (msg: string, obj?: Record<string, any>) => {
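`responseWrite` from `@fastgpt/common/tools/stream` replaces the `sseResponse` deleted above and presumably keeps the same contract. A sketch reconstructed from the removed body:

```ts
import type { ServerResponse } from 'http';

// Reconstruction of the removed sseResponse; responseWrite in
// @fastgpt/common/tools/stream presumably behaves the same way.
export const responseWrite = ({
  res,
  event,
  data
}: {
  res: ServerResponse & { closed?: boolean };
  event?: string;
  data: string;
}) => {
  if (res.closed) return; // client already disconnected
  if (event) res.write(`event: ${event}\n`); // optional SSE event name
  res.write(`data: ${data}\n\n`); // blank line terminates the SSE message
};
```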
projects/app/src/types/core/aiChat.d.ts
@@ -1,9 +1,12 @@
+import { SystemInputEnum } from '@/constants/app';
+
 /* ai chat modules props */
 export type AIChatProps = {
   model: string;
   systemPrompt?: string;
   temperature: number;
   maxToken: number;
+  [SystemInputEnum.isResponseAnswerText]: boolean;
   quoteTemplate?: string;
   quotePrompt?: string;
   frequency: number;
projects/app/src/types/core/app/flow.d.ts
@@ -31,6 +31,7 @@ export type FlowInputItemType = {
   required?: boolean;
   list?: { label: string; value: any }[];
   markList?: { label: string; value: any }[];
+  customData?: () => any;
   valueCheck?: (value: any) => boolean;
 };
 
projects/app/src/types/index.d.ts
@@ -3,7 +3,7 @@ import type { Tiktoken } from 'js-tiktoken';
 import {
   ChatModelItemType,
   FunctionModelItemType,
-  QAModelItemType,
+  LLMModelItemType,
   VectorModelItemType
 } from './model';
 import { TrackEventName } from '@/constants/common';
@@ -36,10 +36,10 @@ declare global {
 
   var vectorModels: VectorModelItemType[];
   var chatModels: ChatModelItemType[];
-  var qaModel: QAModelItemType;
-  var extractModel: FunctionModelItemType;
-  var cqModel: FunctionModelItemType;
-  var qgModel: FunctionModelItemType;
+  var qaModels: LLMModelItemType[];
+  var cqModels: FunctionModelItemType[];
+  var extractModels: FunctionModelItemType[];
+  var qgModels: LLMModelItemType[];
 
   var priceMd: string;
   var systemVersion: string;
projects/app/src/types/model.d.ts
@@ -1,19 +1,23 @@
-export type ChatModelItemType = {
-  model: string;
-  name: string;
-  contextMaxToken: number;
-  quoteMaxToken: number;
-  maxTemperature: number;
-  price: number;
-  censor?: boolean;
-  defaultSystem?: string;
-};
-export type QAModelItemType = {
+import { LLMModelUsageEnum } from '@/constants/model';
+
+export type LLMModelItemType = {
   model: string;
   name: string;
   maxToken: number;
   price: number;
 };
+export type ChatModelItemType = LLMModelItemType & {
+  quoteMaxToken: number;
+  maxTemperature: number;
+  censor?: boolean;
+  defaultSystemChatPrompt?: string;
+};
+
+export type FunctionModelItemType = LLMModelItemType & {
+  functionCall: boolean;
+  functionPrompt: string;
+};
+
 export type VectorModelItemType = {
   model: string;
   name: string;
@@ -21,11 +25,3 @@ export type VectorModelItemType = {
   price: number;
   maxToken: number;
 };
-export type FunctionModelItemType = {
-  model: string;
-  name: string;
-  maxToken: number;
-  price: number;
-  prompt: string;
-  functionCall: boolean;
-};
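Net effect of the model.d.ts refactor: one shared `LLMModelItemType` base, with chat-specific and function-call-specific fields layered on through intersection types. An illustrative value (types restated inline; the numbers are examples, not shipped defaults):

```ts
type LLMModelItemType = {
  model: string;
  name: string;
  maxToken: number; // renamed from contextMaxToken
  price: number;
};
type ChatModelItemType = LLMModelItemType & {
  quoteMaxToken: number;
  maxTemperature: number;
  censor?: boolean;
  defaultSystemChatPrompt?: string; // renamed from defaultSystem
};

const exampleChatModel: ChatModelItemType = {
  model: 'gpt-3.5-turbo',
  name: 'GPT35-4k',
  maxToken: 4000,
  price: 0,
  quoteMaxToken: 2000,
  maxTemperature: 1.2,
  defaultSystemChatPrompt: ''
};
```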
@@ -36,9 +36,10 @@ export const getDefaultAppForm = (): EditFormType => {
     model: defaultChatModel?.model,
     systemPrompt: '',
     temperature: 0,
+    [SystemInputEnum.isResponseAnswerText]: true,
     quotePrompt: '',
     quoteTemplate: '',
-    maxToken: defaultChatModel ? defaultChatModel.contextMaxToken / 2 : 4000,
+    maxToken: defaultChatModel ? defaultChatModel.maxToken / 2 : 4000,
     frequency: 0.5,
     presence: -0.5
   },
@@ -185,6 +186,13 @@ const chatModelInput = (formData: EditFormType): FlowInputItemType[] => [
     label: '系统提示词',
     connected: true
   },
+  {
+    key: SystemInputEnum.isResponseAnswerText,
+    value: true,
+    type: 'hidden',
+    label: '返回AI内容',
+    connected: true
+  },
   {
     key: 'quoteTemplate',
     value: formData.chatModel.quoteTemplate || '',
@@ -328,7 +336,7 @@ const simpleChatTemplate = (formData: EditFormType): AppModuleItemType[] => [
     outputs: [
       {
         key: 'answerText',
-        label: '模型回复',
+        label: 'AI回复',
         description: '直接响应,无需配置',
         type: 'hidden',
         targets: []
@@ -533,7 +541,7 @@ const kbTemplate = (formData: EditFormType): AppModuleItemType[] => [
     outputs: [
       {
         key: 'answerText',
-        label: '模型回复',
+        label: 'AI回复',
         description: '直接响应,无需配置',
         type: 'hidden',
         targets: []
@@ -12,11 +12,12 @@ export const splitText2Chunks = ({ text = '', maxLen }: { text: string; maxLen:
   const tempMarker = 'SPLIT_HERE_SPLIT_HERE';
 
   const stepReg: Record<number, RegExp> = {
-    0: /(\n)/g,
-    1: /([。]|\.\s)/g,
-    2: /([!?]|!\s|\?\s)/g,
-    3: /([;]|;\s)/g,
-    4: /([,]|,\s)/g
+    0: /(\n\n)/g,
+    1: /([\n])/g,
+    2: /([。]|\.\s)/g,
+    3: /([!?]|!\s|\?\s)/g,
+    4: /([;]|;\s)/g,
+    5: /([,]|,\s)/g
   };
 
   const splitTextRecursively = ({ text = '', step }: { text: string; step: number }) => {
@@ -43,7 +44,6 @@ export const splitText2Chunks = ({ text = '', maxLen }: { text: string; maxLen:
     .filter((part) => part);
 
   let chunks: string[] = [];
-
   let preChunk = '';
   let chunk = '';
   for (let i = 0; i < splitTexts.length; i++) {
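The splitter now tries paragraph breaks (`\n\n`) first and only descends to single newlines, then sentence and clause punctuation, when a piece still exceeds `maxLen`. A simplified sketch of that recursive descent, using character length as a stand-in for the repo's length measure and re-attaching each separator to the piece before it:

```ts
const stepReg: Record<number, RegExp> = {
  0: /(\n\n)/g,
  1: /([\n])/g,
  2: /([。]|\.\s)/g,
  3: /([!?]|!\s|\?\s)/g,
  4: /([;]|;\s)/g,
  5: /([,]|,\s)/g
};

function splitRecursively(text: string, maxLen: number, step = 0): string[] {
  if (text.length <= maxLen) return [text];
  const reg = stepReg[step];
  if (!reg) return [text]; // finest separator exhausted; keep the oversize piece
  // split() keeps the captured separators as their own entries
  const raw = text.split(reg).filter((p) => p);
  const pieces: string[] = [];
  for (const p of raw) {
    reg.lastIndex = 0; // reset the /g regex before reusing it as a test
    if (reg.test(p) && pieces.length > 0) pieces[pieces.length - 1] += p;
    else pieces.push(p);
  }
  return pieces.flatMap((p) => splitRecursively(p, maxLen, step + 1));
}

console.log(splitRecursively('first paragraph.\n\nsecond paragraph.', 20));
```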
@@ -1,34 +1,41 @@
-import {
-  type QAModelItemType,
-  type ChatModelItemType,
-  type VectorModelItemType
-} from '@/types/model';
 import type { InitDateResponse } from '@/global/common/api/systemRes';
 import { getSystemInitData } from '@/web/common/api/system';
 import { delay } from '@/utils/tools';
 import type { FeConfigsType } from '@fastgpt/common/type/index.d';
+import {
+  defaultChatModels,
+  defaultQAModels,
+  defaultCQModels,
+  defaultExtractModels,
+  defaultQGModels,
+  defaultVectorModels
+} from '@/constants/model';
 
-export let chatModelList: ChatModelItemType[] = [];
-export let qaModel: QAModelItemType = {
-  model: 'gpt-3.5-turbo-16k',
-  name: 'GPT35-16k',
-  maxToken: 16000,
-  price: 0
-};
-export let vectorModelList: VectorModelItemType[] = [];
 export let feConfigs: FeConfigsType = {};
 export let priceMd = '';
 export let systemVersion = '0.0.0';
 
+export let vectorModelList = defaultVectorModels;
+export let chatModelList = defaultChatModels;
+export let qaModelList = defaultQAModels;
+export let cqModelList = defaultCQModels;
+export let extractModelList = defaultExtractModels;
+export let qgModelList = defaultQGModels;
+
 let retryTimes = 3;
 
 export const clientInitData = async (): Promise<InitDateResponse> => {
   try {
     const res = await getSystemInitData();
 
-    chatModelList = res.chatModels;
-    qaModel = res.qaModel;
-    vectorModelList = res.vectorModels;
+    chatModelList = res.chatModels || [];
+    qaModelList = res.qaModels || [];
+    cqModelList = res.cqModels || [];
+    extractModelList = res.extractModels || [];
+    qgModelList = res.qgModels || [];
+
+    vectorModelList = res.vectorModels || [];
 
     feConfigs = res.feConfigs;
     priceMd = res.priceMd;
     systemVersion = res.systemVersion;
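`clientInitData` keeps a small retry budget (`retryTimes = 3`) around `getSystemInitData`, and every list now falls back to its `default*` constant or `[]` when the server response omits it. A generic sketch of that retry shape; the helper name and the 500 ms delay are assumptions:

```ts
// Hypothetical sketch of the retry pattern around a flaky init call.
async function withRetry<T>(fn: () => Promise<T>, retryTimes = 3, delayMs = 500): Promise<T> {
  try {
    return await fn();
  } catch (error) {
    if (retryTimes <= 0) throw error; // budget spent; surface the error
    await new Promise((r) => setTimeout(r, delayMs)); // stand-in for delay()
    return withRetry(fn, retryTimes - 1, delayMs);
  }
}

// usage sketch: const res = await withRetry(() => getSystemInitData());
```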