docs and embedding bill

archer 2023-06-05 18:58:38 +08:00
parent 1111f07fa7
commit 942aeeac2e
11 changed files with 66 additions and 32 deletions

View File

@@ -34,13 +34,13 @@ run: ## Run a dev service from host.
.PHONY: docker-build
docker-build: ## Build docker image with the desktop-frontend.
-	docker build -t registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:latest . --network host --build-arg HTTP_PROXY=http://127.0.0.1:7890 --build-arg HTTPS_PROXY=http://127.0.0.1:7890
+	docker build -t c121914yu/fast-gpt:latest . --network host --build-arg HTTP_PROXY=http://127.0.0.1:7890 --build-arg HTTPS_PROXY=http://127.0.0.1:7890
##@ Deployment
.PHONY: docker-run
docker-run: ## Push docker image.
-	docker run -d -p 8008:3000 --name fastgpt -v /web_project/yjl/fastgpt/logs:/app/.next/logs registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:latest
+	docker run -d -p 8008:3000 --name fastgpt -v /web_project/yjl/fastgpt/logs:/app/.next/logs c121914yu/fast-gpt:latest
#TODO: add support of docker push

View File

@@ -31,7 +31,9 @@ services:
      - /root/fastgpt/mongo/logs:/var/log/mongodb
      - /etc/localtime:/etc/localtime:ro
  fastgpt:
-    image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:latest
+    image: ghcr.io/c121914yu/fast-gpt:latest # github
+    # image: c121914yu/fast-gpt:latest # docker hub
+    # image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:latest # Alibaba Cloud
    network_mode: host
    restart: always
    container_name: fastgpt

View File

@@ -11,10 +11,10 @@
Mobile: tap the avatar of a chat message to copy or delete that message.
**Price list**
-If you use your own Api Key, you will not be billed. A detailed bill is available on the account page.
+If you use your own Api Key, openai model chats on the web page are not billed. A detailed bill is available on the account page.
| Billing item | Price: yuan / 1K tokens (including context) |
| --- | --- |
-| Knowledge base - index | Free |
+| Knowledge base - index | 0.001 |
| chatgpt - chat | 0.025 |
| gpt4 - chat | 0.5 |
| File splitting | 0.025 |

View File

@@ -15,10 +15,10 @@ The FastGpt project is fully open source and can be freely self-hosted, removing platform-risk concerns
### Price list
-If you use your own Api Key, you will not be billed. A detailed bill is available on the account page.
+If you use your own Api Key, openai model chats on the web page are not billed. A detailed bill is available on the account page.
| Billing item | Price: yuan / 1K tokens (including context) |
| --- | --- |
-| Knowledge base - index | Free |
+| Knowledge base - index | 0.001 |
| chatgpt - chat | 0.025 |
| gpt4 - chat | 0.5 |
| File splitting | 0.025 |
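For orientation, here is a hedged TypeScript sketch of how a charge in yuan follows from the per-1K-token prices in the table above; `estimateCost` is a hypothetical helper for illustration, not part of the FastGPT codebase.

```ts
// Hypothetical helper: cost in yuan for one call, given the total tokens used
// (prompt plus completion, i.e. "including context") and a per-1K-token price
// from the table above.
function estimateCost(totalTokens: number, pricePer1kTokens: number): number {
  return (totalTokens / 1000) * pricePer1kTokens;
}

console.log(estimateCost(4000, 0.025)); // chatgpt chat using 4,000 tokens -> 0.1 yuan
console.log(estimateCost(10000, 0.001)); // indexing 10,000 tokens -> 0.01 yuan
```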

View File

@@ -3,6 +3,7 @@ import type { ShareChatEditType } from '@/types/model';
import type { ModelSchema } from '@/types/mongoSchema';
export const embeddingModel = 'text-embedding-ada-002';
+export const embeddingPrice = 0.1;
export type EmbeddingModelType = 'text-embedding-ada-002';
export enum OpenAiChatEnum {
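As a hedged orientation note rather than part of the diff: the new constant is consumed by two later hunks in this commit (the file-import modal's price estimate and the vector-generation bill), so a reader can trace it roughly like this.

```ts
import { embeddingModel, embeddingPrice } from '@/constants/model';

// Later files in this commit pair the two exports: the model name goes into the
// embeddings request, the price into the per-token charge.
console.log(`${embeddingModel} is billed at ${embeddingPrice} internal units per token`);
```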

View File

@@ -1,6 +1,6 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
-import { authUser, getSystemOpenAiKey } from '@/service/utils/auth';
+import { authUser, getApiKey } from '@/service/utils/auth';
import { withNextCors } from '@/service/utils/tools';
import { getOpenAIApi } from '@/service/utils/chat/openai';
import { embeddingModel } from '@/constants/model';
@@ -24,7 +24,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
    }
    jsonRes<Response>(res, {
-      data: await openaiEmbedding({ userId, input, type })
+      data: await openaiEmbedding({ userId, input, type, mustPay: true })
    });
  } catch (err) {
    console.log(err);
@@ -38,9 +38,15 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
export async function openaiEmbedding({
  userId,
  input,
+  mustPay = false,
  type = 'chat'
-}: { userId: string } & Props) {
+}: { userId: string; mustPay?: boolean } & Props) {
-  const apiKey = getSystemOpenAiKey(type);
+  const { userOpenAiKey, systemAuthKey } = await getApiKey({
+    model: 'gpt-3.5-turbo',
+    userId,
+    mustPay,
+    type
+  });
  // get the chatAPI
  const chatAPI = getOpenAIApi();
@@ -54,7 +60,7 @@ export async function openaiEmbedding({
      },
      {
        timeout: 60000,
-        ...axiosConfig(apiKey)
+        ...axiosConfig(userOpenAiKey || systemAuthKey)
      }
    )
    .then((res) => ({
@@ -63,7 +69,7 @@ export async function openaiEmbedding({
    }));
  pushGenerateVectorBill({
-    isPay: false,
+    isPay: !userOpenAiKey,
    userId,
    text: input.join(''),
    tokenLen: result.tokenLen
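Taken together, these hunks switch the embedding route from a single system key to a per-user key with conditional billing. A minimal sketch of that logic, assuming only the names visible in the diff (`userOpenAiKey`, `systemAuthKey`); `pickKeyAndBilling` is a hypothetical helper, not the actual service code.

```ts
// Hedged sketch: prefer the caller's own OpenAI key, fall back to the system
// key, and bill the request only when the system key was used. This mirrors
// `...axiosConfig(userOpenAiKey || systemAuthKey)` and `isPay: !userOpenAiKey`
// in the hunks above.
type ApiKeys = { userOpenAiKey?: string; systemAuthKey: string };

function pickKeyAndBilling({ userOpenAiKey, systemAuthKey }: ApiKeys) {
  const apiKey = userOpenAiKey || systemAuthKey; // key actually sent to OpenAI
  const isPay = !userOpenAiKey; // charge the user only for system-key calls
  return { apiKey, isPay };
}
```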

View File

@@ -22,20 +22,22 @@ import Radio from '@/components/Radio';
import { splitText_token } from '@/utils/file';
import { TrainingModeEnum } from '@/constants/plugin';
import { getErrText } from '@/utils/tools';
+import { ChatModelMap, OpenAiChatEnum, embeddingPrice } from '@/constants/model';
+import { formatPrice } from '@/utils/user';
const fileExtension = '.txt,.doc,.docx,.pdf,.md';
const modeMap = {
  [TrainingModeEnum.qa]: {
-    maxLen: 2800,
+    maxLen: 2600,
-    slideLen: 800,
+    slideLen: 700,
-    price: 4,
+    price: ChatModelMap[OpenAiChatEnum.GPT35].price,
    isPrompt: true
  },
  [TrainingModeEnum.index]: {
-    maxLen: 800,
+    maxLen: 700,
    slideLen: 300,
-    price: 0.4,
+    price: embeddingPrice,
    isPrompt: false
  }
};
@@ -58,18 +60,18 @@ const SelectFileModal = ({
    { filename: '文本1', text: '' }
  ]);
  const [splitRes, setSplitRes] = useState<{
-    tokens: number;
+    price: number;
    chunks: { filename: string; value: string }[];
    successChunks: number;
  }>({
-    tokens: 0,
+    price: 0,
    successChunks: 0,
    chunks: []
  });
  const { openConfirm, ConfirmChild } = useConfirm({
    content: `确认导入该文件需要一定时间进行拆解该任务无法终止QA 拆分仅能使用余额,如果余额不足,未完成的任务会被直接清除。一共 ${
      splitRes.chunks.length
-    } ${splitRes.tokens ? `大约 ${splitRes.tokens} 个tokens` : ''}`
+    } ${splitRes.price ? `大约 ${splitRes.price}` : ''}`
  });
  const onSelectFile = useCallback(
@@ -166,8 +168,16 @@ const SelectFileModal = ({
      }))
      .filter((item) => item.tokens > 0);
+    let price = formatPrice(
+      splitRes.reduce((sum, item) => sum + item.tokens, 0) * modeMap[mode].price
+    );
+    if (mode === 'qa') {
+      price *= 1.2;
+    }
    setSplitRes({
-      tokens: splitRes.reduce((sum, item) => sum + item.tokens, 0),
+      price,
      chunks: splitRes
        .map((item) =>
          item.chunks.map((chunk) => ({
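Read together, the added lines replace the raw token count shown to the user with an up-front price estimate. A condensed, self-contained sketch of that calculation, with the component's dependencies passed in explicitly (the exact `formatPrice` signature is not shown in this diff).

```ts
// Hedged sketch of the new estimate: total tokens across all chunks, times the
// per-token price of the chosen mode, converted for display; QA-mode estimates
// are then padded by 20%, as in the hunk above.
type Chunked = { tokens: number };

function estimateImportPrice(
  chunks: Chunked[],
  perTokenPrice: number, // modeMap[mode].price in the component
  toDisplayPrice: (raw: number) => number, // formatPrice in the component
  isQaMode: boolean
): number {
  const totalTokens = chunks.reduce((sum, item) => sum + item.tokens, 0);
  let price = toDisplayPrice(totalTokens * perTokenPrice);
  if (isQaMode) {
    price *= 1.2;
  }
  return price;
}
```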

View File

@@ -17,6 +17,7 @@ import { useToast } from '@/hooks/useToast';
import { useQuery } from '@tanstack/react-query';
import { useRouter } from 'next/router';
import { getErrText } from '@/utils/tools';
+import Markdown from '@/components/Markdown';
const PayModal = ({ onClose }: { onClose: () => void }) => {
  const router = useRouter();
@@ -78,7 +79,7 @@ const PayModal = ({ onClose }: { onClose: () => void }) => {
      }}
    >
      <ModalOverlay />
-      <ModalContent>
+      <ModalContent minW={'auto'}>
        <ModalHeader></ModalHeader>
        {!payId && <ModalCloseButton />}
@@ -86,7 +87,7 @@ const PayModal = ({ onClose }: { onClose: () => void }) => {
        {!payId && (
          <>
            <Grid gridTemplateColumns={'repeat(4,1fr)'} gridGap={5} mb={4}>
-              {[5, 10, 20, 50].map((item) => (
+              {[10, 20, 50, 100].map((item) => (
                <Button
                  key={item}
                  variant={item === inputVal ? 'solid' : 'outline'}
@@ -96,7 +97,7 @@ const PayModal = ({ onClose }: { onClose: () => void }) => {
                </Button>
              ))}
            </Grid>
-            <Box>
+            <Box mb={4}>
              <Input
                value={inputVal}
                type={'number'}
@@ -107,6 +108,15 @@ const PayModal = ({ onClose }: { onClose: () => void }) => {
              }}
            ></Input>
            </Box>
+            <Markdown
+              source={`
+| 计费项 | 价格: 元/ 1K tokens（包含上下文）|
+| --- | --- |
+| 知识库 - 索引 | 0.001 |
+| chatgpt - 对话 | 0.025 |
+| gpt4 - 对话 | 0.5 |
+| 文件拆分 | 0.025 |`}
+            />
          </>
        )}
        {/* Payment QR code */}

View File

@@ -82,7 +82,8 @@ export async function generateVector(): Promise<any> {
  const vectors = await openaiEmbedding({
    input: dataItems.map((item) => item.q),
    userId,
-    type: 'training'
+    type: 'training',
+    mustPay: true
  });
  // insert the generated results into pg

View File

@@ -1,5 +1,11 @@
import { connectToDatabase, Bill, User, ShareChat } from '../mongo';
-import { ChatModelMap, OpenAiChatEnum, ChatModelType, embeddingModel } from '@/constants/model';
+import {
+  ChatModelMap,
+  OpenAiChatEnum,
+  ChatModelType,
+  embeddingModel,
+  embeddingPrice
+} from '@/constants/model';
import { BillTypeEnum } from '@/constants/user';
export const pushChatBill = async ({
@@ -145,11 +151,9 @@ export const pushGenerateVectorBill = async ({
  await connectToDatabase();
  try {
-    const unitPrice = 0.4;
    // calculate the price; at least 1
-    const price = 0;
-    // let price = unitPrice * tokenLen;
-    // price = price > 1 ? price : 1;
+    let price = embeddingPrice * tokenLen;
+    price = price > 1 ? price : 1;
    // insert the Bill record
    const res = await Bill.create({
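A worked example of the new vector charge, using the `embeddingPrice = 0.1` constant added earlier in this commit; the results are in the app's internal price units (whatever `formatPrice` later turns them into), and the floor of 1 comes straight from the hunk above.

```ts
// Hedged illustration of the pushGenerateVectorBill change: per-token price
// times token count, never less than 1 internal unit.
const embeddingPrice = 0.1;
const vectorCharge = (tokenLen: number) => Math.max(embeddingPrice * tokenLen, 1);

console.log(vectorCharge(500)); // 50 internal units
console.log(vectorCharge(5)); // 0.5 is below the floor, so it is billed as 1
```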

View File

@@ -28,7 +28,7 @@ const UserSchema = new Schema({
  balance: {
    // platform balance; cannot be withdrawn
    type: Number,
-    default: 0.5 * PRICE_SCALE
+    default: 2 * PRICE_SCALE
  },
  inviterId: {
    // who invited this user to register