External dataset (#1485)
* fix: revert version
* feat: external collection
* import context
* external ui
* doc
* fix: ts
* clear invalid data
* feat: rename sub name
* fix: node if else edge remove
* fix: init
* api size
* fix: if else node refresh
This commit is contained in:
parent fb04889a31
commit cd876251b7

.vscode/nextapi.code-snippets (vendored) · 13
@@ -40,18 +40,11 @@
 "",
 "type ContextType = {$1};",
 "",
-"type ContextValueType = {};",
-"",
 "export const Context = createContext<ContextType>({});",
 "",
-"export const ContextProvider = ({",
-" children,",
-" value",
-"}: {",
-" children: ReactNode;",
-" value: ContextValueType;",
-"}) => {",
-" return <Context.Provider value={value}>{children}</Context.Provider>;",
+"export const ContextProvider = ({ children }: { children: ReactNode }) => {",
+" const contextValue: ContextType = {};",
+" return <Context.Provider value={contextValue}>{children}</Context.Provider>;",
 "};",
 ],
 "description": "FastGPT usecontext template"
@@ -36,6 +36,8 @@ COPY --from=mainDeps /app/projects/$name/node_modules ./projects/$name/node_modu
 RUN [ -z "$proxy" ] || sed -i 's/dl-cdn.alpinelinux.org/mirrors.ustc.edu.cn/g' /etc/apk/repositories
 
 RUN apk add --no-cache libc6-compat && npm install -g pnpm@8.6.0
+
+ENV NODE_OPTIONS="--max-old-space-size=4096"
 RUN pnpm --filter=$name build
 
 # --------- runner -----------
@@ -118,4 +118,5 @@ OneAPI 的 API Key 配置错误,需要修改`OPENAI_API_KEY`环境变量,并
 ### bad_response_status_code bad response status code 503
 
 1. 模型服务不可用
-2. ....
+2. 模型接口参数异常(温度、max token等可能不适配)
+3. ....
@@ -35,4 +35,5 @@ curl --location --request POST 'https://{{host}}/api/admin/clearInvalidData' \
 ## V4.8.1 更新说明
 
 1. 新增 - 知识库重新选择向量模型重建
-2. 修复 - 定时器清理脏数据任务
+2. 修复 - 工作流删除节点的动态输入和输出时候,没有正确的删除连接线,导致可能出现逻辑异常。
+3. 修复 - 定时器清理脏数据任务
packages/global/core/dataset/api.d.ts (vendored) · 8
@@ -11,14 +11,16 @@ export type DatasetUpdateBody = {
   intro?: string;
   permission?: DatasetSchemaType['permission'];
   agentModel?: LLMModelItemType;
-  websiteConfig?: DatasetSchemaType['websiteConfig'];
   status?: DatasetSchemaType['status'];
+
+  websiteConfig?: DatasetSchemaType['websiteConfig'];
+  externalReadUrl?: DatasetSchemaType['externalReadUrl'];
 };
 
 /* ================= collection ===================== */
 export type DatasetCollectionChunkMetadataType = {
   parentId?: string;
-  trainingType?: `${TrainingModeEnum}`;
+  trainingType?: TrainingModeEnum;
   chunkSize?: number;
   chunkSplitter?: string;
   qaPrompt?: string;
@@ -78,7 +80,7 @@ export type PostWebsiteSyncParams = {
 export type PushDatasetDataProps = {
   collectionId: string;
   data: PushDatasetDataChunkProps[];
-  trainingMode: `${TrainingModeEnum}`;
+  trainingMode: TrainingModeEnum;
   prompt?: string;
   billId?: string;
 };
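Several hunks in this commit (here and in type.d.ts below) replace the template-literal string type `${TrainingModeEnum}` with the enum type itself. A minimal TypeScript sketch of the practical difference, using an illustrative local enum rather than the FastGPT source:

enum TrainingMode {
  qa = 'qa',
  auto = 'auto'
}

// Template-literal type: any matching string literal is accepted.
type TrainingModeString = `${TrainingMode}`;
const loose: TrainingModeString = 'qa'; // ok: plain string literal

// Enum type: callers must pass the enum member itself.
const strict: TrainingMode = TrainingMode.qa; // ok
// const bad: TrainingMode = 'qa'; // compile error: string is not assignable to the enum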
packages/global/core/dataset/collection/constants.ts (new file) · 6
@@ -0,0 +1,6 @@
+/* sourceId = prefix-id; id=fileId;link url;externalId */
+export enum CollectionSourcePrefixEnum {
+  local = 'local',
+  link = 'link',
+  external = 'external'
+}
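The comment above describes a composite source id ("sourceId = prefix-id"). A hypothetical helper pair, not part of this PR, showing how such an id could be built and split using the new enum (names are illustrative only):

import { CollectionSourcePrefixEnum } from './constants';

// Hypothetical helpers for illustration; not from the FastGPT codebase.
const buildSourceId = (prefix: CollectionSourcePrefixEnum, id: string) => `${prefix}-${id}`;

const parseSourceId = (sourceId: string) => {
  const [prefix, ...rest] = sourceId.split('-');
  return { prefix: prefix as CollectionSourcePrefixEnum, id: rest.join('-') };
};

// buildSourceId(CollectionSourcePrefixEnum.external, 'abc123') === 'external-abc123'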
@@ -2,23 +2,29 @@
 export enum DatasetTypeEnum {
   folder = 'folder',
   dataset = 'dataset',
-  websiteDataset = 'websiteDataset' // depp link
+  websiteDataset = 'websiteDataset', // depp link
+  externalFile = 'externalFile'
 }
 export const DatasetTypeMap = {
   [DatasetTypeEnum.folder]: {
     icon: 'common/folderFill',
-    label: 'core.dataset.Folder Dataset',
+    label: 'Folder Dataset',
     collectionLabel: 'common.Folder'
   },
   [DatasetTypeEnum.dataset]: {
     icon: 'core/dataset/commonDataset',
-    label: 'core.dataset.Common Dataset',
+    label: 'Common Dataset',
     collectionLabel: 'common.File'
   },
   [DatasetTypeEnum.websiteDataset]: {
     icon: 'core/dataset/websiteDataset',
-    label: 'core.dataset.Website Dataset',
+    label: 'Website Dataset',
     collectionLabel: 'common.Website'
+  },
+  [DatasetTypeEnum.externalFile]: {
+    icon: 'core/dataset/commonDataset',
+    label: 'External File',
+    collectionLabel: 'common.File'
   }
 };
 
@@ -77,7 +83,8 @@ export enum ImportDataSourceEnum {
   fileLocal = 'fileLocal',
   fileLink = 'fileLink',
   fileCustom = 'fileCustom',
-  csvTable = 'csvTable'
+  csvTable = 'csvTable',
+  externalFile = 'externalFile'
 }
 
 export enum TrainingModeEnum {
packages/global/core/dataset/type.d.ts (vendored) · 23
@@ -22,13 +22,16 @@ export type DatasetSchemaType = {
   vectorModel: string;
   agentModel: string;
   intro: string;
-  type: `${DatasetTypeEnum}`;
+  type: DatasetTypeEnum;
   status: `${DatasetStatusEnum}`;
   permission: `${PermissionTypeEnum}`;
+
+  // metadata
   websiteConfig?: {
     url: string;
     selector: string;
   };
+  externalReadUrl?: string;
 };
 
 export type DatasetCollectionSchemaType = {
@@ -42,16 +45,18 @@ export type DatasetCollectionSchemaType = {
   createTime: Date;
   updateTime: Date;
 
-  trainingType: `${TrainingModeEnum}`;
+  trainingType: TrainingModeEnum;
   chunkSize: number;
   chunkSplitter?: string;
   qaPrompt?: string;
 
-  fileId?: string;
-  rawLink?: string;
+  sourceId?: string; // relate CollectionSourcePrefixEnum
+  fileId?: string; // local file id
+  rawLink?: string; // link url
 
   rawTextLength?: number;
   hashRawText?: string;
+  externalSourceUrl?: string; // external import url
   metadata?: {
     webPageSelector?: string;
     relatedImgId?: string; // The id of the associated image collections
@@ -93,7 +98,7 @@ export type DatasetTrainingSchemaType = {
   billId: string;
   expireAt: Date;
   lockTime: Date;
-  mode: `${TrainingModeEnum}`;
+  mode: TrainingModeEnum;
   model: string;
   prompt: string;
   dataId?: string;
@@ -112,13 +117,19 @@ export type DatasetDataWithCollectionType = Omit<DatasetDataSchemaType, 'collect
 };
 
 /* ================= dataset ===================== */
+export type DatasetSimpleItemType = {
+  _id: string;
+  avatar: string;
+  name: string;
+  vectorModel: VectorModelItemType;
+};
 export type DatasetListItemType = {
   _id: string;
   parentId: string;
   avatar: string;
   name: string;
   intro: string;
-  type: `${DatasetTypeEnum}`;
+  type: DatasetTypeEnum;
   isOwner: boolean;
   canWrite: boolean;
   permission: `${PermissionTypeEnum}`;
@@ -46,7 +46,7 @@ export function getDefaultIndex(props?: { q?: string; a?: string; dataId?: strin
   };
 }
 
-export const predictDataLimitLength = (mode: `${TrainingModeEnum}`, data: any[]) => {
+export const predictDataLimitLength = (mode: TrainingModeEnum, data: any[]) => {
   if (mode === TrainingModeEnum.qa) return data.length * 20;
   if (mode === TrainingModeEnum.auto) return data.length * 5;
   return data.length;
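A quick usage sketch for predictDataLimitLength as shown above: qa mode budgets 20 units per chunk, auto 5, and any other mode falls through to the raw length.

const chunks = new Array(10).fill({});
predictDataLimitLength(TrainingModeEnum.qa, chunks); // 200
predictDataLimitLength(TrainingModeEnum.auto, chunks); // 50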
@@ -18,6 +18,7 @@ export const AssignedAnswerModule: FlowNodeTemplateType = {
   intro:
     '该模块可以直接回复一段指定的内容。常用于引导、提示。非字符串内容传入时,会转成字符串进行输出。',
   version: '481',
+  isTool: true,
   inputs: [
     {
       key: NodeInputKeyEnum.answerText,
@@ -16,11 +16,6 @@ const DatasetCollectionSchema = new Schema({
     ref: DatasetColCollectionName,
     default: null
   },
-  userId: {
-    // abandoned
-    type: Schema.Types.ObjectId,
-    ref: 'user'
-  },
   teamId: {
     type: Schema.Types.ObjectId,
     ref: TeamCollectionName,
@@ -54,6 +49,7 @@ const DatasetCollectionSchema = new Schema({
     default: () => new Date()
   },
 
+  // chunk filed
   trainingType: {
     type: String,
     enum: Object.keys(TrainingTypeMap),
@@ -70,20 +66,21 @@ const DatasetCollectionSchema = new Schema({
     type: String
   },
 
+  sourceId: String,
+  // local file collection
   fileId: {
     type: Schema.Types.ObjectId,
     ref: 'dataset.files'
   },
-  rawLink: {
-    type: String
-  },
+  // web link collection
+  rawLink: String,
 
-  rawTextLength: {
-    type: Number
-  },
-  hashRawText: {
-    type: String
-  },
+  // external collection
+
+  // metadata
+  rawTextLength: Number,
+  hashRawText: String,
+  externalSourceUrl: String, // external import url
   metadata: {
     type: Object,
     default: {}
@@ -89,7 +89,8 @@ const DatasetSchema = new Schema({
       default: 'body'
     }
   }
-  }
+  },
+  externalReadUrl: String
 });
 
 try {
@@ -14,7 +14,7 @@ import {
 } from '@fastgpt/global/support/wallet/sub/constants';
 import type { TeamSubSchema } from '@fastgpt/global/support/wallet/sub/type';
 
-export const subCollectionName = 'team.subscriptions';
+export const subCollectionName = 'team_subscriptions';
 
 const SubSchema = new Schema({
   teamId: {
@@ -1,11 +1,11 @@
 import { DragHandleIcon } from '@chakra-ui/icons';
-import { Box } from '@chakra-ui/react';
+import { Box, BoxProps } from '@chakra-ui/react';
 import React from 'react';
 import { DraggableProvided } from 'react-beautiful-dnd';
 
-const DragIcon = ({ provided }: { provided: DraggableProvided }) => {
+const DragIcon = ({ provided, ...props }: { provided: DraggableProvided } & BoxProps) => {
   return (
-    <Box {...provided.dragHandleProps}>
+    <Box {...provided.dragHandleProps} {...props}>
       <DragHandleIcon color={'myGray.500'} _hover={{ color: 'primary.600' }} />
     </Box>
   );
@@ -1,12 +1,6 @@
-
 {
-  "parser": "@typescript-eslint/parser", // 确保使用了 TypeScript 解析器
-  "plugins": ["@typescript-eslint"], // 引入 TypeScript 插件
-
   "extends": "next/core-web-vitals",
   "rules": {
-    "react-hooks/rules-of-hooks": 0,
-    "@typescript-eslint/consistent-type-imports": "warn" // 或者 "error" 来强制执行
-
+    "react-hooks/rules-of-hooks": 0
   }
 }
@@ -1,4 +1,5 @@
 {
+  "Add new": "Add new",
   "App": "App",
   "Export": "Export",
   "Folder": "Folder",
@@ -509,18 +510,14 @@
   "Choose Dataset": "Associate dataset",
   "Chunk amount": "Number of chunks",
   "Collection": "Dataset",
-  "Common Dataset": "Common dataset",
-  "Common Dataset Desc": "Can be built by importing files, web links, or manual entry",
   "Create dataset": "Create a dataset",
   "Dataset": "Dataset",
   "Dataset ID": "Dataset ID",
   "Dataset Type": "Dataset type",
   "Delete Confirm": "Confirm to delete this dataset? Data cannot be recovered after deletion, please confirm!",
   "Delete Website Tips": "Confirm to delete this site?",
-  "Empty Dataset": "",
   "Empty Dataset Tips": "No datasets yet, go create one!",
   "File collection": "File dataset",
-  "Folder Dataset": "Folder",
   "Folder placeholder": "This is a directory",
   "Go Dataset": "Go to dataset",
   "Intro Placeholder": "This dataset has no introduction~",
@@ -540,8 +537,6 @@
   "Table collection": "Table dataset",
   "Text collection": "Text dataset",
   "Total chunks": "Total chunks: {{total}}",
-  "Website Dataset": "Web site synchronization",
-  "Website Dataset Desc": "Web site synchronization allows you to use a web page link to build a dataset",
   "collection": {
     "Click top config website": "Click to configure website",
     "Collection name": "Dataset name",
@@ -1,6 +1,17 @@
 {
+  "Common Dataset": "Common dataset",
+  "Common Dataset Desc": "Can be built by importing files, web links, or manual entry",
   "Confirm to rebuild embedding tip": "Are you sure to switch the knowledge base index? Switching index is a very heavy operation that requires re-indexing all the data in your knowledge base, which may take a long time. Please ensure that the remaining points in your account are sufficient.",
+  "External file": "External file",
+  "External file Dataset Desc": "You can import files from an external file library to build a knowledge base. Files are not stored twice",
+  "External id": "File id",
+  "External read url": "External read url",
+  "External url": "File read url",
+  "Folder Dataset": "Folder",
   "Rebuild embedding start tip": "The task of switching index models has begun",
   "Rebuilding index count": "Rebuilding count: {{count}}",
-  "The knowledge base has indexes that are being trained or being rebuilt": "The knowledge base has indexes that are being trained or being rebuilt"
+  "The knowledge base has indexes that are being trained or being rebuilt": "The knowledge base has indexes that are being trained or being rebuilt",
+  "Website Dataset": "Web site",
+  "Website Dataset Desc": "Web site synchronization allows you to use a web page link to build a dataset",
+  "filename": "filename"
 }
@@ -1,4 +1,5 @@
 {
+  "Add new": "新增",
   "App": "应用",
   "Export": "导出",
   "Folder": "文件夹",
@@ -509,8 +510,6 @@
   "Choose Dataset": "关联知识库",
   "Chunk amount": "分段数",
   "Collection": "数据集",
-  "Common Dataset": "通用知识库",
-  "Common Dataset Desc": "可通过导入文件、网页链接或手动录入形式构建知识库",
   "Create dataset": "创建一个知识库",
   "Dataset": "知识库",
   "Dataset ID": "知识库 ID",
@@ -520,7 +519,6 @@
   "Empty Dataset": "",
   "Empty Dataset Tips": "还没有知识库,快去创建一个吧!",
   "File collection": "文件数据集",
-  "Folder Dataset": "文件夹",
   "Folder placeholder": "这是一个目录",
   "Go Dataset": "前往知识库",
   "Intro Placeholder": "这个知识库还没有介绍~",
@@ -540,8 +538,6 @@
   "Table collection": "表格数据集",
   "Text collection": "文本数据集",
   "Total chunks": "总分段: {{total}}",
-  "Website Dataset": "Web 站点同步",
-  "Website Dataset Desc": "Web 站点同步允许你直接使用一个网页链接构建知识库",
   "collection": {
     "Click top config website": "点击配置网站",
     "Collection name": "数据集名称",
@@ -1,6 +1,17 @@
 {
+  "Common Dataset": "通用知识库",
+  "Common Dataset Desc": "可通过导入文件、网页链接或手动录入形式构建知识库",
   "Confirm to rebuild embedding tip": "确认为知识库切换索引?\n切换索引是一个非常重量的操作,需要对您知识库内所有数据进行重新索引,时间可能较长,请确保账号内剩余积分充足。",
+  "External File": "外部文件库",
+  "External file Dataset Desc": "可以从外部文件库导入文件构建知识库,文件不会进行二次存储",
+  "External id": "文件阅读ID",
+  "External read url": "外部预览地址",
+  "External url": "文件访问URL",
+  "Folder Dataset": "文件夹",
   "Rebuild embedding start tip": "切换索引模型任务已开始",
   "Rebuilding index count": "重建中索引数量: {{count}}",
-  "The knowledge base has indexes that are being trained or being rebuilt": "知识库有训练中或正在重建的索引"
+  "The knowledge base has indexes that are being trained or being rebuilt": "知识库有训练中或正在重建的索引",
+  "Website Dataset": "Web 站点同步",
+  "Website Dataset Desc": "Web 站点同步允许你直接使用一个网页链接构建知识库",
+  "filename": "文件名"
 }
projects/app/src/components/common/NextHead/index.tsx (new file) · 14
@@ -0,0 +1,14 @@
+import Head from 'next/head';
+import React from 'react';
+
+const NextHead = ({ title, icon, desc }: { title?: string; icon?: string; desc?: string }) => {
+  return (
+    <Head>
+      <title>{title}</title>
+      {desc && <meta name="description" content={desc} />}
+      {icon && <link rel="icon" href={icon} />}
+    </Head>
+  );
+};
+
+export default NextHead;
@@ -1,12 +1,12 @@
 import { Box, Flex, FlexProps } from '@chakra-ui/react';
 import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
 import MyIcon from '@fastgpt/web/components/common/Icon';
-import { useTranslation } from 'next-i18next';
 import React from 'react';
 import { DatasetTypeMap } from '@fastgpt/global/core/dataset/constants';
+import { useI18n } from '@/web/context/I18n';
 
-const DatasetTypeTag = ({ type, ...props }: { type: `${DatasetTypeEnum}` } & FlexProps) => {
-  const { t } = useTranslation();
+const DatasetTypeTag = ({ type, ...props }: { type: DatasetTypeEnum } & FlexProps) => {
+  const { datasetT } = useI18n();
 
   const item = DatasetTypeMap[type] || DatasetTypeMap['dataset'];
 
@@ -22,7 +22,8 @@ const DatasetTypeTag = ({ type, ...props }: { type: `${DatasetTypeEnum}` } & Fle
       {...props}
     >
       <MyIcon name={item.icon as any} w={'16px'} mr={2} color={'myGray.400'} />
-      <Box>{t(item.label)}</Box>
+      {/* @ts-ignore */}
+      <Box>{datasetT(item.label)}</Box>
     </Flex>
   );
 };
@@ -50,16 +50,11 @@ const ListItem = ({
 }) => {
   const { t } = useTranslation();
   const { getZoom } = useReactFlow();
+  const onDelEdge = useContextSelector(WorkflowContext, (v) => v.onDelEdge);
+  const handleId = getHandleId(nodeId, 'source', getElseIFLabel(conditionIndex));
 
-  return (
-    <Box
-      ref={provided.innerRef}
-      {...provided.draggableProps}
-      style={{
-        ...provided.draggableProps.style,
-        opacity: snapshot.isDragging ? 0.8 : 1
-      }}
-    >
+  const Render = useMemo(() => {
+    return (
       <Flex
         alignItems={'center'}
         position={'relative'}
@@ -68,7 +63,10 @@ const ListItem = ({
       >
         <Container w={snapshot.isDragging ? '' : 'full'} className="nodrag">
           <Flex mb={4} alignItems={'center'}>
-            {ifElseList.length > 1 && <DragIcon provided={provided} />}
+            <DragIcon
+              visibility={ifElseList.length > 1 ? 'visible' : 'hidden'}
+              provided={provided}
+            />
             <Box color={'black'} fontSize={'lg'} ml={2}>
               {getElseIFLabel(conditionIndex)}
             </Box>
@@ -109,6 +107,10 @@ const ListItem = ({
               color={'myGray.400'}
               onClick={() => {
                 onUpdateIfElseList(ifElseList.filter((_, index) => index !== conditionIndex));
+                onDelEdge({
+                  nodeId,
+                  sourceHandle: handleId
+                });
               }}
             />
           )}
@@ -185,21 +187,21 @@ const ListItem = ({
               onChange={(e) => {
                 onUpdateIfElseList(
                   ifElseList.map((ifElse, index) => {
-                    if (index === conditionIndex) {
-                      return {
-                        ...ifElse,
-                        list: ifElse.list.map((item, index) => {
-                          if (index === i) {
-                            return {
-                              ...item,
-                              value: e
-                            };
-                          }
-                          return item;
-                        })
-                      };
-                    }
-                    return ifElse;
+                    return {
+                      ...ifElse,
+                      list:
+                        index === conditionIndex
+                          ? ifElse.list.map((item, index) => {
+                              if (index === i) {
+                                return {
+                                  ...item,
+                                  value: e
+                                };
+                              }
+                              return item;
+                            })
+                          : ifElse.list
+                    };
                   })
                 );
               }}
@@ -263,12 +265,38 @@ const ListItem = ({
         {!snapshot.isDragging && (
           <SourceHandle
             nodeId={nodeId}
-            handleId={getHandleId(nodeId, 'source', getElseIFLabel(conditionIndex))}
+            handleId={handleId}
            position={Position.Right}
            translate={[18, 0]}
          />
        )}
      </Flex>
+    );
+  }, [
+    conditionIndex,
+    conditionItem.condition,
+    conditionItem.list,
+    getZoom,
+    handleId,
+    ifElseList,
+    nodeId,
+    onDelEdge,
+    onUpdateIfElseList,
+    provided,
+    snapshot.isDragging,
+    t
+  ]);
+
+  return (
+    <Box
+      ref={provided.innerRef}
+      {...provided.draggableProps}
+      style={{
+        ...provided.draggableProps.style,
+        opacity: snapshot.isDragging ? 0.8 : 1
+      }}
+    >
+      {Render}
     </Box>
   );
 };
@@ -387,35 +415,39 @@ const ConditionValueInput = ({
     return output.valueType;
   }, [nodeList, variable]);
 
-  if (valueType === WorkflowIOValueTypeEnum.boolean) {
-    return (
-      <MySelect
-        list={[
-          { label: 'True', value: 'true' },
-          { label: 'False', value: 'false' }
-        ]}
-        onchange={onChange}
-        value={value}
-        placeholder={'选择值'}
-        isDisabled={
-          condition === VariableConditionEnum.isEmpty ||
-          condition === VariableConditionEnum.isNotEmpty
-        }
-      />
-    );
-  } else {
-    return (
-      <MyInput
-        value={value}
-        placeholder={'输入值'}
-        w={'100%'}
-        bg={'white'}
-        isDisabled={
-          condition === VariableConditionEnum.isEmpty ||
-          condition === VariableConditionEnum.isNotEmpty
-        }
-        onChange={(e) => onChange(e.target.value)}
-      />
-    );
-  }
+  const Render = useMemo(() => {
+    if (valueType === WorkflowIOValueTypeEnum.boolean) {
+      return (
+        <MySelect
+          list={[
+            { label: 'True', value: 'true' },
+            { label: 'False', value: 'false' }
+          ]}
+          onchange={onChange}
+          value={value}
+          placeholder={'选择值'}
+          isDisabled={
+            condition === VariableConditionEnum.isEmpty ||
+            condition === VariableConditionEnum.isNotEmpty
+          }
+        />
+      );
+    } else {
+      return (
+        <MyInput
+          value={value}
+          placeholder={'输入值'}
+          w={'100%'}
+          bg={'white'}
+          isDisabled={
+            condition === VariableConditionEnum.isEmpty ||
+            condition === VariableConditionEnum.isNotEmpty
+          }
+          onChange={(e) => onChange(e.target.value)}
+        />
+      );
+    }
+  }, [condition, onChange, value, valueType]);
+
+  return Render;
 };
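Both ListItem and ConditionValueInput above are refactored to build their JSX once inside a `const Render = useMemo(...)` block and return it. A minimal, self-contained sketch of that pattern (not FastGPT source), assuming React with TSX:

import React, { useMemo } from 'react';

const Example = ({ label, count }: { label: string; count: number }) => {
  // Recompute the rendered tree only when its inputs change.
  const Render = useMemo(() => {
    return (
      <div>
        {label}: {count}
      </div>
    );
  }, [label, count]);

  return Render;
};

export default Example;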
@@ -1,4 +1,4 @@
-import React, { useCallback, useMemo, useState } from 'react';
+import React, { useCallback, useMemo } from 'react';
 import NodeCard from '../render/NodeCard';
 import { useTranslation } from 'next-i18next';
 import { Box, Button, Flex } from '@chakra-ui/react';
@@ -9,7 +9,7 @@ import { IfElseListItemType } from '@fastgpt/global/core/workflow/template/syste
 import { useContextSelector } from 'use-context-selector';
 import { WorkflowContext } from '../../../context';
 import Container from '../../components/Container';
-import DndDrag, { Draggable, DropResult } from '@fastgpt/web/components/common/DndDrag/index';
+import DndDrag, { Draggable } from '@fastgpt/web/components/common/DndDrag/index';
 import { SourceHandle } from '../render/Handle';
 import { getHandleId } from '@fastgpt/global/core/workflow/utils';
 import ListItem from './ListItem';
@@ -19,6 +19,7 @@ const NodeIfElse = ({ data, selected }: NodeProps<FlowNodeItemType>) => {
   const { t } = useTranslation();
   const { nodeId, inputs = [] } = data;
   const onChangeNode = useContextSelector(WorkflowContext, (v) => v.onChangeNode);
+  const elseHandleId = getHandleId(nodeId, 'source', IfElseResultEnum.ELSE);
 
   const ifElseList = useMemo(
     () =>
@@ -49,7 +50,7 @@ const NodeIfElse = ({ data, selected }: NodeProps<FlowNodeItemType>) => {
     <NodeCard selected={selected} maxW={'1000px'} {...data}>
       <Box px={4} cursor={'default'}>
         <DndDrag<IfElseListItemType>
-          onDragEndCb={(list) => onUpdateIfElseList(list)}
+          onDragEndCb={(list: IfElseListItemType[]) => onUpdateIfElseList(list)}
           dataList={ifElseList}
           renderClone={(provided, snapshot, rubric) => (
             <ListItem
@@ -95,7 +96,7 @@ const NodeIfElse = ({ data, selected }: NodeProps<FlowNodeItemType>) => {
       </Box>
       <SourceHandle
         nodeId={nodeId}
-        handleId={getHandleId(nodeId, 'source', IfElseResultEnum.ELSE)}
+        handleId={elseHandleId}
         position={Position.Right}
         translate={[26, 0]}
       />
@@ -1,4 +1,4 @@
-import React, { useState } from 'react';
+import React, { useCallback, useState } from 'react';
 import { getPublishList, postRevertVersion } from '@/web/core/app/versionApi';
 import { useScrollPagination } from '@fastgpt/web/hooks/useScrollPagination';
 import CustomRightDrawer from '@fastgpt/web/components/common/MyDrawer/CustomRightDrawer';
@@ -14,6 +14,8 @@ import MyIcon from '@fastgpt/web/components/common/Icon';
 import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
 import { useConfirm } from '@fastgpt/web/hooks/useConfirm';
 import { useRequest } from '@fastgpt/web/hooks/useRequest';
+import { StoreNodeItemType } from '@fastgpt/global/core/workflow/type';
+import { StoreEdgeItemType } from '@fastgpt/global/core/workflow/type/edge';
 
 const PublishHistoriesSlider = () => {
   const { t } = useTranslation();
@@ -45,29 +47,29 @@ const PublishHistoriesSlider = () => {
     setIsShowVersionHistories(false);
   });
 
-  const onPreview = useMemoizedFn((data: AppVersionSchemaType) => {
+  const onPreview = useCallback((data: AppVersionSchemaType) => {
     setSelectedHistoryId(data._id);
 
     initData({
       nodes: data.nodes,
       edges: data.edges
     });
-  });
-  const onCloseSlider = useMemoizedFn(() => {
-    setSelectedHistoryId(undefined);
-    initData({
-      nodes: appDetail.modules,
-      edges: appDetail.edges
-    });
-    onClose();
-  });
+  }, []);
+  const onCloseSlider = useCallback(
+    (data: { nodes: StoreNodeItemType[]; edges: StoreEdgeItemType[] }) => {
+      setSelectedHistoryId(undefined);
+      initData(data);
+      onClose();
+    },
+    [appDetail]
+  );
 
   const { mutate: onRevert, isLoading: isReverting } = useRequest({
     mutationFn: async (data: AppVersionSchemaType) => {
       if (!appId) return;
       await postRevertVersion(appId, {
         versionId: data._id,
-        editNodes: appDetail.modules,
+        editNodes: appDetail.modules, // old workflow
         editEdges: appDetail.edges
       });
 
@@ -77,7 +79,7 @@ const PublishHistoriesSlider = () => {
         edges: data.edges
       });
 
-      onCloseSlider();
+      onCloseSlider(data);
     }
   });
 
@@ -86,7 +88,12 @@ const PublishHistoriesSlider = () => {
   return (
     <>
       <CustomRightDrawer
-        onClose={onCloseSlider}
+        onClose={() =>
+          onCloseSlider({
+            nodes: appDetail.modules,
+            edges: appDetail.edges
+          })
+        }
         iconSrc="core/workflow/versionHistories"
         title={t('core.workflow.publish.histories')}
         maxW={'300px'}
@@ -430,8 +430,8 @@ const WorkflowContextProvider = ({
 
   const initData = useMemoizedFn(
     async (e: { nodes: StoreNodeItemType[]; edges: StoreEdgeItemType[] }) => {
-      setNodes(e.nodes?.map((item) => storeNode2FlowNode({ item })));
-      setEdges(e.edges?.map((item) => storeEdgesRenderEdge({ edge: item })));
+      setNodes(e.nodes?.map((item) => storeNode2FlowNode({ item })) || []);
+      setEdges(e.edges?.map((item) => storeEdgesRenderEdge({ edge: item })) || []);
     }
   );
 
@@ -14,7 +14,7 @@ import { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';
 /* ================= dataset ===================== */
 export type CreateDatasetParams = {
   parentId?: string;
-  type: `${DatasetTypeEnum}`;
+  type: DatasetTypeEnum;
   name: string;
   intro: string;
   avatar: string;
@@ -76,7 +76,7 @@ export type SearchTestResponse = {
 
 /* =========== training =========== */
 export type PostPreviewFilesChunksProps = {
-  type: `${ImportDataSourceEnum}`;
+  type: ImportDataSourceEnum;
   sourceId: string;
   chunkSize: number;
   overlapRatio: number;
@@ -10,28 +10,22 @@ import I18nContextProvider from '@/web/context/I18n';
 import { useInitApp } from '@/web/context/useInitApp';
 
 import '@/web/styles/reset.scss';
+import NextHead from '@/components/common/NextHead';
 
 function App({ Component, pageProps }: AppProps) {
   const { feConfigs, scripts, title } = useInitApp();
 
   return (
     <>
-      <Head>
-        <title>{title}</title>
-        <meta
-          name="description"
-          content={
-            feConfigs?.systemDescription ||
-            process.env.SYSTEM_DESCRIPTION ||
-            `${title} 是一个大模型应用编排系统,提供开箱即用的数据处理、模型调用等能力,可以快速的构建知识库并通过 Flow 可视化进行工作流编排,实现复杂的知识库场景!`
-          }
-        />
-        <meta
-          name="viewport"
-          content="width=device-width,initial-scale=1.0,maximum-scale=1.0,minimum-scale=1.0,user-scalable=no, viewport-fit=cover"
-        />
-        <link rel="icon" href={feConfigs.favicon || process.env.SYSTEM_FAVICON} />
-      </Head>
+      <NextHead
+        title={title}
+        desc={
+          feConfigs?.systemDescription ||
+          process.env.SYSTEM_DESCRIPTION ||
+          `${title} 是一个大模型应用编排系统,提供开箱即用的数据处理、模型调用等能力,可以快速的构建知识库并通过 Flow 可视化进行工作流编排,实现复杂的知识库场景!`
+        }
+        icon={feConfigs?.favicon || process.env.SYSTEM_FAVICON}
+      />
       {scripts?.map((item, i) => <Script key={i} strategy="lazyOnload" {...item}></Script>)}
 
       <QueryClientContext>
@@ -58,17 +58,18 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
   try {
     await connectToDatabase();
     await authCert({ req, authRoot: true });
+    const { start = -2, end = -360 * 24 } = req.body as { start: number; end: number };
 
     (async () => {
       try {
         console.log('执行脏数据清理任务');
         // 360天 ~ 2小时前
-        const end = addHours(new Date(), -2);
-        const start = addHours(new Date(), -360 * 24);
-        await checkInvalidDatasetFiles(start, end);
-        await checkInvalidImg(start, end);
-        await checkInvalidDatasetData(start, end);
-        await checkInvalidVector(start, end);
+        const endTime = addHours(new Date(), start);
+        const startTime = addHours(new Date(), end);
+        await checkInvalidDatasetFiles(startTime, endTime);
+        await checkInvalidImg(startTime, endTime);
+        await checkInvalidDatasetData(startTime, endTime);
+        await checkInvalidVector(startTime, endTime);
         console.log('执行脏数据清理任务完毕');
       } catch (error) {
         console.log('执行脏数据清理任务出错了');
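With this change the handler reads optional start/end hour offsets (negative, relative to now) from the request body, defaulting to -2 and -360 * 24. A hedged calling sketch, reusing the endpoint path from the docs hunk above; the root-credential header required by authCert is deployment-specific and omitted here:

// Sketch only: clean dirty data created between 360 days ago and 2 hours ago (the defaults).
await fetch('https://{{host}}/api/admin/clearInvalidData', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' /* plus your root credential header */ },
  body: JSON.stringify({ start: -2, end: -360 * 24 })
});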
@@ -141,11 +141,18 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
     const collections = await connectionMongo.connection.db
       .listCollections({ name: 'team.members' })
       .toArray();
 
     if (collections.length > 0) {
       const sourceCol = connectionMongo.connection.db.collection('team.members');
-      await sourceCol.rename('team_members', { dropTarget: true });
-      console.log('success rename team.members -> team_members');
+      const targetCol = connectionMongo.connection.db.collection('team_members');
+
+      if ((await targetCol.countDocuments()) > 1) {
+        // 除了root
+        console.log('team_members 中有数据,无法自动将 buffer.tts 迁移到 team_members,请手动操作');
+      } else {
+        await sourceCol.rename('team_members', { dropTarget: true });
+        console.log('success rename team.members -> team_members');
+      }
     }
   } catch (error) {
     console.log('error: rename team.members -> team_members', error);
@@ -170,6 +177,27 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
     console.log('error: rename team.tags -> team_tags', error);
   }
 
+  try {
+    const collections = await connectionMongo.connection.db
+      .listCollections({ name: 'team.subscriptions' })
+      .toArray();
+    if (collections.length > 0) {
+      const sourceCol = connectionMongo.connection.db.collection('team.subscriptions');
+      const targetCol = connectionMongo.connection.db.collection('team_subscriptions');
+
+      if ((await targetCol.countDocuments()) > 0) {
+        console.log(
+          'team_subscriptions 中有数据,无法自动将 team.subscriptions 迁移到 team_subscriptions,请手动操作'
+        );
+      } else {
+        await sourceCol.rename('team_subscriptions', { dropTarget: true });
+        console.log('success rename team.subscriptions -> team_subscriptions');
+      }
+    }
+  } catch (error) {
+    console.log('error: rename team.subscriptions -> team_subscriptions', error);
+  }
+
   jsonRes(res, {
     message: 'success'
   });
@@ -28,7 +28,6 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
 
 export const config = {
   api: {
-    sizeLimit: '10mb',
     bodyParser: {
       sizeLimit: '16mb'
     }
@@ -1,45 +1,31 @@
 import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
 import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
 import { getVectorModel } from '@fastgpt/service/core/ai/model';
-import type { DatasetListItemType } from '@fastgpt/global/core/dataset/type.d';
+import type { DatasetSimpleItemType } from '@fastgpt/global/core/dataset/type.d';
 import { mongoRPermission } from '@fastgpt/global/support/permission/utils';
 import { authUserRole } from '@fastgpt/service/support/permission/auth/user';
 import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
+import { NextAPI } from '@/service/middle/entry';
 
 /* get all dataset by teamId or tmbId */
-export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  try {
-    await connectToDatabase();
-    // 凭证校验
-    const { teamId, tmbId, teamOwner, role } = await authUserRole({ req, authToken: true });
+async function handler(
+  req: NextApiRequest,
+  res: NextApiResponse<any>
+): Promise<DatasetSimpleItemType[]> {
+  // 凭证校验
+  const { teamId, tmbId, teamOwner, role } = await authUserRole({ req, authToken: true });
 
   const datasets = await MongoDataset.find({
     ...mongoRPermission({ teamId, tmbId, role }),
     type: { $ne: DatasetTypeEnum.folder }
   }).lean();
 
-  const data = datasets.map((item) => ({
+  return datasets.map((item) => ({
     _id: item._id,
-    parentId: item.parentId,
-    avatar: item.avatar,
-    name: item.name,
-    intro: item.intro,
-    type: item.type,
-    permission: item.permission,
-    vectorModel: getVectorModel(item.vectorModel),
-    canWrite: String(item.tmbId) === tmbId,
-    isOwner: teamOwner || String(item.tmbId) === tmbId
-  }));
-
-  jsonRes<DatasetListItemType[]>(res, {
-    data
-  });
-  } catch (err) {
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
-  }
+    avatar: item.avatar,
+    name: item.name,
+    vectorModel: getVectorModel(item.vectorModel)
+  }));
 }
+
+export default NextAPI(handler);
@@ -9,7 +9,7 @@ import { getVectorModel } from '@fastgpt/service/core/ai/model';
 import { NextAPI } from '@/service/middle/entry';
 
 async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  const { parentId, type } = req.query as { parentId?: string; type?: `${DatasetTypeEnum}` };
+  const { parentId, type } = req.query as { parentId?: string; type?: DatasetTypeEnum };
   // 凭证校验
   const { teamId, tmbId, teamOwner, role, canWrite } = await authUserRole({
     req,
@@ -8,8 +8,18 @@ import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
 export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
   try {
     await connectToDatabase();
-    const { id, parentId, name, avatar, intro, permission, agentModel, websiteConfig, status } =
-      req.body as DatasetUpdateBody;
+    const {
+      id,
+      parentId,
+      name,
+      avatar,
+      intro,
+      permission,
+      agentModel,
+      websiteConfig,
+      externalReadUrl,
+      status
+    } = req.body as DatasetUpdateBody;
 
     if (!id) {
       throw new Error('缺少参数');
@@ -33,7 +43,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
         ...(agentModel && { agentModel: agentModel.model }),
         ...(websiteConfig && { websiteConfig }),
         ...(status && { status }),
-        ...(intro && { intro })
+        ...(intro && { intro }),
+        ...(externalReadUrl && { externalReadUrl })
       }
     );
 
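With the destructuring change above, an update request body may now carry externalReadUrl alongside the existing fields; only the fields present are written. A sketch of the body shape (the route path is not shown in this diff and is omitted; the import specifier is assumed to mirror the type.d import used elsewhere in this commit):

import type { DatasetUpdateBody } from '@fastgpt/global/core/dataset/api.d';

const body: DatasetUpdateBody = {
  id: 'datasetId', // required by the handler
  intro: 'External file dataset',
  externalReadUrl: 'https://example.com/preview' // hypothetical external preview address
};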
@@ -9,7 +9,7 @@ import { PluginListItemType } from '@fastgpt/global/core/plugin/controller';
 export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
   try {
     await connectToDatabase();
-    const { parentId, type } = req.query as { parentId?: string; type?: `${DatasetTypeEnum}` };
+    const { parentId, type } = req.query as { parentId?: string; type?: DatasetTypeEnum };
 
     const { teamId } = await authCert({ req, authToken: true });
 
@@ -82,7 +82,7 @@ export default NextAPI(handler);
 export const config = {
   api: {
     bodyParser: {
-      sizeLimit: '10mb'
+      sizeLimit: '20mb'
     },
     responseLimit: '20mb'
   }
@@ -43,6 +43,9 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
 
 export const config = {
   api: {
+    bodyParser: {
+      sizeLimit: '16mb'
+    },
     responseLimit: '16mb'
   }
 };
@@ -523,6 +523,9 @@ const authHeaderRequest = async ({
 
 export const config = {
   api: {
+    bodyParser: {
+      sizeLimit: '20mb'
+    },
     responseLimit: '20mb'
   }
 };
@@ -269,139 +269,141 @@ const OutLink = ({
   }, []);
 
   return (
-    <PageContainer
-      {...(isEmbed
-        ? { p: '0 !important', insertProps: { borderRadius: '0', boxShadow: 'none' } }
-        : { p: [0, 5] })}
-    >
+    <>
       <Head>
         <title>{appName || chatData.app?.name}</title>
         <meta name="description" content={appIntro} />
         <link rel="icon" href={appAvatar || chatData.app?.avatar} />
       </Head>
-      <MyBox
-        isLoading={isFetching}
-        h={'100%'}
-        display={'flex'}
-        flexDirection={['column', 'row']}
-        bg={'white'}
-      >
+      <PageContainer
+        {...(isEmbed
+          ? { p: '0 !important', insertProps: { borderRadius: '0', boxShadow: 'none' } }
+          : { p: [0, 5] })}
+      >
+        <MyBox
+          isLoading={isFetching}
+          h={'100%'}
+          display={'flex'}
+          flexDirection={['column', 'row']}
+          bg={'white'}
+        >
           {showHistory === '1'
             ? ((children: React.ReactNode) => {
                 return isPc ? (
                   <SideBar>{children}</SideBar>
                 ) : (
                   <Drawer
                     isOpen={isOpenSlider}
                     placement="left"
                     autoFocus={false}
                     size={'xs'}
                     onClose={onCloseSlider}
                   >
                     <DrawerOverlay backgroundColor={'rgba(255,255,255,0.5)'} />
                     <DrawerContent maxWidth={'250px'} boxShadow={'2px 0 10px rgba(0,0,0,0.15)'}>
                       {children}
                     </DrawerContent>
                   </Drawer>
                 );
              })(
                <ChatHistorySlider
                  appName={chatData.app.name}
                  appAvatar={chatData.app.avatar}
                  confirmClearText={t('core.chat.Confirm to clear share chat history')}
                  activeChatId={chatId}
                  history={histories.map((item) => ({
                    id: item.chatId,
                    title: item.title,
                    customTitle: item.customTitle,
                    top: item.top
                  }))}
                  onClose={onCloseSlider}
                  onChangeChat={(chatId) => {
                    router.replace({
                      query: {
                        ...router.query,
                        chatId: chatId || ''
                      }
                    });
                    if (!isPc) {
                      onCloseSlider();
                    }
                  }}
                  onDelHistory={({ chatId }) =>
                    delOneHistory({ appId: chatData.appId, chatId, shareId, outLinkUid })
                  }
                  onClearHistory={() => {
                    clearHistories({ shareId, outLinkUid });
                    router.replace({
                      query: {
                        ...router.query,
                        chatId: ''
                      }
                    });
                  }}
                  onSetHistoryTop={(e) => {
                    updateHistory({
                      ...e,
                      appId: chatData.appId,
                      shareId,
                      outLinkUid
                    });
                  }}
                  onSetCustomTitle={async (e) => {
                    updateHistory({
                      appId: chatData.appId,
                      chatId: e.chatId,
                      title: e.title,
                      customTitle: e.title,
                      shareId,
                      outLinkUid
                    });
                  }}
                />
              )
            : null}

          {/* chat container */}
          <Flex
            position={'relative'}
            h={[0, '100%']}
            w={['100%', 0]}
            flex={'1 0 0'}
            flexDirection={'column'}
          >
            {/* header */}
            <ChatHeader
              appAvatar={chatData.app.avatar}
              appName={chatData.app.name}
              history={chatData.history}
              showHistory={showHistory === '1'}
              onOpenSlider={onOpenSlider}
            />
            {/* chat box */}
            <Box flex={1}>
              <ChatBox
                active={!!chatData.app.name}
                ref={ChatBoxRef}
                appAvatar={chatData.app.avatar}
                userAvatar={chatData.userAvatar}
                userGuideModule={chatData.app?.userGuideModule}
                showFileSelector={checkChatSupportSelectFileByChatModels(chatData.app.chatModels)}
                feedbackType={'user'}
                onUpdateVariable={(e) => {}}
|
||||||
|
onStartChat={startChat}
|
||||||
|
onDelMessage={(e) =>
|
||||||
|
delOneHistoryItem({ ...e, appId: chatData.appId, chatId, shareId, outLinkUid })
|
||||||
|
}
|
||||||
|
appId={chatData.appId}
|
||||||
|
chatId={chatId}
|
||||||
|
shareId={shareId}
|
||||||
|
outLinkUid={outLinkUid}
|
||||||
|
/>
|
||||||
|
</Box>
|
||||||
|
</Flex>
|
||||||
|
</MyBox>
|
||||||
|
</PageContainer>
|
||||||
|
</>
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|||||||
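A minimal sketch (not part of this commit) of the conditional prop-spread pattern used above for embed mode; the `embed` query key, the `PageContainer` import path, and the component name are assumptions for illustration only.

  // Sketch: derive an embed flag and spread different container props depending on it.
  import { useRouter } from 'next/router';
  import { ReactNode } from 'react';
  import PageContainer from '@/components/PageContainer'; // assumed path

  const EmbedAwareContainer = ({ children }: { children: ReactNode }) => {
    const router = useRouter();
    // Treat `?embed=true` as "rendered inside an iframe", so drop the outer padding.
    const isEmbed = router.query.embed === 'true';
    return (
      <PageContainer {...(isEmbed ? { p: '0 !important' } : { p: [0, 5] })}>{children}</PageContainer>
    );
  };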
@ -0,0 +1,158 @@
import { useConfirm } from '@fastgpt/web/hooks/useConfirm';
import { Dispatch, ReactNode, SetStateAction, useEffect, useState } from 'react';
import { useTranslation } from 'next-i18next';
import { createContext, useContextSelector } from 'use-context-selector';
import { DatasetStatusEnum, DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { useRequest } from '@fastgpt/web/hooks/useRequest';
import { DatasetSchemaType } from '@fastgpt/global/core/dataset/type';
import { useDisclosure } from '@chakra-ui/react';
import { checkTeamWebSyncLimit } from '@/web/support/user/team/api';
import { postCreateTrainingUsage } from '@/web/support/wallet/usage/api';
import { getDatasetCollections, postWebsiteSync } from '@/web/core/dataset/api';
import dynamic from 'next/dynamic';
import { usePagination } from '@fastgpt/web/hooks/usePagination';
import { DatasetCollectionsListItemType } from '@/global/core/dataset/type';
import { useRouter } from 'next/router';
import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContext';

const WebSiteConfigModal = dynamic(() => import('./WebsiteConfig'));

type CollectionPageContextType = {
  openWebSyncConfirm: () => void;
  onOpenWebsiteModal: () => void;
  collections: DatasetCollectionsListItemType[];
  Pagination: () => JSX.Element;
  total: number;
  getData: (e: number) => void;
  isGetting: boolean;
  pageNum: number;
  pageSize: number;
  searchText: string;
  setSearchText: Dispatch<SetStateAction<string>>;
};

export const CollectionPageContext = createContext<CollectionPageContextType>({
  openWebSyncConfirm: function (): () => void {
    throw new Error('Function not implemented.');
  },
  onOpenWebsiteModal: function (): void {
    throw new Error('Function not implemented.');
  },
  collections: [],
  Pagination: function (): JSX.Element {
    throw new Error('Function not implemented.');
  },
  total: 0,
  getData: function (e: number): void {
    throw new Error('Function not implemented.');
  },
  isGetting: false,
  pageNum: 0,
  pageSize: 0,
  searchText: '',
  setSearchText: function (value: SetStateAction<string>): void {
    throw new Error('Function not implemented.');
  }
});

const CollectionPageContextProvider = ({ children }: { children: ReactNode }) => {
  const { t } = useTranslation();
  const router = useRouter();
  const { parentId = '' } = router.query as { parentId: string };

  const { datasetDetail, datasetId, updateDataset } = useContextSelector(
    DatasetPageContext,
    (v) => v
  );

  // website config
  const { openConfirm: openWebSyncConfirm, ConfirmModal: ConfirmWebSyncModal } = useConfirm({
    content: t('core.dataset.collection.Start Sync Tip')
  });
  const {
    isOpen: isOpenWebsiteModal,
    onOpen: onOpenWebsiteModal,
    onClose: onCloseWebsiteModal
  } = useDisclosure();
  const { mutate: onUpdateDatasetWebsiteConfig } = useRequest({
    mutationFn: async (websiteConfig: DatasetSchemaType['websiteConfig']) => {
      onCloseWebsiteModal();
      await checkTeamWebSyncLimit();
      const billId = await postCreateTrainingUsage({
        name: t('core.dataset.training.Website Sync'),
        datasetId: datasetId
      });
      await postWebsiteSync({ datasetId: datasetId, billId });

      await updateDataset({
        id: datasetId,
        websiteConfig,
        status: DatasetStatusEnum.syncing
      });

      return;
    },
    errorToast: t('common.Update Failed')
  });

  // collection list
  const [searchText, setSearchText] = useState('');
  const {
    data: collections,
    Pagination,
    total,
    getData,
    isLoading: isGetting,
    pageNum,
    pageSize
  } = usePagination<DatasetCollectionsListItemType>({
    api: getDatasetCollections,
    pageSize: 20,
    params: {
      datasetId,
      parentId,
      searchText
    },
    defaultRequest: false
  });
  useEffect(() => {
    getData(1);
  }, [parentId]);

  const contextValue: CollectionPageContextType = {
    openWebSyncConfirm: openWebSyncConfirm(onUpdateDatasetWebsiteConfig),
    onOpenWebsiteModal,

    searchText,
    setSearchText,
    collections,
    Pagination,
    total,
    getData,
    isGetting,
    pageNum,
    pageSize
  };

  return (
    <CollectionPageContext.Provider value={contextValue}>
      {children}
      {datasetDetail.type === DatasetTypeEnum.websiteDataset && (
        <>
          {isOpenWebsiteModal && (
            <WebSiteConfigModal
              onClose={onCloseWebsiteModal}
              onSuccess={onUpdateDatasetWebsiteConfig}
              defaultValue={{
                url: datasetDetail?.websiteConfig?.url,
                selector: datasetDetail?.websiteConfig?.selector
              }}
            />
          )}
          <ConfirmWebSyncModal />
        </>
      )}
    </CollectionPageContext.Provider>
  );
};
export default CollectionPageContextProvider;
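As a usage sketch (not part of this diff), a child rendered inside CollectionPageContextProvider can subscribe to a single context field so it only re-renders when that field changes; the component name below is hypothetical.

  import React from 'react';
  import { useContextSelector } from 'use-context-selector';
  import { CollectionPageContext } from './Context';

  // Hypothetical consumer: re-renders only when `total` changes, not on every context update.
  const CollectionTotalBadge = () => {
    const total = useContextSelector(CollectionPageContext, (v) => v.total);
    return <span>{total}</span>;
  };

  export default CollectionTotalBadge;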
@ -0,0 +1,55 @@
import EmptyTip from '@fastgpt/web/components/common/EmptyTip';
import React from 'react';
import { useTranslation } from 'next-i18next';
import { DatasetStatusEnum, DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { Box, Flex } from '@chakra-ui/react';
import { useContextSelector } from 'use-context-selector';
import { CollectionPageContext } from './Context';
import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContext';

const EmptyCollectionTip = () => {
  const { t } = useTranslation();
  const onOpenWebsiteModal = useContextSelector(CollectionPageContext, (v) => v.onOpenWebsiteModal);
  const datasetDetail = useContextSelector(DatasetPageContext, (v) => v.datasetDetail);

  return (
    <>
      {(datasetDetail.type === DatasetTypeEnum.dataset ||
        datasetDetail.type === DatasetTypeEnum.externalFile) && (
        <EmptyTip text={t('core.dataset.collection.Empty Tip')} />
      )}
      {datasetDetail.type === DatasetTypeEnum.websiteDataset && (
        <EmptyTip
          text={
            <Flex>
              {datasetDetail.status === DatasetStatusEnum.syncing && (
                <>{t('core.dataset.status.syncing')}</>
              )}
              {datasetDetail.status === DatasetStatusEnum.active && (
                <>
                  {!datasetDetail?.websiteConfig?.url ? (
                    <>
                      {t('core.dataset.collection.Website Empty Tip')}
                      {', '}
                      <Box
                        textDecoration={'underline'}
                        cursor={'pointer'}
                        onClick={onOpenWebsiteModal}
                      >
                        {t('core.dataset.collection.Click top config website')}
                      </Box>
                    </>
                  ) : (
                    <>{t('core.dataset.website.UnValid Website Tip')}</>
                  )}
                </>
              )}
            </Flex>
          }
        />
      )}
    </>
  );
};

export default EmptyCollectionTip;
@ -0,0 +1,399 @@
|
|||||||
|
import React, { useCallback, useRef } from 'react';
|
||||||
|
import { Box, Flex, MenuButton, Button, Link, useTheme, useDisclosure } from '@chakra-ui/react';
|
||||||
|
import {
|
||||||
|
getDatasetCollectionPathById,
|
||||||
|
postDatasetCollection,
|
||||||
|
putDatasetCollectionById
|
||||||
|
} from '@/web/core/dataset/api';
|
||||||
|
import { useQuery } from '@tanstack/react-query';
|
||||||
|
import { debounce } from 'lodash';
|
||||||
|
import { useTranslation } from 'next-i18next';
|
||||||
|
import MyIcon from '@fastgpt/web/components/common/Icon';
|
||||||
|
import MyInput from '@/components/MyInput';
|
||||||
|
import { useRequest } from '@fastgpt/web/hooks/useRequest';
|
||||||
|
import { useRouter } from 'next/router';
|
||||||
|
import { useSystemStore } from '@/web/common/system/useSystemStore';
|
||||||
|
import MyMenu from '@fastgpt/web/components/common/MyMenu';
|
||||||
|
import { useEditTitle } from '@/web/common/hooks/useEditTitle';
|
||||||
|
import {
|
||||||
|
DatasetCollectionTypeEnum,
|
||||||
|
TrainingModeEnum,
|
||||||
|
DatasetTypeEnum,
|
||||||
|
DatasetTypeMap,
|
||||||
|
DatasetStatusEnum
|
||||||
|
} from '@fastgpt/global/core/dataset/constants';
|
||||||
|
import EditFolderModal, { useEditFolder } from '../../../component/EditFolderModal';
|
||||||
|
import { TabEnum } from '../../index';
|
||||||
|
import ParentPath from '@/components/common/ParentPaths';
|
||||||
|
import dynamic from 'next/dynamic';
|
||||||
|
import { useUserStore } from '@/web/support/user/useUserStore';
|
||||||
|
import { TeamMemberRoleEnum } from '@fastgpt/global/support/user/team/constant';
|
||||||
|
|
||||||
|
import { ImportDataSourceEnum } from '@fastgpt/global/core/dataset/constants';
|
||||||
|
import { useContextSelector } from 'use-context-selector';
|
||||||
|
import { CollectionPageContext } from './Context';
|
||||||
|
import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContext';
|
||||||
|
|
||||||
|
const FileSourceSelector = dynamic(() => import('../Import/components/FileSourceSelector'));
|
||||||
|
|
||||||
|
const Header = ({}: {}) => {
|
||||||
|
const { t } = useTranslation();
|
||||||
|
const theme = useTheme();
|
||||||
|
const { setLoading } = useSystemStore();
|
||||||
|
const { userInfo } = useUserStore();
|
||||||
|
const datasetDetail = useContextSelector(DatasetPageContext, (v) => v.datasetDetail);
|
||||||
|
|
||||||
|
const router = useRouter();
|
||||||
|
const { parentId = '' } = router.query as { parentId: string; datasetId: string };
|
||||||
|
const { isPc } = useSystemStore();
|
||||||
|
|
||||||
|
const lastSearch = useRef('');
|
||||||
|
const { searchText, setSearchText, total, getData, pageNum, onOpenWebsiteModal } =
|
||||||
|
useContextSelector(CollectionPageContext, (v) => v);
|
||||||
|
|
||||||
|
// change search
|
||||||
|
const debounceRefetch = useCallback(
|
||||||
|
debounce(() => {
|
||||||
|
getData(1);
|
||||||
|
lastSearch.current = searchText;
|
||||||
|
}, 300),
|
||||||
|
[]
|
||||||
|
);
|
||||||
|
|
||||||
|
const { data: paths = [] } = useQuery(['getDatasetCollectionPathById', parentId], () =>
|
||||||
|
getDatasetCollectionPathById(parentId)
|
||||||
|
);
|
||||||
|
|
||||||
|
const { editFolderData, setEditFolderData } = useEditFolder();
|
||||||
|
const { onOpenModal: onOpenCreateVirtualFileModal, EditModal: EditCreateVirtualFileModal } =
|
||||||
|
useEditTitle({
|
||||||
|
title: t('dataset.Create manual collection'),
|
||||||
|
tip: t('dataset.Manual collection Tip'),
|
||||||
|
canEmpty: false
|
||||||
|
});
|
||||||
|
const {
|
||||||
|
isOpen: isOpenFileSourceSelector,
|
||||||
|
onOpen: onOpenFileSourceSelector,
|
||||||
|
onClose: onCloseFileSourceSelector
|
||||||
|
} = useDisclosure();
|
||||||
|
const { mutate: onCreateCollection } = useRequest({
|
||||||
|
mutationFn: async ({
|
||||||
|
name,
|
||||||
|
type,
|
||||||
|
callback,
|
||||||
|
...props
|
||||||
|
}: {
|
||||||
|
name: string;
|
||||||
|
type: `${DatasetCollectionTypeEnum}`;
|
||||||
|
callback?: (id: string) => void;
|
||||||
|
trainingType?: TrainingModeEnum;
|
||||||
|
rawLink?: string;
|
||||||
|
chunkSize?: number;
|
||||||
|
}) => {
|
||||||
|
setLoading(true);
|
||||||
|
const id = await postDatasetCollection({
|
||||||
|
parentId,
|
||||||
|
datasetId: datasetDetail._id,
|
||||||
|
name,
|
||||||
|
type,
|
||||||
|
...props
|
||||||
|
});
|
||||||
|
callback?.(id);
|
||||||
|
return id;
|
||||||
|
},
|
||||||
|
onSuccess() {
|
||||||
|
getData(pageNum);
|
||||||
|
},
|
||||||
|
onSettled() {
|
||||||
|
setLoading(false);
|
||||||
|
},
|
||||||
|
|
||||||
|
successToast: t('common.Create Success'),
|
||||||
|
errorToast: t('common.Create Failed')
|
||||||
|
});
|
||||||
|
|
||||||
|
return (
|
||||||
|
<Flex px={[2, 6]} alignItems={'flex-start'} h={'35px'}>
|
||||||
|
<Box flex={1}>
|
||||||
|
<ParentPath
|
||||||
|
paths={paths.map((path, i) => ({
|
||||||
|
parentId: path.parentId,
|
||||||
|
parentName: i === paths.length - 1 ? `${path.parentName}` : path.parentName
|
||||||
|
}))}
|
||||||
|
FirstPathDom={
|
||||||
|
<>
|
||||||
|
<Box fontWeight={'bold'} fontSize={['sm', 'lg']}>
|
||||||
|
{t(DatasetTypeMap[datasetDetail?.type]?.collectionLabel)}({total})
|
||||||
|
</Box>
|
||||||
|
{datasetDetail?.websiteConfig?.url && (
|
||||||
|
<Flex fontSize={'sm'}>
|
||||||
|
{t('core.dataset.website.Base Url')}:
|
||||||
|
<Link
|
||||||
|
href={datasetDetail.websiteConfig.url}
|
||||||
|
target="_blank"
|
||||||
|
mr={2}
|
||||||
|
textDecoration={'underline'}
|
||||||
|
color={'primary.600'}
|
||||||
|
>
|
||||||
|
{datasetDetail.websiteConfig.url}
|
||||||
|
</Link>
|
||||||
|
</Flex>
|
||||||
|
)}
|
||||||
|
</>
|
||||||
|
}
|
||||||
|
onClick={(e) => {
|
||||||
|
router.replace({
|
||||||
|
query: {
|
||||||
|
...router.query,
|
||||||
|
parentId: e
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
</Box>
|
||||||
|
|
||||||
|
{/* search input */}
|
||||||
|
{isPc && (
|
||||||
|
<Flex alignItems={'center'} mr={4}>
|
||||||
|
<MyInput
|
||||||
|
bg={'myGray.50'}
|
||||||
|
w={['100%', '250px']}
|
||||||
|
size={'sm'}
|
||||||
|
h={'36px'}
|
||||||
|
placeholder={t('common.Search') || ''}
|
||||||
|
value={searchText}
|
||||||
|
leftIcon={
|
||||||
|
<MyIcon
|
||||||
|
name="common/searchLight"
|
||||||
|
position={'absolute'}
|
||||||
|
w={'16px'}
|
||||||
|
color={'myGray.500'}
|
||||||
|
/>
|
||||||
|
}
|
||||||
|
onChange={(e) => {
|
||||||
|
setSearchText(e.target.value);
|
||||||
|
debounceRefetch();
|
||||||
|
}}
|
||||||
|
onBlur={() => {
|
||||||
|
if (searchText === lastSearch.current) return;
|
||||||
|
getData(1);
|
||||||
|
}}
|
||||||
|
onKeyDown={(e) => {
|
||||||
|
if (searchText === lastSearch.current) return;
|
||||||
|
if (e.key === 'Enter') {
|
||||||
|
getData(1);
|
||||||
|
}
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
</Flex>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* diff collection button */}
|
||||||
|
{userInfo?.team?.role !== TeamMemberRoleEnum.visitor && (
|
||||||
|
<>
|
||||||
|
{datasetDetail?.type === DatasetTypeEnum.dataset && (
|
||||||
|
<MyMenu
|
||||||
|
offset={[0, 5]}
|
||||||
|
Button={
|
||||||
|
<MenuButton
|
||||||
|
_hover={{
|
||||||
|
color: 'primary.500'
|
||||||
|
}}
|
||||||
|
fontSize={['sm', 'md']}
|
||||||
|
>
|
||||||
|
<Flex
|
||||||
|
alignItems={'center'}
|
||||||
|
px={5}
|
||||||
|
py={2}
|
||||||
|
borderRadius={'md'}
|
||||||
|
cursor={'pointer'}
|
||||||
|
bg={'primary.500'}
|
||||||
|
overflow={'hidden'}
|
||||||
|
color={'white'}
|
||||||
|
h={['28px', '35px']}
|
||||||
|
>
|
||||||
|
<MyIcon name={'common/importLight'} mr={2} w={'14px'} />
|
||||||
|
<Box>{t('dataset.collections.Create And Import')}</Box>
|
||||||
|
</Flex>
|
||||||
|
</MenuButton>
|
||||||
|
}
|
||||||
|
menuList={[
|
||||||
|
{
|
||||||
|
label: (
|
||||||
|
<Flex>
|
||||||
|
<MyIcon name={'common/folderFill'} w={'20px'} mr={2} />
|
||||||
|
{t('Folder')}
|
||||||
|
</Flex>
|
||||||
|
),
|
||||||
|
onClick: () => setEditFolderData({})
|
||||||
|
},
|
||||||
|
{
|
||||||
|
label: (
|
||||||
|
<Flex>
|
||||||
|
<MyIcon name={'core/dataset/manualCollection'} mr={2} w={'20px'} />
|
||||||
|
{t('core.dataset.Manual collection')}
|
||||||
|
</Flex>
|
||||||
|
),
|
||||||
|
onClick: () => {
|
||||||
|
onOpenCreateVirtualFileModal({
|
||||||
|
defaultVal: '',
|
||||||
|
onSuccess: (name) => {
|
||||||
|
onCreateCollection({ name, type: DatasetCollectionTypeEnum.virtual });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
label: (
|
||||||
|
<Flex>
|
||||||
|
<MyIcon name={'core/dataset/fileCollection'} mr={2} w={'20px'} />
|
||||||
|
{t('core.dataset.Text collection')}
|
||||||
|
</Flex>
|
||||||
|
),
|
||||||
|
onClick: onOpenFileSourceSelector
|
||||||
|
},
|
||||||
|
{
|
||||||
|
label: (
|
||||||
|
<Flex>
|
||||||
|
<MyIcon name={'core/dataset/tableCollection'} mr={2} w={'20px'} />
|
||||||
|
{t('core.dataset.Table collection')}
|
||||||
|
</Flex>
|
||||||
|
),
|
||||||
|
onClick: () =>
|
||||||
|
router.replace({
|
||||||
|
query: {
|
||||||
|
...router.query,
|
||||||
|
currentTab: TabEnum.import,
|
||||||
|
source: ImportDataSourceEnum.csvTable
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
]}
|
||||||
|
/>
|
||||||
|
)}
|
||||||
|
{datasetDetail?.type === DatasetTypeEnum.websiteDataset && (
|
||||||
|
<>
|
||||||
|
{datasetDetail?.websiteConfig?.url ? (
|
||||||
|
<Flex alignItems={'center'}>
|
||||||
|
{datasetDetail.status === DatasetStatusEnum.active && (
|
||||||
|
<Button onClick={onOpenWebsiteModal}>{t('common.Config')}</Button>
|
||||||
|
)}
|
||||||
|
{datasetDetail.status === DatasetStatusEnum.syncing && (
|
||||||
|
<Flex
|
||||||
|
ml={3}
|
||||||
|
alignItems={'center'}
|
||||||
|
px={3}
|
||||||
|
py={1}
|
||||||
|
borderRadius="md"
|
||||||
|
border={theme.borders.base}
|
||||||
|
>
|
||||||
|
<Box
|
||||||
|
animation={'zoomStopIcon 0.5s infinite alternate'}
|
||||||
|
bg={'myGray.700'}
|
||||||
|
w="8px"
|
||||||
|
h="8px"
|
||||||
|
borderRadius={'50%'}
|
||||||
|
mt={'1px'}
|
||||||
|
></Box>
|
||||||
|
<Box ml={2} color={'myGray.600'}>
|
||||||
|
{t('core.dataset.status.syncing')}
|
||||||
|
</Box>
|
||||||
|
</Flex>
|
||||||
|
)}
|
||||||
|
</Flex>
|
||||||
|
) : (
|
||||||
|
<Button onClick={onOpenWebsiteModal}>{t('core.dataset.Set Website Config')}</Button>
|
||||||
|
)}
|
||||||
|
</>
|
||||||
|
)}
|
||||||
|
{datasetDetail?.type === DatasetTypeEnum.externalFile && (
|
||||||
|
<MyMenu
|
||||||
|
offset={[0, 5]}
|
||||||
|
Button={
|
||||||
|
<MenuButton
|
||||||
|
_hover={{
|
||||||
|
color: 'primary.500'
|
||||||
|
}}
|
||||||
|
fontSize={['sm', 'md']}
|
||||||
|
>
|
||||||
|
<Flex
|
||||||
|
alignItems={'center'}
|
||||||
|
px={5}
|
||||||
|
py={2}
|
||||||
|
borderRadius={'md'}
|
||||||
|
cursor={'pointer'}
|
||||||
|
bg={'primary.500'}
|
||||||
|
overflow={'hidden'}
|
||||||
|
color={'white'}
|
||||||
|
h={['28px', '35px']}
|
||||||
|
>
|
||||||
|
<MyIcon name={'common/importLight'} mr={2} w={'14px'} />
|
||||||
|
<Box>{t('dataset.collections.Create And Import')}</Box>
|
||||||
|
</Flex>
|
||||||
|
</MenuButton>
|
||||||
|
}
|
||||||
|
menuList={[
|
||||||
|
{
|
||||||
|
label: (
|
||||||
|
<Flex>
|
||||||
|
<MyIcon name={'common/folderFill'} w={'20px'} mr={2} />
|
||||||
|
{t('Folder')}
|
||||||
|
</Flex>
|
||||||
|
),
|
||||||
|
onClick: () => setEditFolderData({})
|
||||||
|
},
|
||||||
|
{
|
||||||
|
label: (
|
||||||
|
<Flex>
|
||||||
|
<MyIcon name={'core/dataset/fileCollection'} mr={2} w={'20px'} />
|
||||||
|
{t('core.dataset.Text collection')}
|
||||||
|
</Flex>
|
||||||
|
),
|
||||||
|
onClick: () =>
|
||||||
|
router.replace({
|
||||||
|
query: {
|
||||||
|
...router.query,
|
||||||
|
currentTab: TabEnum.import,
|
||||||
|
source: ImportDataSourceEnum.externalFile
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
]}
|
||||||
|
/>
|
||||||
|
)}
|
||||||
|
</>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* modal */}
|
||||||
|
{!!editFolderData && (
|
||||||
|
<EditFolderModal
|
||||||
|
onClose={() => setEditFolderData(undefined)}
|
||||||
|
editCallback={async (name) => {
|
||||||
|
try {
|
||||||
|
if (editFolderData.id) {
|
||||||
|
await putDatasetCollectionById({
|
||||||
|
id: editFolderData.id,
|
||||||
|
name
|
||||||
|
});
|
||||||
|
getData(pageNum);
|
||||||
|
} else {
|
||||||
|
onCreateCollection({
|
||||||
|
name,
|
||||||
|
type: DatasetCollectionTypeEnum.folder
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
return Promise.reject(error);
|
||||||
|
}
|
||||||
|
}}
|
||||||
|
isEdit={!!editFolderData.id}
|
||||||
|
name={editFolderData.name}
|
||||||
|
/>
|
||||||
|
)}
|
||||||
|
<EditCreateVirtualFileModal iconSrc={'modal/manualDataset'} closeBtnText={''} />
|
||||||
|
{isOpenFileSourceSelector && <FileSourceSelector onClose={onCloseFileSourceSelector} />}
|
||||||
|
</Flex>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export default Header;
|
||||||
@ -1,4 +1,4 @@
|
|||||||
import React, { useCallback, useState, useRef, useMemo, useEffect } from 'react';
|
import React, { useState, useRef, useMemo } from 'react';
|
||||||
import {
|
import {
|
||||||
Box,
|
Box,
|
||||||
Flex,
|
Flex,
|
||||||
@ -9,47 +9,29 @@ import {
|
|||||||
Th,
|
Th,
|
||||||
Td,
|
Td,
|
||||||
Tbody,
|
Tbody,
|
||||||
Image,
|
MenuButton
|
||||||
MenuButton,
|
|
||||||
useDisclosure,
|
|
||||||
Button,
|
|
||||||
Link,
|
|
||||||
useTheme
|
|
||||||
} from '@chakra-ui/react';
|
} from '@chakra-ui/react';
|
||||||
import {
|
import {
|
||||||
getDatasetCollections,
|
|
||||||
delDatasetCollectionById,
|
delDatasetCollectionById,
|
||||||
putDatasetCollectionById,
|
putDatasetCollectionById,
|
||||||
postDatasetCollection,
|
|
||||||
getDatasetCollectionPathById,
|
|
||||||
postLinkCollectionSync
|
postLinkCollectionSync
|
||||||
} from '@/web/core/dataset/api';
|
} from '@/web/core/dataset/api';
|
||||||
import { useQuery } from '@tanstack/react-query';
|
import { useQuery } from '@tanstack/react-query';
|
||||||
import { debounce } from 'lodash';
|
|
||||||
import { useConfirm } from '@fastgpt/web/hooks/useConfirm';
|
import { useConfirm } from '@fastgpt/web/hooks/useConfirm';
|
||||||
import { useTranslation } from 'next-i18next';
|
import { useTranslation } from 'next-i18next';
|
||||||
import MyIcon from '@fastgpt/web/components/common/Icon';
|
import MyIcon from '@fastgpt/web/components/common/Icon';
|
||||||
import MyInput from '@/components/MyInput';
|
|
||||||
import dayjs from 'dayjs';
|
import dayjs from 'dayjs';
|
||||||
import { useRequest } from '@fastgpt/web/hooks/useRequest';
|
import { useRequest } from '@fastgpt/web/hooks/useRequest';
|
||||||
import { useRouter } from 'next/router';
|
import { useRouter } from 'next/router';
|
||||||
import { useSystemStore } from '@/web/common/system/useSystemStore';
|
|
||||||
import MyMenu from '@fastgpt/web/components/common/MyMenu';
|
import MyMenu from '@fastgpt/web/components/common/MyMenu';
|
||||||
import { useEditTitle } from '@/web/common/hooks/useEditTitle';
|
import { useEditTitle } from '@/web/common/hooks/useEditTitle';
|
||||||
import type { DatasetCollectionsListItemType } from '@/global/core/dataset/type.d';
|
|
||||||
import EmptyTip from '@fastgpt/web/components/common/EmptyTip';
|
|
||||||
import {
|
import {
|
||||||
DatasetCollectionTypeEnum,
|
DatasetCollectionTypeEnum,
|
||||||
TrainingModeEnum,
|
|
||||||
DatasetTypeEnum,
|
|
||||||
DatasetTypeMap,
|
|
||||||
DatasetStatusEnum,
|
DatasetStatusEnum,
|
||||||
DatasetCollectionSyncResultMap
|
DatasetCollectionSyncResultMap
|
||||||
} from '@fastgpt/global/core/dataset/constants';
|
} from '@fastgpt/global/core/dataset/constants';
|
||||||
import { getCollectionIcon } from '@fastgpt/global/core/dataset/utils';
|
import { getCollectionIcon } from '@fastgpt/global/core/dataset/utils';
|
||||||
import EditFolderModal, { useEditFolder } from '../../component/EditFolderModal';
|
import { TabEnum } from '../../index';
|
||||||
import { TabEnum } from '..';
|
|
||||||
import ParentPath from '@/components/common/ParentPaths';
|
|
||||||
import dynamic from 'next/dynamic';
|
import dynamic from 'next/dynamic';
|
||||||
import { useDrag } from '@/web/common/hooks/useDrag';
|
import { useDrag } from '@/web/common/hooks/useDrag';
|
||||||
import SelectCollections from '@/web/core/dataset/components/SelectCollections';
|
import SelectCollections from '@/web/core/dataset/components/SelectCollections';
|
||||||
@ -58,27 +40,22 @@ import MyTooltip from '@/components/MyTooltip';
|
|||||||
import { useUserStore } from '@/web/support/user/useUserStore';
|
import { useUserStore } from '@/web/support/user/useUserStore';
|
||||||
import { TeamMemberRoleEnum } from '@fastgpt/global/support/user/team/constant';
|
import { TeamMemberRoleEnum } from '@fastgpt/global/support/user/team/constant';
|
||||||
import { useDatasetStore } from '@/web/core/dataset/store/dataset';
|
import { useDatasetStore } from '@/web/core/dataset/store/dataset';
|
||||||
import { DatasetSchemaType } from '@fastgpt/global/core/dataset/type';
|
|
||||||
import { DatasetCollectionSyncResultEnum } from '@fastgpt/global/core/dataset/constants';
|
import { DatasetCollectionSyncResultEnum } from '@fastgpt/global/core/dataset/constants';
|
||||||
import MyBox from '@fastgpt/web/components/common/MyBox';
|
import MyBox from '@fastgpt/web/components/common/MyBox';
|
||||||
import { usePagination } from '@fastgpt/web/hooks/usePagination';
|
import { useContextSelector } from 'use-context-selector';
|
||||||
import { ImportDataSourceEnum } from '@fastgpt/global/core/dataset/constants';
|
import { CollectionPageContext } from './Context';
|
||||||
|
import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContext';
|
||||||
|
|
||||||
const WebSiteConfigModal = dynamic(() => import('./Import/WebsiteConfig'), {});
|
const Header = dynamic(() => import('./Header'));
|
||||||
const FileSourceSelector = dynamic(() => import('./Import/components/FileSourceSelector'), {});
|
const EmptyCollectionTip = dynamic(() => import('./EmptyCollectionTip'));
|
||||||
|
|
||||||
const CollectionCard = () => {
|
const CollectionCard = () => {
|
||||||
const BoxRef = useRef<HTMLDivElement>(null);
|
const BoxRef = useRef<HTMLDivElement>(null);
|
||||||
const lastSearch = useRef('');
|
|
||||||
const router = useRouter();
|
const router = useRouter();
|
||||||
const theme = useTheme();
|
|
||||||
const { toast } = useToast();
|
const { toast } = useToast();
|
||||||
const { parentId = '', datasetId } = router.query as { parentId: string; datasetId: string };
|
|
||||||
const { t } = useTranslation();
|
const { t } = useTranslation();
|
||||||
const { isPc } = useSystemStore();
|
|
||||||
const { userInfo } = useUserStore();
|
const { userInfo } = useUserStore();
|
||||||
const [searchText, setSearchText] = useState('');
|
const { datasetDetail, loadDatasetDetail } = useContextSelector(DatasetPageContext, (v) => v);
|
||||||
const { datasetDetail, updateDataset, startWebsiteSync, loadDatasetDetail } = useDatasetStore();
|
|
||||||
|
|
||||||
const { openConfirm: openDeleteConfirm, ConfirmModal: ConfirmDeleteModal } = useConfirm({
|
const { openConfirm: openDeleteConfirm, ConfirmModal: ConfirmDeleteModal } = useConfirm({
|
||||||
content: t('dataset.Confirm to delete the file'),
|
content: t('dataset.Confirm to delete the file'),
|
||||||
@ -88,66 +65,18 @@ const CollectionCard = () => {
|
|||||||
content: t('core.dataset.collection.Start Sync Tip')
|
content: t('core.dataset.collection.Start Sync Tip')
|
||||||
});
|
});
|
||||||
|
|
||||||
const {
|
|
||||||
isOpen: isOpenFileSourceSelector,
|
|
||||||
onOpen: onOpenFileSourceSelector,
|
|
||||||
onClose: onCloseFileSourceSelector
|
|
||||||
} = useDisclosure();
|
|
||||||
const {
|
|
||||||
isOpen: isOpenWebsiteModal,
|
|
||||||
onOpen: onOpenWebsiteModal,
|
|
||||||
onClose: onCloseWebsiteModal
|
|
||||||
} = useDisclosure();
|
|
||||||
const { onOpenModal: onOpenCreateVirtualFileModal, EditModal: EditCreateVirtualFileModal } =
|
|
||||||
useEditTitle({
|
|
||||||
title: t('dataset.Create manual collection'),
|
|
||||||
tip: t('dataset.Manual collection Tip'),
|
|
||||||
canEmpty: false
|
|
||||||
});
|
|
||||||
|
|
||||||
const { onOpenModal: onOpenEditTitleModal, EditModal: EditTitleModal } = useEditTitle({
|
const { onOpenModal: onOpenEditTitleModal, EditModal: EditTitleModal } = useEditTitle({
|
||||||
title: t('Rename')
|
title: t('Rename')
|
||||||
});
|
});
|
||||||
|
|
||||||
const { editFolderData, setEditFolderData } = useEditFolder();
|
|
||||||
const [moveCollectionData, setMoveCollectionData] = useState<{ collectionId: string }>();
|
const [moveCollectionData, setMoveCollectionData] = useState<{ collectionId: string }>();
|
||||||
|
|
||||||
const {
|
const { collections, Pagination, total, getData, isGetting, pageNum, pageSize } =
|
||||||
data: collections,
|
useContextSelector(CollectionPageContext, (v) => v);
|
||||||
Pagination,
|
|
||||||
total,
|
|
||||||
getData,
|
|
||||||
isLoading: isGetting,
|
|
||||||
pageNum,
|
|
||||||
pageSize
|
|
||||||
} = usePagination<DatasetCollectionsListItemType>({
|
|
||||||
api: getDatasetCollections,
|
|
||||||
pageSize: 20,
|
|
||||||
params: {
|
|
||||||
datasetId,
|
|
||||||
parentId,
|
|
||||||
searchText
|
|
||||||
},
|
|
||||||
defaultRequest: false,
|
|
||||||
onChange() {
|
|
||||||
if (BoxRef.current) {
|
|
||||||
BoxRef.current.scrollTop = 0;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
const { dragStartId, setDragStartId, dragTargetId, setDragTargetId } = useDrag();
|
const { dragStartId, setDragStartId, dragTargetId, setDragTargetId } = useDrag();
|
||||||
|
|
||||||
// change search
|
// Ad file status icon
|
||||||
const debounceRefetch = useCallback(
|
|
||||||
debounce(() => {
|
|
||||||
getData(1);
|
|
||||||
lastSearch.current = searchText;
|
|
||||||
}, 300),
|
|
||||||
[]
|
|
||||||
);
|
|
||||||
|
|
||||||
// add file icon
|
|
||||||
const formatCollections = useMemo(
|
const formatCollections = useMemo(
|
||||||
() =>
|
() =>
|
||||||
collections.map((collection) => {
|
collections.map((collection) => {
|
||||||
@ -180,37 +109,6 @@ const CollectionCard = () => {
|
|||||||
[collections, t]
|
[collections, t]
|
||||||
);
|
);
|
||||||
|
|
||||||
const { mutate: onCreateCollection, isLoading: isCreating } = useRequest({
|
|
||||||
mutationFn: async ({
|
|
||||||
name,
|
|
||||||
type,
|
|
||||||
callback,
|
|
||||||
...props
|
|
||||||
}: {
|
|
||||||
name: string;
|
|
||||||
type: `${DatasetCollectionTypeEnum}`;
|
|
||||||
callback?: (id: string) => void;
|
|
||||||
trainingType?: `${TrainingModeEnum}`;
|
|
||||||
rawLink?: string;
|
|
||||||
chunkSize?: number;
|
|
||||||
}) => {
|
|
||||||
const id = await postDatasetCollection({
|
|
||||||
parentId,
|
|
||||||
datasetId,
|
|
||||||
name,
|
|
||||||
type,
|
|
||||||
...props
|
|
||||||
});
|
|
||||||
callback?.(id);
|
|
||||||
return id;
|
|
||||||
},
|
|
||||||
onSuccess() {
|
|
||||||
getData(pageNum);
|
|
||||||
},
|
|
||||||
|
|
||||||
successToast: t('common.Create Success'),
|
|
||||||
errorToast: t('common.Create Failed')
|
|
||||||
});
|
|
||||||
const { mutate: onUpdateCollectionName } = useRequest({
|
const { mutate: onUpdateCollectionName } = useRequest({
|
||||||
mutationFn: ({ collectionId, name }: { collectionId: string; name: string }) => {
|
mutationFn: ({ collectionId, name }: { collectionId: string; name: string }) => {
|
||||||
return putDatasetCollectionById({
|
return putDatasetCollectionById({
|
||||||
@ -237,17 +135,7 @@ const CollectionCard = () => {
|
|||||||
successToast: t('common.Delete Success'),
|
successToast: t('common.Delete Success'),
|
||||||
errorToast: t('common.Delete Failed')
|
errorToast: t('common.Delete Failed')
|
||||||
});
|
});
|
||||||
const { mutate: onUpdateDatasetWebsiteConfig, isLoading: isUpdating } = useRequest({
|
|
||||||
mutationFn: async (websiteConfig: DatasetSchemaType['websiteConfig']) => {
|
|
||||||
onCloseWebsiteModal();
|
|
||||||
await updateDataset({
|
|
||||||
id: datasetDetail._id,
|
|
||||||
websiteConfig
|
|
||||||
});
|
|
||||||
return startWebsiteSync();
|
|
||||||
},
|
|
||||||
errorToast: t('common.Update Failed')
|
|
||||||
});
|
|
||||||
const { mutate: onclickStartSync, isLoading: isSyncing } = useRequest({
|
const { mutate: onclickStartSync, isLoading: isSyncing } = useRequest({
|
||||||
mutationFn: (collectionId: string) => {
|
mutationFn: (collectionId: string) => {
|
||||||
return postLinkCollectionSync(collectionId);
|
return postLinkCollectionSync(collectionId);
|
||||||
@ -262,22 +150,13 @@ const CollectionCard = () => {
|
|||||||
errorToast: t('core.dataset.error.Start Sync Failed')
|
errorToast: t('core.dataset.error.Start Sync Failed')
|
||||||
});
|
});
|
||||||
|
|
||||||
const { data: paths = [] } = useQuery(['getDatasetCollectionPathById', parentId], () =>
|
|
||||||
getDatasetCollectionPathById(parentId)
|
|
||||||
);
|
|
||||||
|
|
||||||
const hasTrainingData = useMemo(
|
const hasTrainingData = useMemo(
|
||||||
() => !!formatCollections.find((item) => item.trainingAmount > 0),
|
() => !!formatCollections.find((item) => item.trainingAmount > 0),
|
||||||
[formatCollections]
|
[formatCollections]
|
||||||
);
|
);
|
||||||
const isLoading = useMemo(
|
const isLoading = useMemo(
|
||||||
() =>
|
() => isDeleting || isSyncing || (isGetting && collections.length === 0),
|
||||||
isCreating ||
|
[collections.length, isDeleting, isGetting, isSyncing]
|
||||||
isDeleting ||
|
|
||||||
isUpdating ||
|
|
||||||
isSyncing ||
|
|
||||||
(isGetting && collections.length === 0),
|
|
||||||
[collections.length, isCreating, isDeleting, isGetting, isSyncing, isUpdating]
|
|
||||||
);
|
);
|
||||||
|
|
||||||
useQuery(
|
useQuery(
|
||||||
@ -285,7 +164,7 @@ const CollectionCard = () => {
|
|||||||
() => {
|
() => {
|
||||||
getData(1);
|
getData(1);
|
||||||
if (datasetDetail.status === DatasetStatusEnum.syncing) {
|
if (datasetDetail.status === DatasetStatusEnum.syncing) {
|
||||||
loadDatasetDetail(datasetId, true);
|
loadDatasetDetail(datasetDetail._id);
|
||||||
}
|
}
|
||||||
return null;
|
return null;
|
||||||
},
|
},
|
||||||
@ -295,207 +174,11 @@ const CollectionCard = () => {
|
|||||||
}
|
}
|
||||||
);
|
);
|
||||||
|
|
||||||
useEffect(() => {
|
|
||||||
getData(1);
|
|
||||||
}, [parentId]);
|
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<MyBox isLoading={isLoading} h={'100%'} py={[2, 4]}>
|
<MyBox isLoading={isLoading} h={'100%'} py={[2, 4]}>
|
||||||
<Flex ref={BoxRef} flexDirection={'column'} py={[1, 3]} h={'100%'}>
|
<Flex ref={BoxRef} flexDirection={'column'} py={[1, 3]} h={'100%'}>
|
||||||
{/* header */}
|
{/* header */}
|
||||||
<Flex px={[2, 6]} alignItems={'flex-start'} h={'35px'}>
|
<Header />
|
||||||
<Box flex={1}>
|
|
||||||
<ParentPath
|
|
||||||
paths={paths.map((path, i) => ({
|
|
||||||
parentId: path.parentId,
|
|
||||||
parentName: i === paths.length - 1 ? `${path.parentName}` : path.parentName
|
|
||||||
}))}
|
|
||||||
FirstPathDom={
|
|
||||||
<>
|
|
||||||
<Box fontWeight={'bold'} fontSize={['sm', 'lg']}>
|
|
||||||
{t(DatasetTypeMap[datasetDetail?.type]?.collectionLabel)}({total})
|
|
||||||
</Box>
|
|
||||||
{datasetDetail?.websiteConfig?.url && (
|
|
||||||
<Flex fontSize={'sm'}>
|
|
||||||
{t('core.dataset.website.Base Url')}:
|
|
||||||
<Link
|
|
||||||
href={datasetDetail.websiteConfig.url}
|
|
||||||
target="_blank"
|
|
||||||
mr={2}
|
|
||||||
textDecoration={'underline'}
|
|
||||||
color={'primary.600'}
|
|
||||||
>
|
|
||||||
{datasetDetail.websiteConfig.url}
|
|
||||||
</Link>
|
|
||||||
</Flex>
|
|
||||||
)}
|
|
||||||
</>
|
|
||||||
}
|
|
||||||
onClick={(e) => {
|
|
||||||
router.replace({
|
|
||||||
query: {
|
|
||||||
...router.query,
|
|
||||||
parentId: e
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}}
|
|
||||||
/>
|
|
||||||
</Box>
|
|
||||||
|
|
||||||
{isPc && (
|
|
||||||
<Flex alignItems={'center'} mr={4}>
|
|
||||||
<MyInput
|
|
||||||
bg={'myGray.50'}
|
|
||||||
w={['100%', '250px']}
|
|
||||||
size={'sm'}
|
|
||||||
h={'36px'}
|
|
||||||
placeholder={t('common.Search') || ''}
|
|
||||||
value={searchText}
|
|
||||||
leftIcon={
|
|
||||||
<MyIcon
|
|
||||||
name="common/searchLight"
|
|
||||||
position={'absolute'}
|
|
||||||
w={'16px'}
|
|
||||||
color={'myGray.500'}
|
|
||||||
/>
|
|
||||||
}
|
|
||||||
onChange={(e) => {
|
|
||||||
setSearchText(e.target.value);
|
|
||||||
debounceRefetch();
|
|
||||||
}}
|
|
||||||
onBlur={() => {
|
|
||||||
if (searchText === lastSearch.current) return;
|
|
||||||
getData(1);
|
|
||||||
}}
|
|
||||||
onKeyDown={(e) => {
|
|
||||||
if (searchText === lastSearch.current) return;
|
|
||||||
if (e.key === 'Enter') {
|
|
||||||
getData(1);
|
|
||||||
}
|
|
||||||
}}
|
|
||||||
/>
|
|
||||||
</Flex>
|
|
||||||
)}
|
|
||||||
{datasetDetail?.type === DatasetTypeEnum.dataset && (
|
|
||||||
<>
|
|
||||||
{userInfo?.team?.role !== TeamMemberRoleEnum.visitor && (
|
|
||||||
<MyMenu
|
|
||||||
offset={[0, 5]}
|
|
||||||
Button={
|
|
||||||
<MenuButton
|
|
||||||
_hover={{
|
|
||||||
color: 'primary.500'
|
|
||||||
}}
|
|
||||||
fontSize={['sm', 'md']}
|
|
||||||
>
|
|
||||||
<Flex
|
|
||||||
alignItems={'center'}
|
|
||||||
px={5}
|
|
||||||
py={2}
|
|
||||||
borderRadius={'md'}
|
|
||||||
cursor={'pointer'}
|
|
||||||
bg={'primary.500'}
|
|
||||||
overflow={'hidden'}
|
|
||||||
color={'white'}
|
|
||||||
h={['28px', '35px']}
|
|
||||||
>
|
|
||||||
<MyIcon name={'common/importLight'} mr={2} w={'14px'} />
|
|
||||||
<Box>{t('dataset.collections.Create And Import')}</Box>
|
|
||||||
</Flex>
|
|
||||||
</MenuButton>
|
|
||||||
}
|
|
||||||
menuList={[
|
|
||||||
{
|
|
||||||
label: (
|
|
||||||
<Flex>
|
|
||||||
<MyIcon name={'common/folderFill'} w={'20px'} mr={2} />
|
|
||||||
{t('Folder')}
|
|
||||||
</Flex>
|
|
||||||
),
|
|
||||||
onClick: () => setEditFolderData({})
|
|
||||||
},
|
|
||||||
{
|
|
||||||
label: (
|
|
||||||
<Flex>
|
|
||||||
<MyIcon name={'core/dataset/manualCollection'} mr={2} w={'20px'} />
|
|
||||||
{t('core.dataset.Manual collection')}
|
|
||||||
</Flex>
|
|
||||||
),
|
|
||||||
onClick: () => {
|
|
||||||
onOpenCreateVirtualFileModal({
|
|
||||||
defaultVal: '',
|
|
||||||
onSuccess: (name) => {
|
|
||||||
onCreateCollection({ name, type: DatasetCollectionTypeEnum.virtual });
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
label: (
|
|
||||||
<Flex>
|
|
||||||
<MyIcon name={'core/dataset/fileCollection'} mr={2} w={'20px'} />
|
|
||||||
{t('core.dataset.Text collection')}
|
|
||||||
</Flex>
|
|
||||||
),
|
|
||||||
onClick: onOpenFileSourceSelector
|
|
||||||
},
|
|
||||||
{
|
|
||||||
label: (
|
|
||||||
<Flex>
|
|
||||||
<MyIcon name={'core/dataset/tableCollection'} mr={2} w={'20px'} />
|
|
||||||
{t('core.dataset.Table collection')}
|
|
||||||
</Flex>
|
|
||||||
),
|
|
||||||
onClick: () =>
|
|
||||||
router.replace({
|
|
||||||
query: {
|
|
||||||
...router.query,
|
|
||||||
currentTab: TabEnum.import,
|
|
||||||
source: ImportDataSourceEnum.csvTable
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
]}
|
|
||||||
/>
|
|
||||||
)}
|
|
||||||
</>
|
|
||||||
)}
|
|
||||||
{datasetDetail?.type === DatasetTypeEnum.websiteDataset && (
|
|
||||||
<>
|
|
||||||
{datasetDetail?.websiteConfig?.url ? (
|
|
||||||
<Flex alignItems={'center'}>
|
|
||||||
{datasetDetail.status === DatasetStatusEnum.active && (
|
|
||||||
<Button onClick={onOpenWebsiteModal}>{t('common.Config')}</Button>
|
|
||||||
)}
|
|
||||||
{datasetDetail.status === DatasetStatusEnum.syncing && (
|
|
||||||
<Flex
|
|
||||||
ml={3}
|
|
||||||
alignItems={'center'}
|
|
||||||
px={3}
|
|
||||||
py={1}
|
|
||||||
borderRadius="md"
|
|
||||||
border={theme.borders.base}
|
|
||||||
>
|
|
||||||
<Box
|
|
||||||
animation={'zoomStopIcon 0.5s infinite alternate'}
|
|
||||||
bg={'myGray.700'}
|
|
||||||
w="8px"
|
|
||||||
h="8px"
|
|
||||||
borderRadius={'50%'}
|
|
||||||
mt={'1px'}
|
|
||||||
></Box>
|
|
||||||
<Box ml={2} color={'myGray.600'}>
|
|
||||||
{t('core.dataset.status.syncing')}
|
|
||||||
</Box>
|
|
||||||
</Flex>
|
|
||||||
)}
|
|
||||||
</Flex>
|
|
||||||
) : (
|
|
||||||
<Button onClick={onOpenWebsiteModal}>{t('core.dataset.Set Website Config')}</Button>
|
|
||||||
)}
|
|
||||||
</>
|
|
||||||
)}
|
|
||||||
</Flex>
|
|
||||||
|
|
||||||
{/* collection table */}
|
{/* collection table */}
|
||||||
<TableContainer
|
<TableContainer
|
||||||
@ -731,86 +414,16 @@ const CollectionCard = () => {
|
|||||||
<Pagination />
|
<Pagination />
|
||||||
</Flex>
|
</Flex>
|
||||||
)}
|
)}
|
||||||
{total === 0 && (
|
{total === 0 && <EmptyCollectionTip />}
|
||||||
<EmptyTip
|
|
||||||
text={
|
|
||||||
datasetDetail.type === DatasetTypeEnum.dataset ? (
|
|
||||||
t('core.dataset.collection.Empty Tip')
|
|
||||||
) : (
|
|
||||||
<Flex>
|
|
||||||
{datasetDetail.status === DatasetStatusEnum.syncing && (
|
|
||||||
<>{t('core.dataset.status.syncing')}</>
|
|
||||||
)}
|
|
||||||
{datasetDetail.status === DatasetStatusEnum.active && (
|
|
||||||
<>
|
|
||||||
{!datasetDetail?.websiteConfig?.url ? (
|
|
||||||
<>
|
|
||||||
{t('core.dataset.collection.Website Empty Tip')}
|
|
||||||
{', '}
|
|
||||||
<Box
|
|
||||||
textDecoration={'underline'}
|
|
||||||
cursor={'pointer'}
|
|
||||||
onClick={onOpenWebsiteModal}
|
|
||||||
>
|
|
||||||
{t('core.dataset.collection.Click top config website')}
|
|
||||||
</Box>
|
|
||||||
</>
|
|
||||||
) : (
|
|
||||||
<>{t('core.dataset.website.UnValid Website Tip')}</>
|
|
||||||
)}
|
|
||||||
</>
|
|
||||||
)}
|
|
||||||
</Flex>
|
|
||||||
)
|
|
||||||
}
|
|
||||||
/>
|
|
||||||
)}
|
|
||||||
</TableContainer>
|
</TableContainer>
|
||||||
|
|
||||||
<ConfirmDeleteModal />
|
<ConfirmDeleteModal />
|
||||||
<ConfirmSyncModal />
|
<ConfirmSyncModal />
|
||||||
<EditTitleModal />
|
<EditTitleModal />
|
||||||
<EditCreateVirtualFileModal iconSrc={'modal/manualDataset'} closeBtnText={''} />
|
|
||||||
{/* {isOpenFileImportModal && (
|
|
||||||
<FileImportModal
|
|
||||||
datasetId={datasetId}
|
|
||||||
parentId={parentId}
|
|
||||||
uploadSuccess={() => {
|
|
||||||
getData(1);
|
|
||||||
onCloseFileImportModal();
|
|
||||||
}}
|
|
||||||
onClose={onCloseFileImportModal}
|
|
||||||
/>
|
|
||||||
)} */}
|
|
||||||
{isOpenFileSourceSelector && <FileSourceSelector onClose={onCloseFileSourceSelector} />}
|
|
||||||
{!!editFolderData && (
|
|
||||||
<EditFolderModal
|
|
||||||
onClose={() => setEditFolderData(undefined)}
|
|
||||||
editCallback={async (name) => {
|
|
||||||
try {
|
|
||||||
if (editFolderData.id) {
|
|
||||||
await putDatasetCollectionById({
|
|
||||||
id: editFolderData.id,
|
|
||||||
name
|
|
||||||
});
|
|
||||||
getData(pageNum);
|
|
||||||
} else {
|
|
||||||
onCreateCollection({
|
|
||||||
name,
|
|
||||||
type: DatasetCollectionTypeEnum.folder
|
|
||||||
});
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
return Promise.reject(error);
|
|
||||||
}
|
|
||||||
}}
|
|
||||||
isEdit={!!editFolderData.id}
|
|
||||||
name={editFolderData.name}
|
|
||||||
/>
|
|
||||||
)}
|
|
||||||
{!!moveCollectionData && (
|
{!!moveCollectionData && (
|
||||||
<SelectCollections
|
<SelectCollections
|
||||||
datasetId={datasetId}
|
datasetId={datasetDetail._id}
|
||||||
type="folder"
|
type="folder"
|
||||||
defaultSelectedId={[moveCollectionData.collectionId]}
|
defaultSelectedId={[moveCollectionData.collectionId]}
|
||||||
onClose={() => setMoveCollectionData(undefined)}
|
onClose={() => setMoveCollectionData(undefined)}
|
||||||
@ -828,16 +441,6 @@ const CollectionCard = () => {
|
|||||||
}}
|
}}
|
||||||
/>
|
/>
|
||||||
)}
|
)}
|
||||||
{isOpenWebsiteModal && (
|
|
||||||
<WebSiteConfigModal
|
|
||||||
onClose={onCloseWebsiteModal}
|
|
||||||
onSuccess={onUpdateDatasetWebsiteConfig}
|
|
||||||
defaultValue={{
|
|
||||||
url: datasetDetail?.websiteConfig?.url,
|
|
||||||
selector: datasetDetail?.websiteConfig?.selector
|
|
||||||
}}
|
|
||||||
/>
|
|
||||||
)}
|
|
||||||
</Flex>
|
</Flex>
|
||||||
</MyBox>
|
</MyBox>
|
||||||
);
|
);
|
||||||
@ -0,0 +1,302 @@
|
|||||||
|
import { useRouter } from 'next/router';
|
||||||
|
import { SetStateAction, useState } from 'react';
|
||||||
|
import { useTranslation } from 'next-i18next';
|
||||||
|
import { createContext, useContextSelector } from 'use-context-selector';
|
||||||
|
import { ImportDataSourceEnum, TrainingModeEnum } from '@fastgpt/global/core/dataset/constants';
|
||||||
|
import { useMyStep } from '@fastgpt/web/hooks/useStep';
|
||||||
|
import { Box, Button, Flex, IconButton } from '@chakra-ui/react';
|
||||||
|
import MyIcon from '@fastgpt/web/components/common/Icon';
|
||||||
|
import { TabEnum } from '../Slider';
|
||||||
|
import { ImportProcessWayEnum } from '@/web/core/dataset/constants';
|
||||||
|
import { UseFormReturn, useForm } from 'react-hook-form';
|
||||||
|
import { ImportSourceItemType } from '@/web/core/dataset/type';
|
||||||
|
import { Prompt_AgentQA } from '@fastgpt/global/core/ai/prompt/agent';
|
||||||
|
import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContext';
|
||||||
|
|
||||||
|
type TrainingFiledType = {
|
||||||
|
chunkOverlapRatio: number;
|
||||||
|
maxChunkSize: number;
|
||||||
|
minChunkSize: number;
|
||||||
|
autoChunkSize: number;
|
||||||
|
chunkSize: number;
|
||||||
|
showChunkInput: boolean;
|
||||||
|
showPromptInput: boolean;
|
||||||
|
charsPointsPrice: number;
|
||||||
|
priceTip: string;
|
||||||
|
uploadRate: number;
|
||||||
|
chunkSizeField?: ChunkSizeFieldType;
|
||||||
|
};
|
||||||
|
type DatasetImportContextType = {
|
||||||
|
importSource: ImportDataSourceEnum;
|
||||||
|
parentId: string | undefined;
|
||||||
|
activeStep: number;
|
||||||
|
goToNext: () => void;
|
||||||
|
|
||||||
|
processParamsForm: UseFormReturn<ImportFormType, any>;
|
||||||
|
sources: ImportSourceItemType[];
|
||||||
|
setSources: React.Dispatch<React.SetStateAction<ImportSourceItemType[]>>;
|
||||||
|
} & TrainingFiledType;
|
||||||
|
|
||||||
|
type ChunkSizeFieldType = 'embeddingChunkSize';
|
||||||
|
export type ImportFormType = {
|
||||||
|
mode: TrainingModeEnum;
|
||||||
|
way: ImportProcessWayEnum;
|
||||||
|
embeddingChunkSize: number;
|
||||||
|
customSplitChar: string;
|
||||||
|
qaPrompt: string;
|
||||||
|
webSelector: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
export const DatasetImportContext = createContext<DatasetImportContextType>({
|
||||||
|
importSource: ImportDataSourceEnum.fileLocal,
|
||||||
|
goToNext: function (): void {
|
||||||
|
throw new Error('Function not implemented.');
|
||||||
|
},
|
||||||
|
activeStep: 0,
|
||||||
|
parentId: undefined,
|
||||||
|
|
||||||
|
maxChunkSize: 0,
|
||||||
|
minChunkSize: 0,
|
||||||
|
showChunkInput: false,
|
||||||
|
showPromptInput: false,
|
||||||
|
sources: [],
|
||||||
|
setSources: function (value: SetStateAction<ImportSourceItemType[]>): void {
|
||||||
|
throw new Error('Function not implemented.');
|
||||||
|
},
|
||||||
|
chunkSize: 0,
|
||||||
|
chunkOverlapRatio: 0,
|
||||||
|
uploadRate: 0,
|
||||||
|
//@ts-ignore
|
||||||
|
processParamsForm: undefined,
|
||||||
|
autoChunkSize: 0,
|
||||||
|
charsPointsPrice: 0,
|
||||||
|
priceTip: ''
|
||||||
|
});
|
||||||
|
|
||||||
|
const DatasetImportContextProvider = ({ children }: { children: React.ReactNode }) => {
|
||||||
|
const { t } = useTranslation();
|
||||||
|
const router = useRouter();
|
||||||
|
const { source = ImportDataSourceEnum.fileLocal, parentId } = (router.query || {}) as {
|
||||||
|
source: ImportDataSourceEnum;
|
||||||
|
parentId?: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
const datasetDetail = useContextSelector(DatasetPageContext, (v) => v.datasetDetail);
|
||||||
|
|
||||||
|
// step
|
||||||
|
const modeSteps: Record<ImportDataSourceEnum, { title: string }[]> = {
|
||||||
|
[ImportDataSourceEnum.fileLocal]: [
|
||||||
|
{
|
||||||
|
title: t('core.dataset.import.Select file')
|
||||||
|
},
|
||||||
|
{
|
||||||
|
title: t('core.dataset.import.Data Preprocessing')
|
||||||
|
},
|
||||||
|
{
|
||||||
|
title: t('core.dataset.import.Upload data')
|
||||||
|
}
|
||||||
|
],
|
||||||
|
[ImportDataSourceEnum.fileLink]: [
|
||||||
|
{
|
||||||
|
title: t('core.dataset.import.Select file')
|
||||||
|
},
|
||||||
|
{
|
||||||
|
title: t('core.dataset.import.Data Preprocessing')
|
||||||
|
},
|
||||||
|
{
|
||||||
|
title: t('core.dataset.import.Upload data')
|
||||||
|
}
|
||||||
|
],
|
||||||
|
[ImportDataSourceEnum.fileCustom]: [
|
||||||
|
{
|
||||||
|
title: t('core.dataset.import.Select file')
|
||||||
|
},
|
||||||
|
{
|
||||||
|
title: t('core.dataset.import.Data Preprocessing')
|
||||||
|
},
|
||||||
|
{
|
||||||
|
title: t('core.dataset.import.Upload data')
|
||||||
|
}
|
||||||
|
],
|
||||||
|
[ImportDataSourceEnum.csvTable]: [
|
||||||
|
{
|
||||||
|
title: t('core.dataset.import.Select file')
|
||||||
|
},
|
||||||
|
{
|
||||||
|
title: t('core.dataset.import.Data Preprocessing')
|
||||||
|
},
|
||||||
|
{
|
||||||
|
title: t('core.dataset.import.Upload data')
|
||||||
|
}
|
||||||
|
],
|
||||||
|
    [ImportDataSourceEnum.externalFile]: [
      {
        title: t('core.dataset.import.Select file')
      },
      {
        title: t('core.dataset.import.Data Preprocessing')
      },
      {
        title: t('core.dataset.import.Upload data')
      }
    ]
  };
  const steps = modeSteps[source];

  const { activeStep, goToNext, goToPrevious, MyStep } = useMyStep({
    defaultStep: 0,
    steps
  });

  // -----
  const vectorModel = datasetDetail.vectorModel;
  const agentModel = datasetDetail.agentModel;

  const processParamsForm = useForm<ImportFormType>({
    defaultValues: {
      mode: TrainingModeEnum.chunk,
      way: ImportProcessWayEnum.auto,
      embeddingChunkSize: vectorModel?.defaultToken || 512,
      customSplitChar: '',
      qaPrompt: Prompt_AgentQA.description,
      webSelector: ''
    }
  });

  const [sources, setSources] = useState<ImportSourceItemType[]>([]);

  // watch form
  const mode = processParamsForm.watch('mode');
  const way = processParamsForm.watch('way');
  const embeddingChunkSize = processParamsForm.watch('embeddingChunkSize');
  const customSplitChar = processParamsForm.watch('customSplitChar');

  const modeStaticParams: Record<TrainingModeEnum, TrainingFiledType> = {
    [TrainingModeEnum.auto]: {
      chunkOverlapRatio: 0.2,
      maxChunkSize: 2048,
      minChunkSize: 100,
      autoChunkSize: vectorModel?.defaultToken ? vectorModel?.defaultToken * 2 : 1024,
      chunkSize: vectorModel?.defaultToken ? vectorModel?.defaultToken * 2 : 1024,
      showChunkInput: false,
      showPromptInput: false,
      charsPointsPrice: agentModel.charsPointsPrice,
      priceTip: t('core.dataset.import.Auto mode Estimated Price Tips', {
        price: agentModel.charsPointsPrice
      }),
      uploadRate: 100
    },
    [TrainingModeEnum.chunk]: {
      chunkSizeField: 'embeddingChunkSize' as ChunkSizeFieldType,
      chunkOverlapRatio: 0.2,
      maxChunkSize: vectorModel?.maxToken || 512,
      minChunkSize: 100,
      autoChunkSize: vectorModel?.defaultToken || 512,
      chunkSize: embeddingChunkSize,
      showChunkInput: true,
      showPromptInput: false,
      charsPointsPrice: vectorModel.charsPointsPrice,
      priceTip: t('core.dataset.import.Embedding Estimated Price Tips', {
        price: vectorModel.charsPointsPrice
      }),
      uploadRate: 150
    },
    [TrainingModeEnum.qa]: {
      chunkOverlapRatio: 0,
      maxChunkSize: 8000,
      minChunkSize: 3000,
      autoChunkSize: agentModel.maxContext * 0.55 || 6000,
      chunkSize: agentModel.maxContext * 0.55 || 6000,
      showChunkInput: false,
      showPromptInput: true,
      charsPointsPrice: agentModel.charsPointsPrice,
      priceTip: t('core.dataset.import.QA Estimated Price Tips', {
        price: agentModel?.charsPointsPrice
      }),
      uploadRate: 30
    }
  };
  const selectModelStaticParam = modeStaticParams[mode];

  const wayStaticPrams = {
    [ImportProcessWayEnum.auto]: {
      chunkSize: selectModelStaticParam.autoChunkSize,
      customSplitChar: ''
    },
    [ImportProcessWayEnum.custom]: {
      chunkSize: modeStaticParams[mode].chunkSize,
      customSplitChar
    }
  };

  const chunkSize = wayStaticPrams[way].chunkSize;

  const contextValue = {
    importSource: source,
    parentId,
    activeStep,
    goToNext,

    processParamsForm,
    ...selectModelStaticParam,
    sources,
    setSources,
    chunkSize
  };

  return (
    <DatasetImportContext.Provider value={contextValue}>
      <Flex>
        {activeStep === 0 ? (
          <Flex alignItems={'center'}>
            <IconButton
              icon={<MyIcon name={'common/backFill'} w={'14px'} />}
              aria-label={''}
              size={'smSquare'}
              w={'26px'}
              h={'26px'}
              borderRadius={'50%'}
              variant={'whiteBase'}
              mr={2}
              onClick={() =>
                router.replace({
                  query: {
                    ...router.query,
                    currentTab: TabEnum.collectionCard
                  }
                })
              }
            />
            {t('common.Exit')}
          </Flex>
        ) : (
          <Button
            variant={'whiteBase'}
            leftIcon={<MyIcon name={'common/backFill'} w={'14px'} />}
            onClick={goToPrevious}
          >
            {t('common.Last Step')}
          </Button>
        )}
        <Box flex={1} />
      </Flex>
      {/* step */}
      <Box
        mt={4}
        mb={5}
        px={3}
        py={[2, 4]}
        bg={'myGray.50'}
        borderWidth={'1px'}
        borderColor={'borderColor.low'}
        borderRadius={'md'}
      >
        <Box maxW={['100%', '900px']} mx={'auto'}>
          <MyStep />
        </Box>
      </Box>
      {children}
    </DatasetImportContext.Provider>
  );
};

export default DatasetImportContextProvider;
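For orientation, a minimal sketch of how a step component might consume this provider through `use-context-selector` — the selector-based read is what lets each step subscribe to a single field (for example `goToNext`) without re-rendering on unrelated context updates. The component name is hypothetical; the field names mirror the context value built above.

```tsx
// Hypothetical consumer, assuming DatasetImportContext exposes the value assembled above.
import React from 'react';
import { useContextSelector } from 'use-context-selector';
import { DatasetImportContext } from './Context';

const NextStepButton = () => {
  // Subscribe to individual fields; unrelated context updates will not re-render this component.
  const goToNext = useContextSelector(DatasetImportContext, (v) => v.goToNext);
  const sourceCount = useContextSelector(DatasetImportContext, (v) => v.sources.length);

  return (
    <button disabled={sourceCount === 0} onClick={goToNext}>
      Next ({sourceCount} sources selected)
    </button>
  );
};

export default NextStepButton;
```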
@ -1,165 +0,0 @@
|
|||||||
import React, { useContext, createContext, useState, useMemo, useEffect } from 'react';
|
|
||||||
|
|
||||||
import { TrainingModeEnum } from '@fastgpt/global/core/dataset/constants';
|
|
||||||
import { useTranslation } from 'next-i18next';
|
|
||||||
import { DatasetItemType } from '@fastgpt/global/core/dataset/type';
|
|
||||||
import { Prompt_AgentQA } from '@fastgpt/global/core/ai/prompt/agent';
|
|
||||||
import { UseFormReturn, useForm } from 'react-hook-form';
|
|
||||||
import { ImportProcessWayEnum } from '@/web/core/dataset/constants';
|
|
||||||
import { ImportSourceItemType } from '@/web/core/dataset/type';
|
|
||||||
import { ImportDataSourceEnum } from '@fastgpt/global/core/dataset/constants';
|
|
||||||
|
|
||||||
type ChunkSizeFieldType = 'embeddingChunkSize';
|
|
||||||
export type FormType = {
|
|
||||||
mode: `${TrainingModeEnum}`;
|
|
||||||
way: `${ImportProcessWayEnum}`;
|
|
||||||
embeddingChunkSize: number;
|
|
||||||
customSplitChar: string;
|
|
||||||
qaPrompt: string;
|
|
||||||
webSelector: string;
|
|
||||||
};
|
|
||||||
|
|
||||||
type useImportStoreType = {
|
|
||||||
parentId?: string;
|
|
||||||
processParamsForm: UseFormReturn<FormType, any>;
|
|
||||||
chunkSizeField?: ChunkSizeFieldType;
|
|
||||||
maxChunkSize: number;
|
|
||||||
minChunkSize: number;
|
|
||||||
showChunkInput: boolean;
|
|
||||||
showPromptInput: boolean;
|
|
||||||
sources: ImportSourceItemType[];
|
|
||||||
setSources: React.Dispatch<React.SetStateAction<ImportSourceItemType[]>>;
|
|
||||||
chunkSize: number;
|
|
||||||
chunkOverlapRatio: number;
|
|
||||||
priceTip: string;
|
|
||||||
uploadRate: number;
|
|
||||||
importSource: `${ImportDataSourceEnum}`;
|
|
||||||
};
|
|
||||||
const StateContext = createContext<useImportStoreType>({
|
|
||||||
processParamsForm: {} as any,
|
|
||||||
sources: [],
|
|
||||||
setSources: function (value: React.SetStateAction<ImportSourceItemType[]>): void {
|
|
||||||
throw new Error('Function not implemented.');
|
|
||||||
},
|
|
||||||
maxChunkSize: 0,
|
|
||||||
minChunkSize: 0,
|
|
||||||
showChunkInput: false,
|
|
||||||
showPromptInput: false,
|
|
||||||
chunkSizeField: 'embeddingChunkSize',
|
|
||||||
chunkSize: 0,
|
|
||||||
chunkOverlapRatio: 0,
|
|
||||||
priceTip: '',
|
|
||||||
uploadRate: 50,
|
|
||||||
importSource: ImportDataSourceEnum.fileLocal
|
|
||||||
});
|
|
||||||
|
|
||||||
export const useImportStore = () => useContext(StateContext);
|
|
||||||
|
|
||||||
const Provider = ({
|
|
||||||
importSource,
|
|
||||||
dataset,
|
|
||||||
parentId,
|
|
||||||
children
|
|
||||||
}: {
|
|
||||||
importSource: `${ImportDataSourceEnum}`;
|
|
||||||
dataset: DatasetItemType;
|
|
||||||
parentId?: string;
|
|
||||||
children: React.ReactNode;
|
|
||||||
}) => {
|
|
||||||
const vectorModel = dataset.vectorModel;
|
|
||||||
const agentModel = dataset.agentModel;
|
|
||||||
|
|
||||||
const processParamsForm = useForm<FormType>({
|
|
||||||
defaultValues: {
|
|
||||||
mode: TrainingModeEnum.chunk,
|
|
||||||
way: ImportProcessWayEnum.auto,
|
|
||||||
embeddingChunkSize: vectorModel?.defaultToken || 512,
|
|
||||||
customSplitChar: '',
|
|
||||||
qaPrompt: Prompt_AgentQA.description,
|
|
||||||
webSelector: ''
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
const { t } = useTranslation();
|
|
||||||
const [sources, setSources] = useState<ImportSourceItemType[]>([]);
|
|
||||||
|
|
||||||
// watch form
|
|
||||||
const mode = processParamsForm.watch('mode');
|
|
||||||
const way = processParamsForm.watch('way');
|
|
||||||
const embeddingChunkSize = processParamsForm.watch('embeddingChunkSize');
|
|
||||||
const customSplitChar = processParamsForm.watch('customSplitChar');
|
|
||||||
|
|
||||||
const modeStaticParams = {
|
|
||||||
[TrainingModeEnum.auto]: {
|
|
||||||
chunkOverlapRatio: 0.2,
|
|
||||||
maxChunkSize: 2048,
|
|
||||||
minChunkSize: 100,
|
|
||||||
autoChunkSize: vectorModel?.defaultToken ? vectorModel?.defaultToken * 2 : 1024,
|
|
||||||
chunkSize: vectorModel?.defaultToken ? vectorModel?.defaultToken * 2 : 1024,
|
|
||||||
showChunkInput: false,
|
|
||||||
showPromptInput: false,
|
|
||||||
charsPointsPrice: agentModel.charsPointsPrice,
|
|
||||||
priceTip: t('core.dataset.import.Auto mode Estimated Price Tips', {
|
|
||||||
price: agentModel.charsPointsPrice
|
|
||||||
}),
|
|
||||||
uploadRate: 100
|
|
||||||
},
|
|
||||||
[TrainingModeEnum.chunk]: {
|
|
||||||
chunkSizeField: 'embeddingChunkSize' as ChunkSizeFieldType,
|
|
||||||
chunkOverlapRatio: 0.2,
|
|
||||||
maxChunkSize: vectorModel?.maxToken || 512,
|
|
||||||
minChunkSize: 100,
|
|
||||||
autoChunkSize: vectorModel?.defaultToken || 512,
|
|
||||||
chunkSize: embeddingChunkSize,
|
|
||||||
showChunkInput: true,
|
|
||||||
showPromptInput: false,
|
|
||||||
charsPointsPrice: vectorModel.charsPointsPrice,
|
|
||||||
priceTip: t('core.dataset.import.Embedding Estimated Price Tips', {
|
|
||||||
price: vectorModel.charsPointsPrice
|
|
||||||
}),
|
|
||||||
uploadRate: 150
|
|
||||||
},
|
|
||||||
[TrainingModeEnum.qa]: {
|
|
||||||
chunkOverlapRatio: 0,
|
|
||||||
maxChunkSize: 8000,
|
|
||||||
minChunkSize: 3000,
|
|
||||||
autoChunkSize: agentModel.maxContext * 0.55 || 6000,
|
|
||||||
chunkSize: agentModel.maxContext * 0.55 || 6000,
|
|
||||||
showChunkInput: false,
|
|
||||||
showPromptInput: true,
|
|
||||||
charsPointsPrice: agentModel.charsPointsPrice,
|
|
||||||
priceTip: t('core.dataset.import.QA Estimated Price Tips', {
|
|
||||||
price: agentModel?.charsPointsPrice
|
|
||||||
}),
|
|
||||||
uploadRate: 30
|
|
||||||
}
|
|
||||||
};
|
|
||||||
const selectModelStaticParam = useMemo(() => modeStaticParams[mode], [mode]);
|
|
||||||
|
|
||||||
const wayStaticPrams = {
|
|
||||||
[ImportProcessWayEnum.auto]: {
|
|
||||||
chunkSize: selectModelStaticParam.autoChunkSize,
|
|
||||||
customSplitChar: ''
|
|
||||||
},
|
|
||||||
[ImportProcessWayEnum.custom]: {
|
|
||||||
chunkSize: modeStaticParams[mode].chunkSize,
|
|
||||||
customSplitChar
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const chunkSize = wayStaticPrams[way].chunkSize;
|
|
||||||
|
|
||||||
const value: useImportStoreType = {
|
|
||||||
parentId,
|
|
||||||
processParamsForm,
|
|
||||||
...selectModelStaticParam,
|
|
||||||
sources,
|
|
||||||
setSources,
|
|
||||||
chunkSize,
|
|
||||||
|
|
||||||
importSource
|
|
||||||
};
|
|
||||||
return <StateContext.Provider value={value}>{children}</StateContext.Provider>;
|
|
||||||
};
|
|
||||||
|
|
||||||
export default React.memo(Provider);
|
|
||||||
@ -20,23 +20,20 @@ import LeftRadio from '@fastgpt/web/components/common/Radio/LeftRadio';
 import { TrainingTypeMap } from '@fastgpt/global/core/dataset/constants';
 import { ImportProcessWayEnum } from '@/web/core/dataset/constants';
 import MyTooltip from '@/components/MyTooltip';
-import { useImportStore } from '../Provider';
 import { useSystemStore } from '@/web/common/system/useSystemStore';
 import MyModal from '@fastgpt/web/components/common/MyModal';
 import { Prompt_AgentQA } from '@fastgpt/global/core/ai/prompt/agent';
 import Preview from '../components/Preview';
 import Tag from '@fastgpt/web/components/common/Tag/index';
+import { useContextSelector } from 'use-context-selector';
+import { DatasetImportContext } from '../Context';

-function DataProcess({
-  showPreviewChunks = true,
-  goToNext
-}: {
-  showPreviewChunks: boolean;
-  goToNext: () => void;
-}) {
+function DataProcess({ showPreviewChunks = true }: { showPreviewChunks: boolean }) {
   const { t } = useTranslation();
   const { feConfigs } = useSystemStore();

   const {
+    goToNext,
     processParamsForm,
     chunkSizeField,
     minChunkSize,
@ -44,7 +41,7 @@ function DataProcess({
     showPromptInput,
     maxChunkSize,
     priceTip
-  } = useImportStore();
+  } = useContextSelector(DatasetImportContext, (v) => v);
   const { getValues, setValue, register } = processParamsForm;
   const [refresh, setRefresh] = useState(false);
@ -2,15 +2,12 @@ import React from 'react';
 import Preview from '../components/Preview';
 import { Box, Button, Flex } from '@chakra-ui/react';
 import { useTranslation } from 'next-i18next';
+import { useContextSelector } from 'use-context-selector';
+import { DatasetImportContext } from '../Context';

-const PreviewData = ({
-  showPreviewChunks,
-  goToNext
-}: {
-  showPreviewChunks: boolean;
-  goToNext: () => void;
-}) => {
+const PreviewData = ({ showPreviewChunks }: { showPreviewChunks: boolean }) => {
   const { t } = useTranslation();
+  const goToNext = useContextSelector(DatasetImportContext, (v) => v.goToNext);

   return (
     <Flex flexDirection={'column'} h={'100%'}>
@ -11,7 +11,6 @@ import {
   Flex,
   Button
 } from '@chakra-ui/react';
-import { useImportStore, type FormType } from '../Provider';
 import { ImportDataSourceEnum } from '@fastgpt/global/core/dataset/constants';
 import { useTranslation } from 'next-i18next';
 import MyIcon from '@fastgpt/web/components/common/Icon';
@ -28,20 +27,23 @@ import {
 } from '@/web/core/dataset/api';
 import Tag from '@fastgpt/web/components/common/Tag/index';
 import { useI18n } from '@/web/context/I18n';
+import { useContextSelector } from 'use-context-selector';
+import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContext';
+import { DatasetImportContext, type ImportFormType } from '../Context';

 const Upload = () => {
   const { t } = useTranslation();
   const { fileT } = useI18n();
   const { toast } = useToast();
   const router = useRouter();
-  const { datasetDetail } = useDatasetStore();
+  const datasetDetail = useContextSelector(DatasetPageContext, (v) => v.datasetDetail);
   const { importSource, parentId, sources, setSources, processParamsForm, chunkSize } =
-    useImportStore();
+    useContextSelector(DatasetImportContext, (v) => v);

   const { handleSubmit } = processParamsForm;

   const { mutate: startUpload, isLoading } = useRequest({
-    mutationFn: async ({ mode, customSplitChar, qaPrompt, webSelector }: FormType) => {
+    mutationFn: async ({ mode, customSplitChar, qaPrompt, webSelector }: ImportFormType) => {
       if (sources.length === 0) return;
       const filterWaitingSources = sources.filter((item) => item.createStatus === 'waiting');
@ -10,7 +10,7 @@ import { ImportDataSourceEnum } from '@fastgpt/global/core/dataset/constants';
 const FileModeSelector = ({ onClose }: { onClose: () => void }) => {
   const { t } = useTranslation();
   const router = useRouter();
-  const [value, setValue] = useState<`${ImportDataSourceEnum}`>(ImportDataSourceEnum.fileLocal);
+  const [value, setValue] = useState<ImportDataSourceEnum>(ImportDataSourceEnum.fileLocal);

   return (
     <MyModal
@ -3,17 +3,18 @@ import { Box, Flex, IconButton } from '@chakra-ui/react';
 import MyIcon from '@fastgpt/web/components/common/Icon';
 import { useTranslation } from 'next-i18next';

-import { useImportStore } from '../Provider';
 import MyMenu from '@fastgpt/web/components/common/MyMenu';
 import { ImportSourceItemType } from '@/web/core/dataset/type';
 import dynamic from 'next/dynamic';
+import { useContextSelector } from 'use-context-selector';
+import { DatasetImportContext } from '../Context';
 const PreviewRawText = dynamic(() => import('./PreviewRawText'));
 const PreviewChunks = dynamic(() => import('./PreviewChunks'));

 const Preview = ({ showPreviewChunks }: { showPreviewChunks: boolean }) => {
   const { t } = useTranslation();

-  const { sources } = useImportStore();
+  const { sources } = useContextSelector(DatasetImportContext, (v) => v);
   const [previewRawTextSource, setPreviewRawTextSource] = useState<ImportSourceItemType>();
   const [previewChunkSource, setPreviewChunkSource] = useState<ImportSourceItemType>();
@ -4,11 +4,12 @@ import { ImportSourceItemType } from '@/web/core/dataset/type';
 import { useQuery } from '@tanstack/react-query';
 import MyRightDrawer from '@fastgpt/web/components/common/MyDrawer/MyRightDrawer';
 import { getPreviewChunks } from '@/web/core/dataset/api';
-import { useImportStore } from '../Provider';
 import { ImportDataSourceEnum } from '@fastgpt/global/core/dataset/constants';
 import { splitText2Chunks } from '@fastgpt/global/common/string/textSplitter';
 import { useToast } from '@fastgpt/web/hooks/useToast';
 import { getErrText } from '@fastgpt/global/common/error/utils';
+import { useContextSelector } from 'use-context-selector';
+import { DatasetImportContext } from '../Context';

 const PreviewChunks = ({
   previewSource,
@ -18,7 +19,10 @@ const PreviewChunks = ({
   onClose: () => void;
 }) => {
   const { toast } = useToast();
-  const { importSource, chunkSize, chunkOverlapRatio, processParamsForm } = useImportStore();
+  const { importSource, chunkSize, chunkOverlapRatio, processParamsForm } = useContextSelector(
+    DatasetImportContext,
+    (v) => v
+  );

   const { data = [], isLoading } = useQuery(
     ['previewSource'],
@ -4,10 +4,11 @@ import { ImportSourceItemType } from '@/web/core/dataset/type';
 import { useQuery } from '@tanstack/react-query';
 import { getPreviewFileContent } from '@/web/common/file/api';
 import MyRightDrawer from '@fastgpt/web/components/common/MyDrawer/MyRightDrawer';
-import { useImportStore } from '../Provider';
 import { ImportDataSourceEnum } from '@fastgpt/global/core/dataset/constants';
 import { useToast } from '@fastgpt/web/hooks/useToast';
 import { getErrText } from '@fastgpt/global/common/error/utils';
+import { useContextSelector } from 'use-context-selector';
+import { DatasetImportContext } from '../Context';

 const PreviewRawText = ({
   previewSource,
@ -17,7 +18,7 @@ const PreviewRawText = ({
   onClose: () => void;
 }) => {
   const { toast } = useToast();
-  const { importSource } = useImportStore();
+  const { importSource } = useContextSelector(DatasetImportContext, (v) => v);

   const { data, isLoading } = useQuery(
     ['previewSource', previewSource?.dbFileId],
@ -0,0 +1,188 @@
import React, { useEffect } from 'react';
import dynamic from 'next/dynamic';
import { useTranslation } from 'next-i18next';
import { useFieldArray, useForm } from 'react-hook-form';
import {
  Box,
  Button,
  Flex,
  Table,
  Thead,
  Tbody,
  Tr,
  Th,
  Td,
  TableContainer,
  Input
} from '@chakra-ui/react';
import { getNanoid } from '@fastgpt/global/common/string/tools';
import MyIcon from '@fastgpt/web/components/common/Icon';
import Loading from '@fastgpt/web/components/common/MyLoading';
import { useContextSelector } from 'use-context-selector';
import { DatasetImportContext } from '../Context';
import { getFileIcon } from '@fastgpt/global/common/file/icon';
import { useI18n } from '@/web/context/I18n';
import { SmallAddIcon } from '@chakra-ui/icons';

const DataProcess = dynamic(() => import('../commonProgress/DataProcess'), {
  loading: () => <Loading fixed={false} />
});
const Upload = dynamic(() => import('../commonProgress/Upload'));

const ExternalFileCollection = () => {
  const activeStep = useContextSelector(DatasetImportContext, (v) => v.activeStep);

  return (
    <>
      {activeStep === 0 && <CustomLinkInput />}
      {activeStep === 1 && <DataProcess showPreviewChunks={true} />}
      {activeStep === 2 && <Upload />}
    </>
  );
};

export default React.memo(ExternalFileCollection);

const CustomLinkInput = () => {
  const { t } = useTranslation();
  const { datasetT, commonT } = useI18n();
  const { goToNext, sources, setSources } = useContextSelector(DatasetImportContext, (v) => v);
  const { register, reset, handleSubmit, control } = useForm<{
    list: {
      sourceName: string;
      sourceUrl: string;
      externalId: string;
    }[];
  }>({
    defaultValues: {
      list: [
        {
          sourceName: '',
          sourceUrl: '',
          externalId: ''
        }
      ]
    }
  });

  const {
    fields: list,
    append,
    remove,
    update
  } = useFieldArray({
    control,
    name: 'list'
  });

  useEffect(() => {
    if (sources.length > 0) {
      reset({
        list: sources.map((item) => ({
          sourceName: item.sourceName,
          sourceUrl: item.sourceUrl || '',
          externalId: item.externalId || ''
        }))
      });
    }
  }, []);

  return (
    <Box>
      <TableContainer>
        <Table bg={'white'}>
          <Thead>
            <Tr bg={'myGray.50'}>
              <Th>{datasetT('External url')}</Th>
              <Th>{datasetT('External id')}</Th>
              <Th>{datasetT('filename')}</Th>
              <Th></Th>
            </Tr>
          </Thead>
          <Tbody>
            {list.map((item, index) => (
              <Tr key={item.id}>
                <Td>
                  <Input
                    {...register(`list.${index}.sourceUrl`, {
                      required: index !== list.length - 1,
                      onBlur(e) {
                        const val = (e.target.value || '') as string;
                        if (val.includes('.') && !list[index]?.sourceName) {
                          const sourceName = val.split('/').pop() || '';
                          update(index, {
                            ...list[index],
                            sourceUrl: val,
                            sourceName: decodeURIComponent(sourceName)
                          });
                        }
                        if (val && index === list.length - 1) {
                          append({
                            sourceName: '',
                            sourceUrl: '',
                            externalId: ''
                          });
                        }
                      }
                    })}
                  />
                </Td>
                <Td>
                  <Input {...register(`list.${index}.externalId`)} />
                </Td>
                <Td>
                  <Input {...register(`list.${index}.sourceName`)} />
                </Td>
                <Td>
                  <MyIcon
                    name={'delete'}
                    w={'16px'}
                    cursor={'pointer'}
                    _hover={{ color: 'red.600' }}
                    onClick={() => remove(index)}
                  />
                </Td>
              </Tr>
            ))}
          </Tbody>
        </Table>
      </TableContainer>
      <Flex mt={5} justifyContent={'space-between'}>
        <Button
          variant={'whitePrimary'}
          leftIcon={<SmallAddIcon />}
          onClick={() => {
            append({
              sourceName: '',
              sourceUrl: '',
              externalId: ''
            });
          }}
        >
          {commonT('Add new')}
        </Button>
        <Button
          isDisabled={list.length === 0}
          onClick={handleSubmit((data) => {
            setSources(
              data.list
                .filter((item) => !!item.sourceUrl)
                .map((item) => ({
                  id: getNanoid(32),
                  createStatus: 'waiting',
                  sourceName: item.sourceName || item.sourceUrl,
                  icon: getFileIcon(item.sourceUrl),
                  externalId: item.externalId,
                  sourceUrl: item.sourceUrl
                }))
            );

            goToNext();
          })}
        >
          {t('common.Next Step')}
        </Button>
      </Flex>
    </Box>
  );
};
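The `onBlur` handler above derives a default file name from the pasted external URL. A standalone sketch of that derivation, useful when testing the behaviour in isolation — the helper name is illustrative and not part of the commit:

```ts
// Illustrative helper mirroring the onBlur logic above: take the last path segment
// of the external URL and decode it for use as a default sourceName.
export const guessSourceNameFromUrl = (sourceUrl: string): string => {
  if (!sourceUrl.includes('.')) return '';
  const lastSegment = sourceUrl.split('/').pop() || '';
  return decodeURIComponent(lastSegment);
};

// Example: guessSourceNameFromUrl('https://example.com/files/%E6%96%87%E6%A1%A3.pdf') === '文档.pdf'
```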
@ -1,24 +1,25 @@
 import React, { useCallback, useEffect } from 'react';
-import { ImportDataComponentProps } from '@/web/core/dataset/type.d';

 import dynamic from 'next/dynamic';
-import { useImportStore } from '../Provider';
 import { useTranslation } from 'next-i18next';
 import { useForm } from 'react-hook-form';
 import { Box, Button, Flex, Input, Textarea } from '@chakra-ui/react';
 import { getNanoid } from '@fastgpt/global/common/string/tools';
 import Loading from '@fastgpt/web/components/common/MyLoading';
+import { useContextSelector } from 'use-context-selector';
+import { DatasetImportContext } from '../Context';

 const DataProcess = dynamic(() => import('../commonProgress/DataProcess'), {
   loading: () => <Loading fixed={false} />
 });
 const Upload = dynamic(() => import('../commonProgress/Upload'));

-const CustomTet = ({ activeStep, goToNext }: ImportDataComponentProps) => {
+const CustomTet = () => {
+  const activeStep = useContextSelector(DatasetImportContext, (v) => v.activeStep);
   return (
     <>
-      {activeStep === 0 && <CustomTextInput goToNext={goToNext} />}
-      {activeStep === 1 && <DataProcess showPreviewChunks goToNext={goToNext} />}
+      {activeStep === 0 && <CustomTextInput />}
+      {activeStep === 1 && <DataProcess showPreviewChunks />}
       {activeStep === 2 && <Upload />}
     </>
   );
@ -26,9 +27,9 @@ const CustomTet = ({ activeStep, goToNext }: ImportDataComponentProps) => {

 export default React.memo(CustomTet);

-const CustomTextInput = ({ goToNext }: { goToNext: () => void }) => {
+const CustomTextInput = () => {
   const { t } = useTranslation();
-  const { sources, setSources } = useImportStore();
+  const { sources, goToNext, setSources } = useContextSelector(DatasetImportContext, (v) => v);
   const { register, reset, handleSubmit } = useForm({
     defaultValues: {
       name: '',
@ -1,8 +1,5 @@
 import React, { useEffect } from 'react';
-import { ImportDataComponentProps } from '@/web/core/dataset/type.d';
-
 import dynamic from 'next/dynamic';
-import { useImportStore } from '../Provider';
 import { useTranslation } from 'next-i18next';
 import { useForm } from 'react-hook-form';
 import { Box, Button, Flex, Input, Link, Textarea } from '@chakra-ui/react';
@ -12,17 +9,21 @@ import { LinkCollectionIcon } from '@fastgpt/global/core/dataset/constants';
 import { useSystemStore } from '@/web/common/system/useSystemStore';
 import { getDocPath } from '@/web/common/system/doc';
 import Loading from '@fastgpt/web/components/common/MyLoading';
+import { useContextSelector } from 'use-context-selector';
+import { DatasetImportContext } from '../Context';

 const DataProcess = dynamic(() => import('../commonProgress/DataProcess'), {
   loading: () => <Loading fixed={false} />
 });
 const Upload = dynamic(() => import('../commonProgress/Upload'));

-const LinkCollection = ({ activeStep, goToNext }: ImportDataComponentProps) => {
+const LinkCollection = () => {
+  const activeStep = useContextSelector(DatasetImportContext, (v) => v.activeStep);
+
   return (
     <>
-      {activeStep === 0 && <CustomLinkImport goToNext={goToNext} />}
-      {activeStep === 1 && <DataProcess showPreviewChunks={false} goToNext={goToNext} />}
+      {activeStep === 0 && <CustomLinkImport />}
+      {activeStep === 1 && <DataProcess showPreviewChunks={false} />}
       {activeStep === 2 && <Upload />}
     </>
   );
@ -30,10 +31,13 @@ const LinkCollection = ({ activeStep, goToNext }: ImportDataComponentProps) => {

 export default React.memo(LinkCollection);

-const CustomLinkImport = ({ goToNext }: { goToNext: () => void }) => {
+const CustomLinkImport = () => {
   const { t } = useTranslation();
   const { feConfigs } = useSystemStore();
-  const { sources, setSources, processParamsForm } = useImportStore();
+  const { goToNext, sources, setSources, processParamsForm } = useContextSelector(
+    DatasetImportContext,
+    (v) => v
+  );
   const { register, reset, handleSubmit, watch } = useForm({
     defaultValues: {
       link: ''
@ -1,13 +1,14 @@
 import React, { useCallback, useEffect, useMemo, useState } from 'react';
-import { ImportDataComponentProps, ImportSourceItemType } from '@/web/core/dataset/type.d';
+import { ImportSourceItemType } from '@/web/core/dataset/type.d';
 import { Box, Button } from '@chakra-ui/react';
 import FileSelector from '../components/FileSelector';
 import { useTranslation } from 'next-i18next';
-import { useImportStore } from '../Provider';
 import dynamic from 'next/dynamic';
 import Loading from '@fastgpt/web/components/common/MyLoading';
 import { RenderUploadFiles } from '../components/RenderFiles';
+import { useContextSelector } from 'use-context-selector';
+import { DatasetImportContext } from '../Context';

 const DataProcess = dynamic(() => import('../commonProgress/DataProcess'), {
   loading: () => <Loading fixed={false} />
@ -16,11 +17,13 @@ const Upload = dynamic(() => import('../commonProgress/Upload'));

 const fileType = '.txt, .docx, .csv, .xlsx, .pdf, .md, .html, .pptx';

-const FileLocal = ({ activeStep, goToNext }: ImportDataComponentProps) => {
+const FileLocal = () => {
+  const activeStep = useContextSelector(DatasetImportContext, (v) => v.activeStep);
+
   return (
     <>
-      {activeStep === 0 && <SelectFile goToNext={goToNext} />}
-      {activeStep === 1 && <DataProcess showPreviewChunks goToNext={goToNext} />}
+      {activeStep === 0 && <SelectFile />}
+      {activeStep === 1 && <DataProcess showPreviewChunks />}
       {activeStep === 2 && <Upload />}
     </>
   );
@ -28,9 +31,9 @@ const FileLocal = ({ activeStep, goToNext }: ImportDataComponentProps) => {

 export default React.memo(FileLocal);

-const SelectFile = React.memo(function SelectFile({ goToNext }: { goToNext: () => void }) {
+const SelectFile = React.memo(function SelectFile() {
   const { t } = useTranslation();
-  const { sources, setSources } = useImportStore();
+  const { goToNext, sources, setSources } = useContextSelector(DatasetImportContext, (v) => v);
   const [selectFiles, setSelectFiles] = useState<ImportSourceItemType[]>(
     sources.map((source) => ({
       isUploading: false,
@ -1,24 +1,27 @@
 import React, { useEffect, useMemo, useState } from 'react';
-import { ImportDataComponentProps, ImportSourceItemType } from '@/web/core/dataset/type.d';
+import { ImportSourceItemType } from '@/web/core/dataset/type.d';
 import { Box, Button } from '@chakra-ui/react';
 import FileSelector from '../components/FileSelector';
 import { useTranslation } from 'next-i18next';
-import { useImportStore } from '../Provider';
 import dynamic from 'next/dynamic';
 import { fileDownload } from '@/web/common/file/utils';
 import { RenderUploadFiles } from '../components/RenderFiles';
+import { useContextSelector } from 'use-context-selector';
+import { DatasetImportContext } from '../Context';

 const PreviewData = dynamic(() => import('../commonProgress/PreviewData'));
 const Upload = dynamic(() => import('../commonProgress/Upload'));

 const fileType = '.csv';

-const FileLocal = ({ activeStep, goToNext }: ImportDataComponentProps) => {
+const FileLocal = () => {
+  const activeStep = useContextSelector(DatasetImportContext, (v) => v.activeStep);
+
   return (
     <>
-      {activeStep === 0 && <SelectFile goToNext={goToNext} />}
-      {activeStep === 1 && <PreviewData showPreviewChunks goToNext={goToNext} />}
+      {activeStep === 0 && <SelectFile />}
+      {activeStep === 1 && <PreviewData showPreviewChunks />}
       {activeStep === 2 && <Upload />}
     </>
   );
@ -32,9 +35,9 @@ const csvTemplate = `"第一列内容","第二列内容"
 "结合人工智能的演进历程,AIGC的发展大致可以分为三个阶段,即:早期萌芽阶段(20世纪50年代至90年代中期)、沉淀积累阶段(20世纪90年代中期至21世纪10年代中期),以及快速发展展阶段(21世纪10年代中期至今)。",""
 "AIGC发展分为几个阶段?","早期萌芽阶段(20世纪50年代至90年代中期)、沉淀积累阶段(20世纪90年代中期至21世纪10年代中期)、快速发展展阶段(21世纪10年代中期至今)"`;

-const SelectFile = React.memo(function SelectFile({ goToNext }: { goToNext: () => void }) {
+const SelectFile = React.memo(function SelectFile() {
   const { t } = useTranslation();
-  const { sources, setSources } = useImportStore();
+  const { goToNext, sources, setSources } = useContextSelector(DatasetImportContext, (v) => v);
   const [selectFiles, setSelectFiles] = useState<ImportSourceItemType[]>(
     sources.map((source) => ({
       isUploading: false,
@ -1,147 +1,42 @@
 import React, { useMemo } from 'react';
-import { Box, Button, Flex, IconButton } from '@chakra-ui/react';
-import MyIcon from '@fastgpt/web/components/common/Icon';
-import { useTranslation } from 'next-i18next';
-import { useRouter } from 'next/router';
-import { TabEnum } from '../../index';
-import { useMyStep } from '@fastgpt/web/hooks/useStep';
+import { Box, Flex } from '@chakra-ui/react';
 import dynamic from 'next/dynamic';
-import { useDatasetStore } from '@/web/core/dataset/store/dataset';
 import { ImportDataSourceEnum } from '@fastgpt/global/core/dataset/constants';
-import Provider from './Provider';
+import { useContextSelector } from 'use-context-selector';
+import DatasetImportContextProvider, { DatasetImportContext } from './Context';

 const FileLocal = dynamic(() => import('./diffSource/FileLocal'));
 const FileLink = dynamic(() => import('./diffSource/FileLink'));
 const FileCustomText = dynamic(() => import('./diffSource/FileCustomText'));
 const TableLocal = dynamic(() => import('./diffSource/TableLocal'));
+const ExternalFileCollection = dynamic(() => import('./diffSource/ExternalFile'));

 const ImportDataset = () => {
-  const { t } = useTranslation();
-  const router = useRouter();
-  const { datasetDetail } = useDatasetStore();
-  const { source = ImportDataSourceEnum.fileLocal, parentId } = (router.query || {}) as {
-    source: `${ImportDataSourceEnum}`;
-    parentId?: string;
-  };
-
-  const modeSteps: Record<`${ImportDataSourceEnum}`, { title: string }[]> = {
-    [ImportDataSourceEnum.fileLocal]: [
-      {
-        title: t('core.dataset.import.Select file')
-      },
-      {
-        title: t('core.dataset.import.Data Preprocessing')
-      },
-      {
-        title: t('core.dataset.import.Upload data')
-      }
-    ],
-    [ImportDataSourceEnum.fileLink]: [
-      {
-        title: t('core.dataset.import.Select file')
-      },
-      {
-        title: t('core.dataset.import.Data Preprocessing')
-      },
-      {
-        title: t('core.dataset.import.Upload data')
-      }
-    ],
-    [ImportDataSourceEnum.fileCustom]: [
-      {
-        title: t('core.dataset.import.Select file')
-      },
-      {
-        title: t('core.dataset.import.Data Preprocessing')
-      },
-      {
-        title: t('core.dataset.import.Upload data')
-      }
-    ],
-    [ImportDataSourceEnum.csvTable]: [
-      {
-        title: t('core.dataset.import.Select file')
-      },
-      {
-        title: t('core.dataset.import.Data Preprocessing')
-      },
-      {
-        title: t('core.dataset.import.Upload data')
-      }
-    ]
-  };
-  const steps = modeSteps[source];
-
-  const { activeStep, goToNext, goToPrevious, MyStep } = useMyStep({
-    defaultStep: 0,
-    steps
-  });
+  const importSource = useContextSelector(DatasetImportContext, (v) => v.importSource);

   const ImportComponent = useMemo(() => {
-    if (source === ImportDataSourceEnum.fileLocal) return FileLocal;
-    if (source === ImportDataSourceEnum.fileLink) return FileLink;
-    if (source === ImportDataSourceEnum.fileCustom) return FileCustomText;
-    if (source === ImportDataSourceEnum.csvTable) return TableLocal;
-  }, [source]);
+    if (importSource === ImportDataSourceEnum.fileLocal) return FileLocal;
+    if (importSource === ImportDataSourceEnum.fileLink) return FileLink;
+    if (importSource === ImportDataSourceEnum.fileCustom) return FileCustomText;
+    if (importSource === ImportDataSourceEnum.csvTable) return TableLocal;
+    if (importSource === ImportDataSourceEnum.externalFile) return ExternalFileCollection;
+  }, [importSource]);

   return ImportComponent ? (
-    <Flex flexDirection={'column'} bg={'white'} h={'100%'} px={[2, 9]} py={[2, 5]}>
-      <Flex>
-        {activeStep === 0 ? (
-          <Flex alignItems={'center'}>
-            <IconButton
-              icon={<MyIcon name={'common/backFill'} w={'14px'} />}
-              aria-label={''}
-              size={'smSquare'}
-              w={'26px'}
-              h={'26px'}
-              borderRadius={'50%'}
-              variant={'whiteBase'}
-              mr={2}
-              onClick={() =>
-                router.replace({
-                  query: {
-                    ...router.query,
-                    currentTab: TabEnum.collectionCard
-                  }
-                })
-              }
-            />
-            {t('common.Exit')}
-          </Flex>
-        ) : (
-          <Button
-            variant={'whiteBase'}
-            leftIcon={<MyIcon name={'common/backFill'} w={'14px'} />}
-            onClick={goToPrevious}
-          >
-            {t('common.Last Step')}
-          </Button>
-        )}
-        <Box flex={1} />
-      </Flex>
-      {/* step */}
-      <Box
-        mt={4}
-        mb={5}
-        px={3}
-        py={[2, 4]}
-        bg={'myGray.50'}
-        borderWidth={'1px'}
-        borderColor={'borderColor.low'}
-        borderRadius={'md'}
-      >
-        <Box maxW={['100%', '900px']} mx={'auto'}>
-          <MyStep />
-        </Box>
-      </Box>
-      <Provider dataset={datasetDetail} parentId={parentId} importSource={source}>
-        <Box flex={'1 0 0'} overflow={'auto'} position={'relative'}>
-          <ImportComponent activeStep={activeStep} goToNext={goToNext} />
-        </Box>
-      </Provider>
-    </Flex>
+    <Box flex={'1 0 0'} overflow={'auto'} position={'relative'}>
+      <ImportComponent />
+    </Box>
   ) : null;
 };

-export default React.memo(ImportDataset);
+const Render = () => {
+  return (
+    <Flex flexDirection={'column'} bg={'white'} h={'100%'} px={[2, 9]} py={[2, 5]}>
+      <DatasetImportContextProvider>
+        <ImportDataset />
+      </DatasetImportContextProvider>
+    </Flex>
+  );
+};
+
+export default React.memo(Render);
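The rewritten entry above resolves the step component from the import source held in context via an if-chain. A compact sketch of the same mapping idea as a lookup table — purely illustrative, assuming the same enum members and file paths as in the diff; the commit itself keeps the if-chain:

```tsx
// Illustrative alternative to the if-chain above: map each import source to its
// lazily-loaded component once, then index into the map.
import dynamic from 'next/dynamic';
import { ImportDataSourceEnum } from '@fastgpt/global/core/dataset/constants';

const importComponentMap = {
  [ImportDataSourceEnum.fileLocal]: dynamic(() => import('./diffSource/FileLocal')),
  [ImportDataSourceEnum.fileLink]: dynamic(() => import('./diffSource/FileLink')),
  [ImportDataSourceEnum.fileCustom]: dynamic(() => import('./diffSource/FileCustomText')),
  [ImportDataSourceEnum.csvTable]: dynamic(() => import('./diffSource/TableLocal')),
  [ImportDataSourceEnum.externalFile]: dynamic(() => import('./diffSource/ExternalFile'))
};

export const resolveImportComponent = (source: ImportDataSourceEnum) =>
  importComponentMap[source];
```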
@ -23,13 +23,14 @@ import type { VectorModelItemType } from '@fastgpt/global/core/ai/model.d';
|
|||||||
import { useContextSelector } from 'use-context-selector';
|
import { useContextSelector } from 'use-context-selector';
|
||||||
import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContext';
|
import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContext';
|
||||||
import MyDivider from '@fastgpt/web/components/common/MyDivider/index';
|
import MyDivider from '@fastgpt/web/components/common/MyDivider/index';
|
||||||
|
import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
|
||||||
|
|
||||||
const Info = ({ datasetId }: { datasetId: string }) => {
|
const Info = ({ datasetId }: { datasetId: string }) => {
|
||||||
const { t } = useTranslation();
|
const { t } = useTranslation();
|
||||||
const { datasetT } = useI18n();
|
const { datasetT } = useI18n();
|
||||||
const { datasetDetail, loadDatasetDetail, loadDatasets, updateDataset } = useDatasetStore();
|
const { datasetDetail, loadDatasetDetail, updateDataset, rebuildingCount, trainingCount } =
|
||||||
const rebuildingCount = useContextSelector(DatasetPageContext, (v) => v.rebuildingCount);
|
useContextSelector(DatasetPageContext, (v) => v);
|
||||||
const trainingCount = useContextSelector(DatasetPageContext, (v) => v.trainingCount);
|
|
||||||
const refetchDatasetTraining = useContextSelector(
|
const refetchDatasetTraining = useContextSelector(
|
||||||
DatasetPageContext,
|
DatasetPageContext,
|
||||||
(v) => v.refetchDatasetTraining
|
(v) => v.refetchDatasetTraining
|
||||||
@ -82,9 +83,6 @@ const Info = ({ datasetId }: { datasetId: string }) => {
|
|||||||
...data
|
...data
|
||||||
});
|
});
|
||||||
},
|
},
|
||||||
onSuccess() {
|
|
||||||
loadDatasets();
|
|
||||||
},
|
|
||||||
successToast: t('common.Update Success'),
|
successToast: t('common.Update Success'),
|
||||||
errorToast: t('common.Update Failed')
|
errorToast: t('common.Update Failed')
|
||||||
});
|
});
|
||||||
@ -117,7 +115,7 @@ const Info = ({ datasetId }: { datasetId: string }) => {
|
|||||||
},
|
},
|
||||||
onSuccess() {
|
onSuccess() {
|
||||||
refetchDatasetTraining();
|
refetchDatasetTraining();
|
||||||
loadDatasetDetail(datasetId, true);
|
loadDatasetDetail(datasetId);
|
||||||
},
|
},
|
||||||
successToast: datasetT('Rebuild embedding start tip'),
|
successToast: datasetT('Rebuild embedding start tip'),
|
||||||
errorToast: t('common.Update Failed')
|
errorToast: t('common.Update Failed')
|
||||||
@ -128,16 +126,16 @@ const Info = ({ datasetId }: { datasetId: string }) => {
|
|||||||
return (
|
return (
|
||||||
<Box py={5} px={[5, 10]}>
|
<Box py={5} px={[5, 10]}>
|
||||||
<Flex mt={5} w={'100%'} alignItems={'center'}>
|
<Flex mt={5} w={'100%'} alignItems={'center'}>
|
||||||
<Box flex={['0 0 90px', '0 0 160px']} w={0}>
|
<Box fontSize={['sm', 'md']} flex={['0 0 90px', '0 0 160px']} w={0}>
|
||||||
{t('core.dataset.Dataset ID')}
|
{t('core.dataset.Dataset ID')}
|
||||||
</Box>
|
</Box>
|
||||||
<Box flex={1}>{datasetDetail._id}</Box>
|
<Box flex={1}>{datasetDetail._id}</Box>
|
||||||
</Flex>
|
</Flex>
|
||||||
<Flex mt={8} w={'100%'} alignItems={'center'} flexWrap={'wrap'}>
|
<Flex mt={8} w={'100%'} alignItems={'center'} flexWrap={'wrap'}>
|
||||||
<Box flex={['0 0 90px', '0 0 160px']} w={0}>
|
<Box fontSize={['sm', 'md']} flex={['0 0 90px', '0 0 160px']} w={0}>
|
||||||
{t('core.ai.model.Vector Model')}
|
{t('core.ai.model.Vector Model')}
|
||||||
</Box>
|
</Box>
|
||||||
<Box flex={[1, '0 0 300px']}>
|
<Box flex={[1, '0 0 320px']}>
|
||||||
<AIModelSelector
|
<AIModelSelector
|
||||||
w={'100%'}
|
w={'100%'}
|
||||||
value={vectorModel.model}
|
value={vectorModel.model}
|
||||||
@ -162,16 +160,16 @@ const Info = ({ datasetId }: { datasetId: string }) => {
|
|||||||
</Box>
|
</Box>
|
||||||
</Flex>
|
</Flex>
|
||||||
<Flex mt={8} w={'100%'} alignItems={'center'}>
|
<Flex mt={8} w={'100%'} alignItems={'center'}>
|
||||||
<Box flex={['0 0 90px', '0 0 160px']} w={0}>
|
<Box fontSize={['sm', 'md']} flex={['0 0 90px', '0 0 160px']} w={0}>
|
||||||
{t('core.Max Token')}
|
{t('core.Max Token')}
|
||||||
</Box>
|
</Box>
|
||||||
<Box flex={[1, '0 0 300px']}>{vectorModel.maxToken}</Box>
|
<Box flex={[1, '0 0 320px']}>{vectorModel.maxToken}</Box>
|
||||||
</Flex>
|
</Flex>
|
||||||
<Flex mt={6} alignItems={'center'} flexWrap={'wrap'}>
|
<Flex mt={6} alignItems={'center'} flexWrap={'wrap'}>
|
||||||
<Box flex={['0 0 90px', '0 0 160px']} w={0}>
|
<Box fontSize={['sm', 'md']} flex={['0 0 90px', '0 0 160px']} w={0}>
|
||||||
{t('core.ai.model.Dataset Agent Model')}
|
{t('core.ai.model.Dataset Agent Model')}
|
||||||
</Box>
|
</Box>
|
||||||
<Box flex={[1, '0 0 300px']}>
|
<Box flex={[1, '0 0 320px']}>
|
||||||
<AIModelSelector
|
<AIModelSelector
|
||||||
w={'100%'}
|
w={'100%'}
|
||||||
value={agentModel.model}
|
value={agentModel.model}
|
||||||
@ -188,13 +186,29 @@ const Info = ({ datasetId }: { datasetId: string }) => {
|
|||||||
</Box>
|
</Box>
|
||||||
</Flex>
|
</Flex>
|
||||||
|
|
||||||
<MyDivider my={4} h={'2px'} maxW={'500px'} />
|
<MyDivider my={6} h={'2px'} maxW={'500px'} />
|
||||||
|
|
||||||
|
{datasetDetail.type === DatasetTypeEnum.externalFile && (
|
||||||
|
<>
|
||||||
|
<Flex w={'100%'} alignItems={'center'}>
|
||||||
|
<Box fontSize={['sm', 'md']} flex={['0 0 90px', '0 0 160px']} w={0}>
|
||||||
|
{datasetT('External read url')}
|
||||||
|
</Box>
|
||||||
|
<Input
|
||||||
|
flex={[1, '0 0 320px']}
|
||||||
|
placeholder="https://test.com/read?fileId={{fileId}}"
|
||||||
|
{...register('externalReadUrl')}
|
||||||
|
/>
|
||||||
|
</Flex>
|
||||||
|
<MyDivider my={6} h={'2px'} maxW={'500px'} />
|
||||||
|
</>
|
||||||
|
)}
|
||||||
|
|
||||||
<Flex mt={5} w={'100%'} alignItems={'center'}>
|
<Flex mt={5} w={'100%'} alignItems={'center'}>
|
||||||
<Box flex={['0 0 90px', '0 0 160px']} w={0}>
|
<Box fontSize={['sm', 'md']} flex={['0 0 90px', '0 0 160px']} w={0}>
|
||||||
{t('core.dataset.Avatar')}
|
{t('core.dataset.Avatar')}
|
||||||
</Box>
|
</Box>
|
||||||
<Box flex={[1, '0 0 300px']}>
|
<Box flex={[1, '0 0 320px']}>
|
||||||
<MyTooltip label={t('common.avatar.Select Avatar')}>
|
<MyTooltip label={t('common.avatar.Select Avatar')}>
|
||||||
<Avatar
|
<Avatar
|
||||||
m={'auto'}
|
m={'auto'}
|
||||||
@ -208,18 +222,20 @@ const Info = ({ datasetId }: { datasetId: string }) => {
|
|||||||
</Box>
|
</Box>
|
||||||
</Flex>
|
</Flex>
|
||||||
<Flex mt={8} w={'100%'} alignItems={'center'}>
|
<Flex mt={8} w={'100%'} alignItems={'center'}>
|
||||||
<Box flex={['0 0 90px', '0 0 160px']} w={0}>
|
<Box fontSize={['sm', 'md']} flex={['0 0 90px', '0 0 160px']} w={0}>
|
||||||
{t('core.dataset.Name')}
|
{t('core.dataset.Name')}
|
||||||
</Box>
|
</Box>
|
||||||
<Input flex={[1, '0 0 300px']} maxLength={30} {...register('name')} />
|
<Input flex={[1, '0 0 320px']} maxLength={30} {...register('name')} />
|
||||||
</Flex>
|
</Flex>
|
||||||
<Flex mt={8} alignItems={'center'} w={'100%'}>
|
<Flex mt={8} alignItems={'center'} w={'100%'}>
|
||||||
<Box flex={['0 0 90px', '0 0 160px']}>{t('common.Intro')}</Box>
|
<Box fontSize={['sm', 'md']} flex={['0 0 90px', '0 0 160px']}>
|
||||||
<Textarea flex={[1, '0 0 300px']} {...register('intro')} placeholder={t('common.Intro')} />
|
{t('common.Intro')}
|
||||||
|
</Box>
|
||||||
|
<Textarea flex={[1, '0 0 320px']} {...register('intro')} placeholder={t('common.Intro')} />
|
||||||
</Flex>
|
</Flex>
|
||||||
{datasetDetail.isOwner && (
|
{datasetDetail.isOwner && (
|
||||||
<Flex mt={5} alignItems={'center'} w={'100%'} flexWrap={'wrap'}>
|
<Flex mt={5} alignItems={'center'} w={'100%'} flexWrap={'wrap'}>
|
||||||
<Box flex={['0 0 90px', '0 0 160px']} w={0}>
|
<Box fontSize={['sm', 'md']} flex={['0 0 90px', '0 0 160px']} w={0}>
|
||||||
{t('user.Permission')}
|
{t('user.Permission')}
|
||||||
</Box>
|
</Box>
|
||||||
<Box>
|
<Box>
|
||||||
@ -234,7 +250,7 @@ const Info = ({ datasetId }: { datasetId: string }) => {
|
|||||||
)}
|
)}
|
||||||
|
|
||||||
<Flex mt={5} w={'100%'} alignItems={'flex-end'}>
|
<Flex mt={5} w={'100%'} alignItems={'flex-end'}>
|
||||||
<Box flex={['0 0 90px', '0 0 160px']} w={0}></Box>
|
<Box fontSize={['sm', 'md']} flex={['0 0 90px', '0 0 160px']} w={0}></Box>
|
||||||
<Button
|
<Button
|
||||||
isLoading={btnLoading}
|
isLoading={btnLoading}
|
||||||
mr={4}
|
mr={4}
|
||||||
|
|||||||
@@ -5,17 +5,10 @@ import { useUserStore } from '@/web/support/user/useUserStore';
 import { Box, Flex, IconButton, useTheme, Progress } from '@chakra-ui/react';
 import { useSystemStore } from '@/web/common/system/useSystemStore';
 import Avatar from '@/components/Avatar';
-import {
-DatasetStatusEnum,
-DatasetTypeEnum,
-DatasetTypeMap
-} from '@fastgpt/global/core/dataset/constants';
+import { DatasetTypeMap } from '@fastgpt/global/core/dataset/constants';
 import DatasetTypeTag from '@/components/core/dataset/DatasetTypeTag';
-import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
 import MyIcon from '@fastgpt/web/components/common/Icon';
-import { useConfirm } from '@fastgpt/web/hooks/useConfirm';
 import SideTabs from '@/components/SideTabs';
-import { useRequest } from '@fastgpt/web/hooks/useRequest';
 import { useRouter } from 'next/router';
 import Tabs from '@/components/Tabs';
 import { useContextSelector } from 'use-context-selector';

@@ -36,12 +29,10 @@ const Slider = ({ currentTab }: { currentTab: TabEnum }) => {
 const { datasetT } = useI18n();
 const router = useRouter();
 const query = router.query;
-const { datasetDetail, startWebsiteSync } = useDatasetStore();
 const { userInfo } = useUserStore();
-const { isPc, setLoading } = useSystemStore();
-const vectorTrainingMap = useContextSelector(DatasetPageContext, (v) => v.vectorTrainingMap);
-const agentTrainingMap = useContextSelector(DatasetPageContext, (v) => v.agentTrainingMap);
-const rebuildingCount = useContextSelector(DatasetPageContext, (v) => v.rebuildingCount);
+const { isPc } = useSystemStore();
+const { datasetDetail, vectorTrainingMap, agentTrainingMap, rebuildingCount } =
+useContextSelector(DatasetPageContext, (v) => v);

 const tabList = [
 {

@@ -67,20 +58,6 @@ const Slider = ({ currentTab }: { currentTab: TabEnum }) => {
 [query, router]
 );

-const { ConfirmModal: ConfirmSyncModal, openConfirm: openConfirmSync } = useConfirm({
-type: 'common'
-});
-const { mutate: onUpdateDatasetWebsiteConfig } = useRequest({
-mutationFn: () => {
-setLoading(true);
-return startWebsiteSync();
-},
-onSettled() {
-setLoading(false);
-},
-errorToast: t('common.Update Failed')
-});
-
 return (
 <>
 {isPc ? (

@@ -101,25 +78,6 @@ const Slider = ({ currentTab }: { currentTab: TabEnum }) => {
 {DatasetTypeMap[datasetDetail.type] && (
 <Flex alignItems={'center'} pl={2} justifyContent={'space-between'}>
 <DatasetTypeTag type={datasetDetail.type} />
-{datasetDetail.type === DatasetTypeEnum.websiteDataset &&
-datasetDetail.status === DatasetStatusEnum.active && (
-<MyTooltip label={t('core.dataset.website.Start Sync')}>
-<MyIcon
-mt={1}
-name={'common/refreshLight'}
-w={'12px'}
-color={'myGray.500'}
-cursor={'pointer'}
-onClick={() =>
-openConfirmSync(
-onUpdateDatasetWebsiteConfig,
-undefined,
-t('core.dataset.website.Confirm Create Tips')
-)()
-}
-/>
-</MyTooltip>
-)}
 </Flex>
 )}
 </Box>

@@ -206,8 +164,6 @@ const Slider = ({ currentTab }: { currentTab: TabEnum }) => {
 />
 </Box>
 )}
-
-<ConfirmSyncModal />
 </>
 );
 };

@@ -25,6 +25,8 @@ import { fileDownload } from '@/web/common/file/utils';
 import QuoteItem from '@/components/core/dataset/QuoteItem';
 import { useSystemStore } from '@/web/common/system/useSystemStore';
 import SearchParamsTip from '@/components/core/dataset/SearchParamsTip';
+import { useContextSelector } from 'use-context-selector';
+import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContext';

 const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 12);

@@ -48,7 +50,7 @@ const Test = ({ datasetId }: { datasetId: string }) => {
 const theme = useTheme();
 const { toast } = useToast();
 const { llmModelList } = useSystemStore();
-const { datasetDetail } = useDatasetStore();
+const datasetDetail = useContextSelector(DatasetPageContext, (v) => v.datasetDetail);
 const { pushDatasetTestItem } = useSearchTestStore();
 const [inputType, setInputType] = useState<'text' | 'file'>('text');
 const [datasetTestItem, setDatasetTestItem] = useState<SearchTestStoreItemType>();

@@ -1,4 +1,4 @@
-import React, { useCallback, useMemo } from 'react';
+import React from 'react';
 import { useRouter } from 'next/router';
 import { Box } from '@chakra-ui/react';
 import { useToast } from '@fastgpt/web/hooks/useToast';

@@ -9,15 +9,17 @@ import PageContainer from '@/components/PageContainer';
 import { serviceSideProps } from '@/web/common/utils/i18n';
 import { useTranslation } from 'next-i18next';

-import CollectionCard from './components/CollectionCard';
-import { useDatasetStore } from '@/web/core/dataset/store/dataset';
-import { useConfirm } from '@fastgpt/web/hooks/useConfirm';
-import Head from 'next/head';
 import Slider from './components/Slider';
 import MyBox from '@fastgpt/web/components/common/MyBox';
-import { DatasetPageContextProvider } from '@/web/core/dataset/context/datasetPageContext';
+import {
+DatasetPageContext,
+DatasetPageContextProvider
+} from '@/web/core/dataset/context/datasetPageContext';
+import CollectionPageContextProvider from './components/CollectionCard/Context';
+import { useContextSelector } from 'use-context-selector';
+import NextHead from '@/components/common/NextHead';

+const CollectionCard = dynamic(() => import('./components/CollectionCard/index'));
 const DataCard = dynamic(() => import('./components/DataCard'));
 const Test = dynamic(() => import('./components/Test'));
 const Info = dynamic(() => import('./components/Info'));

@@ -30,16 +32,14 @@ export enum TabEnum {
 info = 'info',
 import = 'import'
 }
+type Props = { datasetId: string; currentTab: TabEnum };

-const Detail = ({ datasetId, currentTab }: { datasetId: string; currentTab: TabEnum }) => {
+const Detail = ({ datasetId, currentTab }: Props) => {
 const { t } = useTranslation();
 const { toast } = useToast();
 const router = useRouter();
-const { datasetDetail, loadDatasetDetail } = useDatasetStore();
-const { ConfirmModal: ConfirmSyncModal, openConfirm: openConfirmSync } = useConfirm({
-type: 'common'
-});
+const datasetDetail = useContextSelector(DatasetPageContext, (v) => v.datasetDetail);
+const loadDatasetDetail = useContextSelector(DatasetPageContext, (v) => v.loadDatasetDetail);

 useQuery([datasetId], () => loadDatasetDetail(datasetId), {
 onError(err: any) {

@@ -53,36 +53,37 @@ const Detail = ({ datasetId, currentTab }: { datasetId: string; currentTab: TabE

 return (
 <>
-<Head>
-<title>{datasetDetail?.name}</title>
-</Head>
-<DatasetPageContextProvider
-value={{
-datasetId
-}}
->
-<PageContainer>
-<MyBox display={'flex'} flexDirection={['column', 'row']} h={'100%'} pt={[4, 0]}>
-<Slider currentTab={currentTab} />
+<NextHead title={datasetDetail?.name} icon={datasetDetail?.avatar} />
+<PageContainer>
+<MyBox display={'flex'} flexDirection={['column', 'row']} h={'100%'} pt={[4, 0]}>
+<Slider currentTab={currentTab} />

 {!!datasetDetail._id && (
 <Box flex={'1 0 0'} pb={0}>
-{currentTab === TabEnum.collectionCard && <CollectionCard />}
-{currentTab === TabEnum.dataCard && <DataCard />}
-{currentTab === TabEnum.test && <Test datasetId={datasetId} />}
-{currentTab === TabEnum.info && <Info datasetId={datasetId} />}
-{currentTab === TabEnum.import && <Import />}
-</Box>
-)}
-</MyBox>
-</PageContainer>
-</DatasetPageContextProvider>
-<ConfirmSyncModal />
+{currentTab === TabEnum.collectionCard && (
+<CollectionPageContextProvider>
+<CollectionCard />
+</CollectionPageContextProvider>
+)}
+{currentTab === TabEnum.dataCard && <DataCard />}
+{currentTab === TabEnum.test && <Test datasetId={datasetId} />}
+{currentTab === TabEnum.info && <Info datasetId={datasetId} />}
+{currentTab === TabEnum.import && <Import />}
+</Box>
+)}
+</MyBox>
+</PageContainer>
 </>
 );
 };

+const Render = (data: Props) => (
+<DatasetPageContextProvider datasetId={data.datasetId}>
+<Detail {...data} />
+</DatasetPageContextProvider>
+);
+export default Render;

 export async function getServerSideProps(context: any) {
 const currentTab = context?.query?.currentTab || TabEnum.collectionCard;
 const datasetId = context?.query?.datasetId;

@@ -91,5 +92,3 @@ export async function getServerSideProps(context: any) {
 props: { currentTab, datasetId, ...(await serviceSideProps(context, ['dataset', 'file'])) }
 };
 }
-
-export default React.memo(Detail);

@@ -20,9 +20,11 @@ import { MongoImageTypeEnum } from '@fastgpt/global/common/file/image/constants'
 import { QuestionOutlineIcon } from '@chakra-ui/icons';
 import MySelect from '@fastgpt/web/components/common/MySelect';
 import AIModelSelector from '@/components/Select/AIModelSelector';
+import { useI18n } from '@/web/context/I18n';

 const CreateModal = ({ onClose, parentId }: { onClose: () => void; parentId?: string }) => {
 const { t } = useTranslation();
+const { datasetT } = useI18n();
 const [refresh, setRefresh] = useState(false);
 const { toast } = useToast();
 const router = useRouter();

@@ -102,25 +104,31 @@ const CreateModal = ({ onClose, parentId }: { onClose: () => void; parentId?: st
 gridTemplateColumns={'repeat(1,1fr)'}
 list={[
 {
-title: t('core.dataset.Common Dataset'),
+title: datasetT('Common Dataset'),
 value: DatasetTypeEnum.dataset,
 icon: 'core/dataset/commonDataset',
-desc: t('core.dataset.Common Dataset Desc')
+desc: datasetT('Common Dataset Desc')
 },
 ...(feConfigs.isPlus
 ? [
 {
-title: t('core.dataset.Website Dataset'),
+title: datasetT('Website Dataset'),
 value: DatasetTypeEnum.websiteDataset,
 icon: 'core/dataset/websiteDataset',
-desc: t('core.dataset.Website Dataset Desc')
+desc: datasetT('Website Dataset Desc')
 }
+// {
+// title: datasetT('External File'),
+// value: DatasetTypeEnum.externalFile,
+// icon: 'core/dataset/websiteDataset',
+// desc: datasetT('External file Dataset Desc')
+// }
 ]
 : [])
 ]}
 value={getValues('type')}
 onChange={(e) => {
-setValue('type', e as `${DatasetTypeEnum}`);
+setValue('type', e as DatasetTypeEnum);
 setRefresh(!refresh);
 }}
 />

@@ -35,7 +35,10 @@ const MoveModal = ({
 const [parentId, setParentId] = useState<string>('');

 const { data } = useQuery(['getDatasets', parentId], () => {
-return Promise.all([getDatasets({ parentId, type: 'folder' }), getDatasetPaths(parentId)]);
+return Promise.all([
+getDatasets({ parentId, type: DatasetTypeEnum.folder }),
+getDatasetPaths(parentId)
+]);
 });
 const paths = useMemo(
 () => [

@@ -1,7 +1,6 @@
-import React, { useMemo, useRef } from 'react';
+import React, { useMemo, useRef, useState } from 'react';
 import { Box, Flex, Grid, useDisclosure, Image, Button } from '@chakra-ui/react';
 import { useRouter } from 'next/router';
-import { useDatasetStore } from '@/web/core/dataset/store/dataset';
 import PageContainer from '@/components/PageContainer';
 import { useConfirm } from '@fastgpt/web/hooks/useConfirm';
 import { AddIcon } from '@chakra-ui/icons';

@@ -35,28 +34,30 @@ import DatasetTypeTag from '@/components/core/dataset/DatasetTypeTag';
 import { useToast } from '@fastgpt/web/hooks/useToast';
 import { getErrText } from '@fastgpt/global/common/error/utils';
 import { xmlDownloadFetch } from '@/web/common/api/xmlFetch';
+import { useDatasetStore } from '@/web/core/dataset/store/dataset';

 const CreateModal = dynamic(() => import('./component/CreateModal'), { ssr: false });
 const MoveModal = dynamic(() => import('./component/MoveModal'), { ssr: false });

-const Kb = () => {
+const Dataset = () => {
 const { t } = useTranslation();
 const { toast } = useToast();
 const router = useRouter();
 const { parentId } = router.query as { parentId: string };
 const { setLoading } = useSystemStore();
 const { userInfo } = useUserStore();
+const { myDatasets, loadMyDatasets, setMyDatasets } = useDatasetStore();

 const DeleteTipsMap = useRef({
 [DatasetTypeEnum.folder]: t('dataset.deleteFolderTips'),
 [DatasetTypeEnum.dataset]: t('core.dataset.Delete Confirm'),
-[DatasetTypeEnum.websiteDataset]: t('core.dataset.Delete Confirm')
+[DatasetTypeEnum.websiteDataset]: t('core.dataset.Delete Confirm'),
+[DatasetTypeEnum.externalFile]: t('core.dataset.Delete Confirm')
 });

 const { openConfirm, ConfirmModal } = useConfirm({
 type: 'delete'
 });
-const { myDatasets, loadDatasets, setDatasets, updateDataset } = useDatasetStore();
 const { onOpenModal: onOpenTitleModal, EditModal: EditTitleModal } = useEditTitle({
 title: t('Rename')
 });

@@ -78,7 +79,7 @@ const Kb = () => {
 return id;
 },
 onSuccess(id: string) {
-setDatasets(myDatasets.filter((item) => item._id !== id));
+setMyDatasets(myDatasets.filter((item) => item._id !== id));
 },
 onSettled() {
 setLoading(false);

@@ -112,7 +113,7 @@ const Kb = () => {
 const { data, refetch, isFetching } = useQuery(
 ['loadDataset', parentId],
 () => {
-return Promise.all([loadDatasets(parentId), getDatasetPaths(parentId)]);
+return Promise.all([loadMyDatasets(parentId), getDatasetPaths(parentId)]);
 },
 {
 onError(err) {

@@ -139,7 +140,10 @@ const Kb = () => {
 );

 return (
-<PageContainer isLoading={isFetching} insertProps={{ px: [5, '48px'] }}>
+<PageContainer
+isLoading={myDatasets.length === 0 && isFetching}
+insertProps={{ px: [5, '48px'] }}
+>
 <Flex pt={[4, '30px']} alignItems={'center'} justifyContent={'space-between'}>
 {/* url path */}
 <ParentPaths

@@ -317,7 +321,10 @@ const Kb = () => {
 defaultVal: dataset.name,
 onSuccess: (val) => {
 if (val === dataset.name || !val) return;
-updateDataset({ id: dataset._id, name: val });
+putDatasetById({
+id: dataset._id,
+name: val
+});
 }
 })
 },

@@ -351,7 +358,7 @@ const Kb = () => {
 </Flex>
 ),
 onClick: () => {
-updateDataset({
+putDatasetById({
 id: dataset._id,
 permission: PermissionTypeEnum.public
 });

@@ -371,7 +378,7 @@ const Kb = () => {
 </Flex>
 ),
 onClick: () => {
-updateDataset({
+putDatasetById({
 id: dataset._id,
 permission: PermissionTypeEnum.private
 });

@@ -476,9 +483,9 @@ const Kb = () => {
 export async function getServerSideProps(content: any) {
 return {
 props: {
-...(await serviceSideProps(content))
+...(await serviceSideProps(content, ['dataset']))
 }
 };
 }

-export default Kb;
+export default Dataset;

@@ -66,7 +66,7 @@ export const pushQAUsage = async ({
 modelType: ModelTypeEnum.llm,
 tokens
 });
-console.log(tokens, '----');
 concatUsage({
 billId,
 teamId,

@@ -1,6 +1,10 @@
 import { GET, POST, PUT, DELETE } from '@/web/common/api/request';
 import type { ParentTreePathItemType } from '@fastgpt/global/common/parentFolder/type.d';
-import type { DatasetItemType, DatasetListItemType } from '@fastgpt/global/core/dataset/type.d';
+import type {
+DatasetItemType,
+DatasetListItemType,
+DatasetSimpleItemType
+} from '@fastgpt/global/core/dataset/type.d';
 import type {
 GetDatasetCollectionsProps,
 GetDatasetDataListProps,

@@ -39,13 +43,13 @@ import type { getDatasetTrainingQueueResponse } from '@/pages/api/core/dataset/t
 import type { rebuildEmbeddingBody } from '@/pages/api/core/dataset/training/rebuildEmbedding';

 /* ======================== dataset ======================= */
-export const getDatasets = (data: { parentId?: string; type?: `${DatasetTypeEnum}` }) =>
+export const getDatasets = (data: { parentId?: string; type?: DatasetTypeEnum }) =>
 GET<DatasetListItemType[]>(`/core/dataset/list`, data);

 /**
 * get type=dataset list
 */
-export const getAllDataset = () => GET<DatasetListItemType[]>(`/core/dataset/allDataset`);
+export const getAllDataset = () => GET<DatasetSimpleItemType[]>(`/core/dataset/allDataset`);

 export const getDatasetPaths = (parentId?: string) =>
 GET<ParentTreePathItemType[]>('/core/dataset/paths', { parentId });

@@ -11,6 +11,8 @@ import { useQuery } from '@tanstack/react-query';
 import React, { useMemo, useState } from 'react';
 import { useTranslation } from 'next-i18next';
 import { useLoading } from '@fastgpt/web/hooks/useLoading';
+import { useContextSelector } from 'use-context-selector';
+import { DatasetPageContext } from '../context/datasetPageContext';

 const SelectCollections = ({
 datasetId,

@@ -37,7 +39,8 @@ const SelectCollections = ({
 }) => {
 const { t } = useTranslation();
 const theme = useTheme();
-const { datasetDetail, loadDatasetDetail } = useDatasetStore();
+const { loadDatasetDetail } = useContextSelector(DatasetPageContext, (v) => v);

 const { Loading } = useLoading();
 const [selectedDatasetCollectionIds, setSelectedDatasetCollectionIds] =
 useState<string[]>(defaultSelectedId);

@@ -1,4 +1,5 @@
 import { defaultQAModels, defaultVectorModels } from '@fastgpt/global/core/ai/model';
+import { DatasetTypeEnum, TrainingModeEnum } from '@fastgpt/global/core/dataset/constants';
 import type {
 DatasetCollectionItemType,
 DatasetItemType

@@ -11,7 +12,7 @@ export const defaultDatasetDetail: DatasetItemType = {
 teamId: '',
 tmbId: '',
 updateTime: new Date(),
-type: 'dataset',
+type: DatasetTypeEnum.dataset,
 avatar: '/icon/logo.svg',
 name: '',
 intro: '',

@@ -34,7 +35,7 @@ export const defaultCollectionDetail: DatasetCollectionItemType = {
 teamId: '',
 tmbId: '',
 updateTime: new Date(),
-type: 'dataset',
+type: DatasetTypeEnum.dataset,
 avatar: '/icon/logo.svg',
 name: '',
 intro: '',

@@ -51,7 +52,7 @@ export const defaultCollectionDetail: DatasetCollectionItemType = {
 sourceName: '',
 sourceId: '',
 createTime: new Date(),
-trainingType: 'chunk',
+trainingType: TrainingModeEnum.chunk,
 chunkSize: 0
 };

@@ -1,11 +1,23 @@
 import { useQuery } from '@tanstack/react-query';
-import { ReactNode, useMemo } from 'react';
+import { ReactNode, useMemo, useState } from 'react';
 import { useTranslation } from 'next-i18next';
 import { createContext } from 'use-context-selector';
-import { getDatasetTrainingQueue, getTrainingQueueLen } from '../api';
-import { useDatasetStore } from '../store/dataset';
+import {
+getDatasetById,
+getDatasetTrainingQueue,
+getTrainingQueueLen,
+putDatasetById
+} from '../api';
+import { defaultDatasetDetail } from '../constants';
+import { DatasetUpdateBody } from '@fastgpt/global/core/dataset/api';
+import { DatasetItemType } from '@fastgpt/global/core/dataset/type';

 type DatasetPageContextType = {
+datasetId: string;
+datasetDetail: DatasetItemType;
+loadDatasetDetail: (id: string) => Promise<DatasetItemType>;
+updateDataset: (data: DatasetUpdateBody) => Promise<void>;

 vectorTrainingMap: {
 colorSchema: string;
 tip: string;

@@ -19,10 +31,6 @@ type DatasetPageContextType = {
 refetchDatasetTraining: () => void;
 };

-type DatasetPageContextValueType = {
-datasetId: string;
-};
-
 export const DatasetPageContext = createContext<DatasetPageContextType>({
 vectorTrainingMap: {
 colorSchema: '',

@@ -36,19 +44,46 @@ export const DatasetPageContext = createContext<DatasetPageContextType>({
 trainingCount: 0,
 refetchDatasetTraining: function (): void {
 throw new Error('Function not implemented.');
+},
+datasetId: '',
+datasetDetail: defaultDatasetDetail,
+loadDatasetDetail: function (id: string): Promise<DatasetItemType> {
+throw new Error('Function not implemented.');
+},
+updateDataset: function (data: DatasetUpdateBody): Promise<void> {
+throw new Error('Function not implemented.');
 }
 });

 export const DatasetPageContextProvider = ({
 children,
-value
+datasetId
 }: {
 children: ReactNode;
-value: DatasetPageContextValueType;
+datasetId: string;
 }) => {
 const { t } = useTranslation();
-const { datasetId } = value;
-const { datasetDetail } = useDatasetStore();
+// dataset detail
+const [datasetDetail, setDatasetDetail] = useState(defaultDatasetDetail);

+const loadDatasetDetail = async (id: string) => {
+const data = await getDatasetById(id);

+setDatasetDetail(data);

+return data;
+};
+const updateDataset = async (data: DatasetUpdateBody) => {
+await putDatasetById(data);

+if (datasetId === data.id) {
+setDatasetDetail((state) => ({
+...state,
+...data
+}));
+}
+};

 // global queue
 const { data: { vectorTrainingCount = 0, agentTrainingCount = 0 } = {} } = useQuery(

@@ -108,6 +143,11 @@ export const DatasetPageContextProvider = ({
 });

 const contextValue: DatasetPageContextType = {
+datasetId,
+datasetDetail,
+loadDatasetDetail,
+updateDataset,

 vectorTrainingMap,
 agentTrainingMap,
 rebuildingCount,

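A minimal consumer sketch, not part of this commit: any component mounted under the refactored DatasetPageContextProvider can read the dataset detail and trigger updates through use-context-selector, the same pattern Test.tsx and Info.tsx switch to in this diff. The RenameButton component and the "(copy)" suffix below are hypothetical; the selected fields and updateDataset payload mirror the context value shown above.

import React from 'react';
import { useContextSelector } from 'use-context-selector';
import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContext';

// Hypothetical consumer: selects only the slices it needs, so unrelated
// context updates (e.g. training queue polling) do not re-render it.
const RenameButton = () => {
  const datasetDetail = useContextSelector(DatasetPageContext, (v) => v.datasetDetail);
  const updateDataset = useContextSelector(DatasetPageContext, (v) => v.updateDataset);

  return (
    <button
      onClick={() =>
        // updateDataset PUTs the change and patches the local datasetDetail state
        updateDataset({ id: datasetDetail._id, name: `${datasetDetail.name} (copy)` })
      }
    >
      Rename {datasetDetail.name}
    </button>
  );
};

export default RenameButton;
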
@@ -0,0 +1,11 @@
+import { ReactNode } from 'react';
+import { createContext } from 'use-context-selector';
+
+type ContextType = {};
+
+export const Context = createContext<ContextType>({});
+
+export const ContextProvider = ({ children }: { children: ReactNode }) => {
+const contextValue: ContextType = {};
+return <Context.Provider value={contextValue}>{children}</Context.Provider>;
+};

@@ -1,18 +0,0 @@
-import { ReactNode } from 'react';
-import { createContext } from 'use-context-selector';
-
-type DatasetContextType = {};
-
-type DatasetContextValueType = {};
-
-export const DatasetContext = createContext<DatasetContextType>({});
-
-export const DatasetContextProvider = ({
-children,
-value
-}: {
-children: ReactNode;
-value: DatasetContextValueType;
-}) => {
-return <DatasetContext.Provider value={value}>{children}</DatasetContext.Provider>;
-};

@@ -1,30 +1,18 @@
 import { create } from 'zustand';
 import { devtools, persist } from 'zustand/middleware';
 import { immer } from 'zustand/middleware/immer';
-import type { DatasetItemType, DatasetListItemType } from '@fastgpt/global/core/dataset/type.d';
-import {
-getAllDataset,
-getDatasets,
-getDatasetById,
-putDatasetById,
-postWebsiteSync
-} from '@/web/core/dataset/api';
-import { defaultDatasetDetail } from '../constants';
-import type { DatasetUpdateBody } from '@fastgpt/global/core/dataset/api.d';
-import { DatasetStatusEnum } from '@fastgpt/global/core/dataset/constants';
-import { postCreateTrainingUsage } from '@/web/support/wallet/usage/api';
-import { checkTeamWebSyncLimit } from '@/web/support/user/team/api';
+import type {
+DatasetListItemType,
+DatasetSimpleItemType
+} from '@fastgpt/global/core/dataset/type.d';
+import { getAllDataset, getDatasets } from '@/web/core/dataset/api';

 type State = {
-allDatasets: DatasetListItemType[];
-loadAllDatasets: () => Promise<DatasetListItemType[]>;
+allDatasets: DatasetSimpleItemType[];
+loadAllDatasets: () => Promise<DatasetSimpleItemType[]>;
 myDatasets: DatasetListItemType[];
-loadDatasets: (parentId?: string) => Promise<any>;
-setDatasets(val: DatasetListItemType[]): void;
-datasetDetail: DatasetItemType;
-loadDatasetDetail: (id: string, init?: boolean) => Promise<DatasetItemType>;
-updateDataset: (data: DatasetUpdateBody) => Promise<any>;
-startWebsiteSync: () => Promise<any>;
+loadMyDatasets: (parentId?: string) => Promise<any>;
+setMyDatasets(val: DatasetListItemType[]): void;
 };

 export const useDatasetStore = create<State>()(

@@ -40,66 +28,17 @@ export const useDatasetStore = create<State>()(
 return res;
 },
 myDatasets: [],
-async loadDatasets(parentId = '') {
+async loadMyDatasets(parentId = '') {
 const res = await getDatasets({ parentId });
 set((state) => {
 state.myDatasets = res;
 });
 return res;
 },
-setDatasets(val) {
+setMyDatasets(val) {
 set((state) => {
 state.myDatasets = val;
 });
-},
-datasetDetail: defaultDatasetDetail,
-async loadDatasetDetail(id: string, init = false) {
-if (!id || (id === get().datasetDetail._id && !init)) return get().datasetDetail;
-
-const data = await getDatasetById(id);
-
-set((state) => {
-state.datasetDetail = data;
-});
-
-return data;
-},
-async updateDataset(data) {
-await putDatasetById(data);
-
-if (get().datasetDetail._id === data.id) {
-set((state) => {
-state.datasetDetail = {
-...get().datasetDetail,
-...data
-};
-});
-}
-set((state) => {
-state.myDatasets = state.myDatasets = state.myDatasets.map((item) =>
-item._id === data.id
-? {
-...item,
-...data
-}
-: item
-);
-});
-},
-async startWebsiteSync() {
-await checkTeamWebSyncLimit();
-
-const billId = await postCreateTrainingUsage({
-name: 'core.dataset.training.Website Sync',
-datasetId: get().datasetDetail._id
-});
-
-return postWebsiteSync({ datasetId: get().datasetDetail._id, billId }).then(() => {
-get().updateDataset({
-id: get().datasetDetail._id,
-status: DatasetStatusEnum.syncing
-});
-});
 }
 })),
 {

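A small hook sketch, not part of this commit, illustrating the slimmed-down zustand store: dataset detail state has moved into DatasetPageContext, so useDatasetStore now only exposes the list helpers shown above (myDatasets, loadMyDatasets, setMyDatasets, plus the allDatasets pair). The useMyDatasets helper name is hypothetical.

import { useEffect } from 'react';
import { useDatasetStore } from '@/web/core/dataset/store/dataset';

// Hypothetical helper hook: reload the dataset list whenever the folder changes.
export const useMyDatasets = (parentId?: string) => {
  const { myDatasets, loadMyDatasets } = useDatasetStore();

  useEffect(() => {
    // loadMyDatasets defaults parentId to '' (the root folder)
    loadMyDatasets(parentId);
  }, [parentId, loadMyDatasets]);

  return myDatasets;
};
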
projects/app/src/web/core/dataset/type.d.ts (vendored)

@@ -1,13 +1,8 @@
 import type { PushDatasetDataChunkProps } from '@fastgpt/global/core/dataset/api';
 import { TrainingModeEnum } from '@fastgpt/global/core/dataset/constants';
-import { ImportProcessWayEnum, ImportSourceTypeEnum } from './constants';
+import { ImportProcessWayEnum } from './constants';
 import { UseFormReturn } from 'react-hook-form';

-export type ImportDataComponentProps = {
-activeStep: number;
-goToNext: () => void;
-};
-
 export type ImportSourceItemType = {
 id: string;

@@ -17,10 +12,10 @@ export type ImportSourceItemType = {

 // source
 sourceName: string;
-sourceSize?: string;
 icon: string;

 // file
+sourceSize?: string;
 isUploading?: boolean;
 uploadedFileRate?: number;
 dbFileId?: string; // file ID stored in the database; it is also the relateId in the image and collection metadata

@@ -31,6 +26,10 @@ export type ImportSourceItemType = {

 // custom text
 rawText?: string;

+// external file
+sourceUrl?: string;
+externalId?: string;
 };

 export type ImportSourceParamsType = UseFormReturn<