Dataset Permission (#1786)
* feat: dataset controllers; feat: dataset schema; fix: add missing type to dataset schema
* feat: dataset list api
* chore: all dataset api
* feat: new auth dataset method
* chore: use new auth method in detail, paths; feat: add new param defaultPermission to create api
* chore: app auth params
* chore: use new auth method
* feat: new auth collection and file method
* chore: dataset collection api new auth
* chore: create/*.ts auth
* chore: dataset auth
* fix: import paths
* feat: dataset collaborator
* chore: dataset frontend; feat: dataset list frontend; feat: dataset detail
* feat: finish the dataset permission; fix: ts errors
* fix: empty response of collection api
* chore: adjust the code
* chore: i18n
* fix: ts error
* fix: fe CollectionCard permission

Signed-off-by: FinleyGe <m13203533462@163.com>
parent 2b25e3cc2d
commit 980b4d3db5
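Before the file diffs, it helps to state the rule this PR introduces: a team member's effective permission on a dataset is their explicit collaborator grant when one exists, otherwise the dataset's new defaultPermission field, and owners bypass the check entirely. Below is a minimal standalone sketch of that rule, assuming permission values are bit flags (the schema change below stores them as a Number); the names here are illustrative — the real logic lives in the DatasetPermission class and authDatasetByTmbId further down.

type PermissionValueType = number;

function hasDatasetPer(opts: {
  collaboratorPer?: PermissionValueType; // explicit per-member grant, may be absent
  defaultPermission: PermissionValueType; // dataset-wide fallback stored on the schema
  isOwner: boolean; // team owner or dataset creator
  required: PermissionValueType; // e.g. ReadPermissionVal / WritePermissionVal
}): boolean {
  if (opts.isOwner) return true; // owners always pass
  const per = opts.collaboratorPer ?? opts.defaultPermission;
  // bit-flag containment check (an assumption; mirrors how Per.checkPer is used below)
  return (per & opts.required) === opts.required;
}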
@@ -4,7 +4,8 @@ import { ErrType } from '../errorCode';
 const startCode = 507000;
 export enum CommonErrEnum {
   fileNotFound = 'fileNotFound',
-  unAuthFile = 'unAuthFile'
+  unAuthFile = 'unAuthFile',
+  missingParams = 'missingParams'
 }
 const datasetErr = [
   {
@@ -14,6 +15,10 @@ const datasetErr = [
   {
     statusText: CommonErrEnum.unAuthFile,
     message: 'error.unAuthFile'
+  },
+  {
+    statusText: CommonErrEnum.missingParams,
+    message: 'error.missingParams'
   }
 ];
 export default datasetErr.reduce((acc, cur, index) => {
@@ -2,6 +2,7 @@ import { ErrType } from '../errorCode';

 /* dataset: 501000 */
 export enum DatasetErrEnum {
+  unExist = 'unExistDataset',
   unAuthDataset = 'unAuthDataset',
   unCreateCollection = 'unCreateCollection',
   unAuthDatasetCollection = 'unAuthDatasetCollection',
@@ -11,6 +12,10 @@ export enum DatasetErrEnum {
   unLinkCollection = 'unLinkCollection'
 }
 const datasetErr = [
+  {
+    statusText: DatasetErrEnum.unExist,
+    message: 'core.dataset.error.unExistDataset'
+  },
   {
     statusText: DatasetErrEnum.unAuthDataset,
     message: 'core.dataset.error.unAuthDataset'
packages/global/core/dataset/api.d.ts (3 changes, vendored)
@@ -9,12 +9,13 @@ export type DatasetUpdateBody = {
   name?: string;
   avatar?: string;
   intro?: string;
-  permission?: DatasetSchemaType['permission'];
+  permission?: DatasetSchemaType['permission']; // TODO: Should be deleted.
   agentModel?: LLMModelItemType;
   status?: DatasetSchemaType['status'];

   websiteConfig?: DatasetSchemaType['websiteConfig'];
   externalReadUrl?: DatasetSchemaType['externalReadUrl'];
+  defaultPermission?: DatasetSchemaType['defaultPermission'];
 };

 /* ================= collection ===================== */
|
|||||||
11
packages/global/core/dataset/collaborator.d.ts
vendored
Normal file
11
packages/global/core/dataset/collaborator.d.ts
vendored
Normal file
@ -0,0 +1,11 @@
|
|||||||
|
import { UpdateClbPermissionProps } from '../../support/permission/collaborator';
|
||||||
|
import { PermissionValueType } from '../../support/permission/type';
|
||||||
|
|
||||||
|
export type UpdateDatasetCollaboratorBody = UpdateClbPermissionProps & {
|
||||||
|
datasetId: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
export type DatasetCollaboratorDeleteParams = {
|
||||||
|
datasetId: string;
|
||||||
|
tmbId: string;
|
||||||
|
};
|
||||||
packages/global/core/dataset/type.d.ts (16 changes, vendored)
@@ -1,3 +1,4 @@
+import { PermissionValueType } from 'support/permission/type';
 import type { LLMModelItemType, VectorModelItemType } from '../../core/ai/model.d';
 import { PermissionTypeEnum } from '../../support/permission/constant';
 import { PushDatasetDataChunkProps } from './api';
@@ -8,6 +9,8 @@ import {
   SearchScoreTypeEnum,
   TrainingModeEnum
 } from './constants';
+import { DatasetPermission } from '../../support/permission/dataset/controller';
+import { Permission } from '../../support/permission/controller';

 /* schema */
 export type DatasetSchemaType = {
@@ -24,7 +27,7 @@ export type DatasetSchemaType = {
   intro: string;
   type: DatasetTypeEnum;
   status: `${DatasetStatusEnum}`;
-  permission: `${PermissionTypeEnum}`;
+  permission: DatasetPermission;

   // metadata
   websiteConfig?: {
@@ -32,6 +35,7 @@ export type DatasetSchemaType = {
     selector: string;
   };
   externalReadUrl?: string;
+  defaultPermission: PermissionValueType;
 };

 export type DatasetCollectionSchemaType = {
@@ -132,24 +136,22 @@ export type DatasetListItemType = {
   name: string;
   intro: string;
   type: DatasetTypeEnum;
-  isOwner: boolean;
-  canWrite: boolean;
-  permission: `${PermissionTypeEnum}`;
+  permission: DatasetPermission;
   vectorModel: VectorModelItemType;
+  defaultPermission: PermissionValueType;
 };

 export type DatasetItemType = Omit<DatasetSchemaType, 'vectorModel' | 'agentModel'> & {
   vectorModel: VectorModelItemType;
   agentModel: LLMModelItemType;
-  isOwner: boolean;
-  canWrite: boolean;
 };

 /* ================= collection ===================== */
 export type DatasetCollectionItemType = CollectionWithDatasetType & {
-  canWrite: boolean;
   sourceName: string;
   sourceId?: string;
   file?: DatasetFileSchema;
+  permission: DatasetPermission;
 };

 /* ================= data ===================== */
packages/global/support/permission/dataset/constant.ts (new file, 20 lines)
@@ -0,0 +1,20 @@
+import { NullPermission, PermissionKeyEnum, PermissionList } from '../constant';
+
+export enum DatasetPermissionKeyEnum {}
+
+export const DatasetPermissionList = {
+  [PermissionKeyEnum.read]: {
+    ...PermissionList[PermissionKeyEnum.read],
+    description: '可查看知识库内容'
+  },
+  [PermissionKeyEnum.write]: {
+    ...PermissionList[PermissionKeyEnum.write],
+    description: '可增加和变更知识库内容'
+  },
+  [PermissionKeyEnum.manage]: {
+    ...PermissionList[PermissionKeyEnum.manage],
+    description: '可管理整个知识库数据和信息'
+  }
+};
+
+export const DatasetDefaultPermission = NullPermission;
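For non-Chinese readers, the three description strings translate roughly to: "can view dataset contents" (read), "can add and modify dataset contents" (write), and "can manage all data and settings of the dataset" (manage).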
packages/global/support/permission/dataset/controller.ts (new file, 14 lines)
@@ -0,0 +1,14 @@
+import { NullPermission } from '../constant';
+import { PerConstructPros, Permission } from '../controller';
+export class DatasetPermission extends Permission {
+  constructor(props?: PerConstructPros) {
+    if (!props) {
+      props = {
+        per: NullPermission
+      };
+    } else if (!props?.per) {
+      props.per = NullPermission;
+    }
+    super(props);
+  }
+}
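A hedged usage sketch of the new class: checkPer and the constructor props match how the auth helpers later in this diff use them, but the exact PerConstructPros shape lives in the base controller, so treat the flags here as assumptions.

import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
import { DatasetPermission } from '@fastgpt/global/support/permission/dataset/controller';

// With no props the constructor falls back to NullPermission,
// so every check fails until a real value is supplied.
const visitor = new DatasetPermission();
visitor.checkPer(ReadPermissionVal); // => false

// Mirrors authDatasetByTmbId below: an explicit grant (or the dataset's
// defaultPermission) plus an isOwner flag decide the outcome.
const member = new DatasetPermission({ per: ReadPermissionVal, isOwner: false });
member.checkPer(ReadPermissionVal); // => true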
@@ -12,6 +12,7 @@ import {
   TeamMemberCollectionName
 } from '@fastgpt/global/support/user/team/constant';
 import { PermissionTypeEnum, PermissionTypeMap } from '@fastgpt/global/support/permission/constant';
+import { DatasetDefaultPermission } from '@fastgpt/global/support/permission/dataset/constant';

 export const DatasetCollectionName = 'datasets';

@@ -90,7 +91,11 @@ const DatasetSchema = new Schema({
       }
     }
   },
-  externalReadUrl: String
+  externalReadUrl: String,
+  defaultPermission: {
+    type: Number,
+    default: DatasetDefaultPermission
+  }
 });

 try {
@@ -39,7 +39,6 @@ export const dispatchRunPlugin = async (props: RunPluginProps): Promise<RunPlugi
   if (source === PluginSourceEnum.personal) {
     await authAppByTmbId({
       appId: pluginId,
-      teamId: workflowApp.teamId,
       tmbId: workflowApp.tmbId,
       per: ReadPermissionVal
     });
@@ -2,7 +2,6 @@ import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';
 import type { ModuleDispatchProps } from '@fastgpt/global/core/workflow/type/index.d';
 import { SelectAppItemType } from '@fastgpt/global/core/workflow/type/index.d';
 import { dispatchWorkFlow } from '../index';
-import { MongoApp } from '../../../../core/app/schema';
 import { responseWrite } from '../../../../common/response';
 import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
 import { SseResponseEventEnum } from '@fastgpt/global/core/workflow/runtime/constants';
@@ -48,7 +47,6 @@ export const dispatchAppRequest = async (props: Props): Promise<Response> => {
   // Check that the workflow's tmb has permission to call this app (authorizes the workflow owner, not the chat user)
   const { app: appData } = await authAppByTmbId({
     appId: app.id,
-    teamId: workflowApp.teamId,
     tmbId: workflowApp.tmbId,
     per: ReadPermissionVal
   });
@@ -12,17 +12,15 @@ import { AuthResponseType } from '../type/auth.d';
 import { PermissionValueType } from '@fastgpt/global/support/permission/type';

 export const authAppByTmbId = async ({
-  teamId,
   tmbId,
   appId,
   per
 }: {
-  teamId: string;
   tmbId: string;
   appId: string;
   per: PermissionValueType;
 }) => {
-  const { permission: tmbPer } = await getTmbInfoByTmbId({ tmbId });
+  const { teamId, permission: tmbPer } = await getTmbInfoByTmbId({ tmbId });

   const app = await (async () => {
     // get app and per
@@ -68,10 +66,9 @@ export const authApp = async ({
   }
 > => {
   const result = await parseHeaderCert(props);
-  const { teamId, tmbId } = result;
+  const { tmbId } = result;

   const { app } = await authAppByTmbId({
-    teamId,
     tmbId,
     appId,
     per
@@ -1,201 +0,0 @@
-import { AuthModeType } from '../type';
-import { parseHeaderCert } from '../controller';
-import { DatasetErrEnum } from '@fastgpt/global/common/error/code/dataset';
-import { MongoDataset } from '../../../core/dataset/schema';
-import { getCollectionWithDataset } from '../../../core/dataset/controller';
-import { PermissionTypeEnum } from '@fastgpt/global/support/permission/constant';
-import { TeamMemberRoleEnum } from '@fastgpt/global/support/user/team/constant';
-import { AuthResponseType } from '@fastgpt/global/support/permission/type';
-import {
-  CollectionWithDatasetType,
-  DatasetFileSchema,
-  DatasetSchemaType
-} from '@fastgpt/global/core/dataset/type';
-import { getFileById } from '../../../common/file/gridfs/controller';
-import { BucketNameEnum } from '@fastgpt/global/common/file/constants';
-import { getTmbInfoByTmbId } from '../../user/team/controller';
-import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
-import { MongoDatasetCollection } from '../../../core/dataset/collection/schema';
-
-export async function authDatasetByTmbId({
-  teamId,
-  tmbId,
-  datasetId,
-  per
-}: {
-  teamId: string;
-  tmbId: string;
-  datasetId: string;
-  per: AuthModeType['per'];
-}) {
-  const { role } = await getTmbInfoByTmbId({ tmbId });
-
-  const { dataset, isOwner, canWrite } = await (async () => {
-    const dataset = await MongoDataset.findOne({ _id: datasetId, teamId }).lean();
-
-    if (!dataset) {
-      return Promise.reject(DatasetErrEnum.unAuthDataset);
-    }
-
-    const isOwner =
-      role !== TeamMemberRoleEnum.visitor &&
-      (String(dataset.tmbId) === tmbId || role === TeamMemberRoleEnum.owner);
-    const canWrite =
-      isOwner ||
-      (role !== TeamMemberRoleEnum.visitor && dataset.permission === PermissionTypeEnum.public);
-    if (per === 'r') {
-      if (!isOwner && dataset.permission !== PermissionTypeEnum.public) {
-        return Promise.reject(DatasetErrEnum.unAuthDataset);
-      }
-    }
-    if (per === 'w' && !canWrite) {
-      return Promise.reject(DatasetErrEnum.unAuthDataset);
-    }
-    if (per === 'owner' && !isOwner) {
-      return Promise.reject(DatasetErrEnum.unAuthDataset);
-    }
-
-    return { dataset, isOwner, canWrite };
-  })();
-
-  return {
-    dataset,
-    isOwner,
-    canWrite
-  };
-}
-export async function authDataset({
-  datasetId,
-  per = 'owner',
-  ...props
-}: AuthModeType & {
-  datasetId: string;
-}): Promise<
-  AuthResponseType & {
-    dataset: DatasetSchemaType;
-  }
-> {
-  const result = await parseHeaderCert(props);
-  const { teamId, tmbId } = result;
-  const { dataset, isOwner, canWrite } = await authDatasetByTmbId({
-    teamId,
-    tmbId,
-    datasetId,
-    per
-  });
-
-  return {
-    ...result,
-    dataset,
-    isOwner,
-    canWrite
-  };
-}
-
-/*
-  Read: in team and dataset permission is public
-  Write: in team, not visitor and dataset permission is public
-*/
-export async function authDatasetCollection({
-  collectionId,
-  per = 'owner',
-  ...props
-}: AuthModeType & {
-  collectionId: string;
-}): Promise<
-  AuthResponseType & {
-    collection: CollectionWithDatasetType;
-  }
-> {
-  const { teamId, tmbId } = await parseHeaderCert(props);
-  const { role } = await getTmbInfoByTmbId({ tmbId });
-
-  const { collection, isOwner, canWrite } = await (async () => {
-    const collection = await getCollectionWithDataset(collectionId);
-
-    if (!collection || String(collection.teamId) !== teamId) {
-      return Promise.reject(DatasetErrEnum.unAuthDatasetCollection);
-    }
-
-    const isOwner = String(collection.tmbId) === tmbId || role === TeamMemberRoleEnum.owner;
-    const canWrite =
-      isOwner ||
-      (role !== TeamMemberRoleEnum.visitor &&
-        collection.datasetId.permission === PermissionTypeEnum.public);
-
-    if (per === 'r') {
-      if (!isOwner && collection.datasetId.permission !== PermissionTypeEnum.public) {
-        return Promise.reject(DatasetErrEnum.unAuthDatasetCollection);
-      }
-    }
-    if (per === 'w' && !canWrite) {
-      return Promise.reject(DatasetErrEnum.unAuthDatasetCollection);
-    }
-    if (per === 'owner' && !isOwner) {
-      return Promise.reject(DatasetErrEnum.unAuthDatasetCollection);
-    }
-
-    return {
-      collection,
-      isOwner,
-      canWrite
-    };
-  })();
-
-  return {
-    teamId,
-    tmbId,
-    collection,
-    isOwner,
-    canWrite
-  };
-}
-
-export async function authDatasetFile({
-  fileId,
-  per = 'owner',
-  ...props
-}: AuthModeType & {
-  fileId: string;
-}): Promise<
-  AuthResponseType & {
-    file: DatasetFileSchema;
-  }
-> {
-  const { teamId, tmbId } = await parseHeaderCert(props);
-
-  const [file, collection] = await Promise.all([
-    getFileById({ bucketName: BucketNameEnum.dataset, fileId }),
-    MongoDatasetCollection.findOne({
-      teamId,
-      fileId
-    })
-  ]);
-
-  if (!file) {
-    return Promise.reject(CommonErrEnum.fileNotFound);
-  }
-
-  if (!collection) {
-    return Promise.reject(DatasetErrEnum.unAuthDatasetFile);
-  }
-
-  // file role = collection role
-  try {
-    const { isOwner, canWrite } = await authDatasetCollection({
-      ...props,
-      collectionId: collection._id,
-      per
-    });
-
-    return {
-      teamId,
-      tmbId,
-      file,
-      isOwner,
-      canWrite
-    };
-  } catch (error) {
-    return Promise.reject(DatasetErrEnum.unAuthDatasetFile);
-  }
-}
packages/service/support/permission/dataset/auth.ts (new file, 213 lines)
@@ -0,0 +1,213 @@
+import { PermissionValueType } from '@fastgpt/global/support/permission/type';
+import { getResourcePermission, parseHeaderCert } from '../controller';
+import { AuthPropsType, AuthResponseType } from '../type/auth';
+import {
+  CollectionWithDatasetType,
+  DatasetDataItemType,
+  DatasetFileSchema,
+  DatasetSchemaType
+} from '@fastgpt/global/core/dataset/type';
+import { getTmbInfoByTmbId } from '../../user/team/controller';
+import { MongoDataset } from '../../../core/dataset/schema';
+import { PerResourceTypeEnum } from '@fastgpt/global/support/permission/constant';
+import { DatasetErrEnum } from '@fastgpt/global/common/error/code/dataset';
+import { DatasetPermission } from '@fastgpt/global/support/permission/dataset/controller';
+import { getCollectionWithDataset } from '../../../core/dataset/controller';
+import { MongoDatasetCollection } from '../../../core/dataset/collection/schema';
+import { getFileById } from '../../../common/file/gridfs/controller';
+import { BucketNameEnum } from '@fastgpt/global/common/file/constants';
+import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
+import { MongoDatasetData } from '../../../core/dataset/data/schema';
+
+export async function authDatasetByTmbId({
+  tmbId,
+  datasetId,
+  per
+}: {
+  tmbId: string;
+  datasetId: string;
+  per: PermissionValueType;
+}) {
+  const { teamId, permission: tmbPer } = await getTmbInfoByTmbId({ tmbId });
+
+  const dataset = await (async () => {
+    // get app and per
+    const [dataset, rp] = await Promise.all([
+      MongoDataset.findOne({ _id: datasetId, teamId }).lean(),
+      getResourcePermission({
+        teamId,
+        tmbId,
+        resourceId: datasetId,
+        resourceType: PerResourceTypeEnum.dataset
+      }) // this could be null
+    ]);
+
+    if (!dataset) {
+      return Promise.reject(DatasetErrEnum.unExist);
+    }
+
+    const isOwner = tmbPer.isOwner || String(dataset.tmbId) === tmbId;
+    const Per = new DatasetPermission({
+      per: rp?.permission ?? dataset.defaultPermission,
+      isOwner
+    });
+
+    if (!Per.checkPer(per)) {
+      return Promise.reject(DatasetErrEnum.unAuthDataset);
+    }
+
+    return {
+      ...dataset,
+      permission: Per
+    };
+  })();
+
+  return { dataset: dataset };
+}
+
+// Auth Dataset
+export async function authDataset({
+  datasetId,
+  per,
+  ...props
+}: AuthPropsType & {
+  datasetId: string;
+}): Promise<
+  AuthResponseType<DatasetPermission> & {
+    dataset: DatasetSchemaType;
+  }
+> {
+  const { teamId, tmbId } = await parseHeaderCert(props);
+
+  const { dataset } = await authDatasetByTmbId({
+    tmbId,
+    datasetId,
+    per
+  });
+
+  return {
+    teamId,
+    tmbId,
+    dataset,
+    permission: dataset.permission
+  };
+}
+
+// the temporary solution for authDatasetCollection is getting the
+export async function authDatasetCollection({
+  collectionId,
+  per,
+  ...props
+}: AuthPropsType & {
+  collectionId: string;
+}): Promise<
+  AuthResponseType<DatasetPermission> & {
+    collection: CollectionWithDatasetType;
+  }
+> {
+  const { teamId, tmbId } = await parseHeaderCert(props);
+  const collection = await getCollectionWithDataset(collectionId);
+
+  if (!collection) {
+    return Promise.reject(DatasetErrEnum.unExist);
+  }
+
+  const { dataset } = await authDatasetByTmbId({
+    tmbId,
+    datasetId: collection.datasetId._id,
+    per
+  });
+
+  return {
+    teamId,
+    tmbId,
+    collection,
+    permission: dataset.permission
+  };
+}
+
+export async function authDatasetFile({
+  fileId,
+  per,
+  ...props
+}: AuthPropsType & {
+  fileId: string;
+}): Promise<
+  AuthResponseType<DatasetPermission> & {
+    file: DatasetFileSchema;
+  }
+> {
+  const { teamId, tmbId } = await parseHeaderCert(props);
+
+  const [file, collection] = await Promise.all([
+    getFileById({ bucketName: BucketNameEnum.dataset, fileId }),
+    MongoDatasetCollection.findOne({
+      teamId,
+      fileId
+    })
+  ]);
+
+  if (!file) {
+    return Promise.reject(CommonErrEnum.fileNotFound);
+  }
+
+  if (!collection) {
+    return Promise.reject(DatasetErrEnum.unAuthDatasetFile);
+  }
+
+  try {
+    const { permission } = await authDatasetCollection({
+      ...props,
+      collectionId: collection._id,
+      per
+    });
+
+    return {
+      teamId,
+      tmbId,
+      file,
+      permission
+    };
+  } catch (error) {
+    return Promise.reject(DatasetErrEnum.unAuthDatasetFile);
+  }
+}
+
+export async function authDatasetData({
+  dataId,
+  ...props
+}: AuthPropsType & {
+  dataId: string;
+}) {
+  // get mongo dataset.data
+  const datasetData = await MongoDatasetData.findById(dataId);
+
+  if (!datasetData) {
+    return Promise.reject('core.dataset.error.Data not found');
+  }
+
+  const result = await authDatasetCollection({
+    ...props,
+    collectionId: datasetData.collectionId
+  });
+
+  const data: DatasetDataItemType = {
+    id: String(datasetData._id),
+    teamId: datasetData.teamId,
+    q: datasetData.q,
+    a: datasetData.a,
+    chunkIndex: datasetData.chunkIndex,
+    indexes: datasetData.indexes,
+    datasetId: String(datasetData.datasetId),
+    collectionId: String(datasetData.collectionId),
+    sourceName: result.collection.name || '',
+    sourceId: result.collection?.fileId || result.collection?.rawLink,
+    isOwner: String(datasetData.tmbId) === result.tmbId,
+    canWrite: result.permission.hasWritePer
+  };
+
+  return {
+    ...result,
+    datasetData: data
+  };
+}
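For orientation, here is a sketch of how the API routes in this PR consume these helpers; the call shape matches collection/create.ts further down, and the datasetId extraction is illustrative.

import type { NextApiRequest } from 'next';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';

async function exampleHandler(req: NextApiRequest) {
  // Rejects with DatasetErrEnum.unAuthDataset (or unExist) when the
  // resolved DatasetPermission lacks write access.
  const { teamId, tmbId, dataset, permission } = await authDataset({
    req,
    authToken: true,
    authApiKey: true,
    datasetId: (req.body as { datasetId: string }).datasetId,
    per: WritePermissionVal
  });
  // ...proceed with a write operation scoped to teamId/tmbId
}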
@@ -31,7 +31,6 @@ export async function authOutLinkCrud({
   }

   const { app } = await authAppByTmbId({
-    teamId,
     tmbId,
     appId: outLink.appId,
     per: ManagePermissionVal
@@ -11,11 +11,11 @@ export type AuthPropsType = {
   per: PermissionValueType;
 };

-export type AuthResponseType = {
+export type AuthResponseType<T = Permission> = {
   teamId: string;
   tmbId: string;
   authType?: `${AuthUserTypeEnum}`;
   appId?: string;
   apikey?: string;
-  permission: Permission;
+  permission: T;
 };
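The new generic parameter is what lets the dataset helpers above narrow their return type while app routes keep the base Permission class, for example:

// Narrowed response type used by the new dataset auth helpers:
type DatasetAuthResponse = AuthResponseType<DatasetPermission> & {
  dataset: DatasetSchemaType;
};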
pnpm-lock.yaml (18489 changes, generated)
File diff suppressed because it is too large.
@@ -1,9 +1,9 @@
-/** @type {import('next').NextConfig} */
 const { i18n } = require('./next-i18next.config');
 const path = require('path');

 const isDev = process.env.NODE_ENV === 'development';

+/** @type {import('next').NextConfig} */
 const nextConfig = {
   i18n,
   output: 'standalone',
@@ -11,6 +11,7 @@ import {
   SearchDataResponseItemType
 } from '@fastgpt/global/core/dataset/type';
 import { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';
+import { PermissionValueType } from '@fastgpt/global/support/permission/type';

 /* ================= dataset ===================== */
 export type CreateDatasetParams = {
@@ -21,6 +22,7 @@ export type CreateDatasetParams = {
   avatar: string;
   vectorModel?: string;
   agentModel?: string;
+  defaultPermission?: PermissionValueType;
 };

 export type RebuildEmbeddingProps = {
@@ -3,6 +3,7 @@ import {
   DatasetCollectionSchemaType,
   DatasetDataSchemaType
 } from '@fastgpt/global/core/dataset/type.d';
+import { DatasetPermission } from '@fastgpt/global/support/permission/dataset/controller';

 /* ================= dataset ===================== */

@@ -18,7 +19,7 @@ export type DatasetCollectionsListItemType = {
   trainingAmount: number;
   fileId?: string;
   rawLink?: string;
-  canWrite: boolean;
+  permission: DatasetPermission;
 };

 /* ================= data ===================== */
@@ -1,31 +1,66 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
+import type { NextApiRequest } from 'next';
 import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
 import { getVectorModel } from '@fastgpt/service/core/ai/model';
 import type { DatasetSimpleItemType } from '@fastgpt/global/core/dataset/type.d';
-import { mongoRPermission } from '@fastgpt/global/support/permission/utils';
-import { authUserPer } from '@fastgpt/service/support/permission/user/auth';
 import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
 import { NextAPI } from '@/service/middleware/entry';
-import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
+import {
+  PerResourceTypeEnum,
+  ReadPermissionVal
+} from '@fastgpt/global/support/permission/constant';
+import { MongoResourcePermission } from '@fastgpt/service/support/permission/schema';
+import { DatasetPermission } from '@fastgpt/global/support/permission/dataset/controller';
+import { authUserPer } from '@fastgpt/service/support/permission/user/auth';

 /* get all dataset by teamId or tmbId */
-async function handler(
-  req: NextApiRequest,
-  res: NextApiResponse<any>
-): Promise<DatasetSimpleItemType[]> {
-  // credential check
-  const { teamId, tmbId, permission } = await authUserPer({
-    req,
-    authToken: true,
-    per: ReadPermissionVal
-  });
-
-  const datasets = await MongoDataset.find({
-    ...mongoRPermission({ teamId, tmbId, permission }),
-    type: { $ne: DatasetTypeEnum.folder }
-  }).lean();
-
-  return datasets.map((item) => ({
+async function handler(req: NextApiRequest): Promise<DatasetSimpleItemType[]> {
+  const {
+    teamId,
+    tmbId,
+    permission: tmbPer
+  } = await authUserPer({
+    req,
+    authToken: true,
+    authApiKey: true,
+    per: ReadPermissionVal
+  });
+
+  const [myDatasets, rpList] = await Promise.all([
+    MongoDataset.find({
+      teamId,
+      type: {
+        $ne: DatasetTypeEnum.folder
+      }
+    })
+      .sort({
+        updateTime: -1
+      })
+      .lean(),
+    MongoResourcePermission.find({
+      resourceType: PerResourceTypeEnum.dataset,
+      teamId,
+      tmbId
+    }).lean()
+  ]);
+
+  const filterDatasets = myDatasets
+    .map((dataset) => {
+      const perVal = rpList.find(
+        (item) => String(item.resourceId) === String(dataset._id)
+      )?.permission;
+      const Per = new DatasetPermission({
+        per: perVal ?? dataset.defaultPermission,
+        isOwner: String(dataset.tmbId) === tmbId || tmbPer.isOwner
+      });
+
+      return {
+        ...dataset,
+        permission: Per
+      };
+    })
+    .filter((app) => app.permission.hasReadPer);
+
+  return filterDatasets.map((item) => ({
     _id: item._id,
     avatar: item.avatar,
     name: item.name,
@@ -1,39 +1,27 @@
-/*
-  Create one dataset collection
-*/
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
+import type { NextApiRequest } from 'next';
 import type { CreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api.d';
-import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
 import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
+import { NextAPI } from '@/service/middleware/entry';
+import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';

-export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  try {
-    await connectToDatabase();
-    const body = req.body as CreateDatasetCollectionParams;
-
-    const { teamId, tmbId } = await authDataset({
-      req,
-      authToken: true,
-      authApiKey: true,
-      datasetId: body.datasetId,
-      per: 'w'
-    });
-
-    const { _id } = await createOneCollection({
-      ...body,
-      teamId,
-      tmbId
-    });
-
-    jsonRes(res, {
-      data: _id
-    });
-  } catch (err) {
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
-  }
-}
+async function handler(req: NextApiRequest) {
+  const body = req.body as CreateDatasetCollectionParams;
+
+  const { teamId, tmbId } = await authDataset({
+    req,
+    authToken: true,
+    authApiKey: true,
+    datasetId: body.datasetId,
+    per: WritePermissionVal
+  });
+
+  const { _id } = await createOneCollection({
+    ...body,
+    teamId,
+    tmbId
+  });
+  return _id;
+}
+
+export default NextAPI(handler);
@@ -1,8 +1,6 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
+import type { NextApiRequest } from 'next';
 import { readFileContentFromMongo } from '@fastgpt/service/common/file/gridfs/controller';
-import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
 import { FileIdCreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api';
 import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
 import {
@@ -18,97 +16,88 @@ import { createTrainingUsage } from '@fastgpt/service/support/wallet/usage/contr
 import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
 import { getLLMModel, getVectorModel } from '@fastgpt/service/core/ai/model';
 import { rawText2Chunks } from '@fastgpt/service/core/dataset/read';
+import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
+import { NextAPI } from '@/service/middleware/entry';

-export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
+async function handler(req: NextApiRequest) {
   const { datasetId, parentId, fileId } = req.body as FileIdCreateDatasetCollectionParams;
   const trainingType = TrainingModeEnum.chunk;
-  try {
-    await connectToDatabase();
-
-    const { teamId, tmbId, dataset } = await authDataset({
-      req,
-      authToken: true,
-      authApiKey: true,
-      per: 'w',
-      datasetId: datasetId
-    });
-
-    // 1. read file
-    const { rawText, filename } = await readFileContentFromMongo({
-      teamId,
-      bucketName: BucketNameEnum.dataset,
-      fileId,
-      isQAImport: true
-    });
-    console.log(rawText);
-    // 2. split chunks
-    const chunks = rawText2Chunks({
-      rawText,
-      isQAImport: true
-    });
-    // 3. auth limit
-    await checkDatasetLimit({
-      teamId,
-      insertLen: predictDataLimitLength(trainingType, chunks)
-    });
-
-    await mongoSessionRun(async (session) => {
-      // 4. create collection
-      const { _id: collectionId } = await createOneCollection({
-        teamId,
-        tmbId,
-        name: filename,
-        parentId,
-        datasetId,
-        type: DatasetCollectionTypeEnum.file,
-        fileId,
-
-        // special metadata
-        trainingType,
-        chunkSize: 0,
-        session
-      });
-
-      // 5. create training bill
-      const { billId } = await createTrainingUsage({
-        teamId,
-        tmbId,
-        appName: filename,
-        billSource: UsageSourceEnum.training,
-        vectorModel: getVectorModel(dataset.vectorModel)?.name,
-        agentModel: getLLMModel(dataset.agentModel)?.name,
-        session
-      });
-
-      // 6. insert to training queue
-      await pushDataListToTrainingQueue({
-        teamId,
-        tmbId,
-        datasetId: dataset._id,
-        collectionId,
-        agentModel: dataset.agentModel,
-        vectorModel: dataset.vectorModel,
-        trainingMode: trainingType,
-        billId,
-        data: chunks.map((chunk, index) => ({
-          q: chunk.q,
-          a: chunk.a,
-          chunkIndex: index
-        })),
-        session
-      });
-
-      return collectionId;
-    });
-
-    jsonRes(res);
-  } catch (error) {
-    jsonRes(res, {
-      code: 500,
-      error
-    });
-  }
-}
+  const { teamId, tmbId, dataset } = await authDataset({
+    req,
+    authToken: true,
+    authApiKey: true,
+    per: WritePermissionVal,
+    datasetId: datasetId
+  });
+
+  // 1. read file
+  const { rawText, filename } = await readFileContentFromMongo({
+    teamId,
+    bucketName: BucketNameEnum.dataset,
+    fileId,
+    isQAImport: true
+  });
+  console.log(rawText);
+  // 2. split chunks
+  const chunks = rawText2Chunks({
+    rawText,
+    isQAImport: true
+  });
+
+  // 3. auth limit
+  await checkDatasetLimit({
+    teamId,
+    insertLen: predictDataLimitLength(trainingType, chunks)
+  });
+
+  await mongoSessionRun(async (session) => {
+    // 4. create collection
+    const { _id: collectionId } = await createOneCollection({
+      teamId,
+      tmbId,
+      name: filename,
+      parentId,
+      datasetId,
+      type: DatasetCollectionTypeEnum.file,
+      fileId,
+
+      // special metadata
+      trainingType,
+      chunkSize: 0,
+      session
+    });
+
+    // 5. create training bill
+    const { billId } = await createTrainingUsage({
+      teamId,
+      tmbId,
+      appName: filename,
+      billSource: UsageSourceEnum.training,
+      vectorModel: getVectorModel(dataset.vectorModel)?.name,
+      agentModel: getLLMModel(dataset.agentModel)?.name,
+      session
+    });
+
+    // 6. insert to training queue
+    await pushDataListToTrainingQueue({
+      teamId,
+      tmbId,
+      datasetId: dataset._id,
+      collectionId,
+      agentModel: dataset.agentModel,
+      vectorModel: dataset.vectorModel,
+      trainingMode: trainingType,
+      billId,
+      data: chunks.map((chunk, index) => ({
+        q: chunk.q,
+        a: chunk.a,
+        chunkIndex: index
+      })),
+      session
+    });
+
+    return collectionId;
+  });
+}
+
+export default NextAPI(handler);
@@ -1,8 +1,5 @@
-import type { NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
 import { readFileContentFromMongo } from '@fastgpt/service/common/file/gridfs/controller';
-import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
 import { FileIdCreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api';
 import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
 import {
@@ -23,11 +20,9 @@ import { MongoRawTextBuffer } from '@fastgpt/service/common/buffer/rawText/schem
 import { rawText2Chunks } from '@fastgpt/service/core/dataset/read';
 import { NextAPI } from '@/service/middleware/entry';
 import { ApiRequestProps } from '@fastgpt/service/type/next';
+import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';

-async function handler(
-  req: ApiRequestProps<FileIdCreateDatasetCollectionParams>,
-  res: NextApiResponse<any>
-) {
+async function handler(req: ApiRequestProps<FileIdCreateDatasetCollectionParams>) {
   const {
     fileId,
     trainingType = TrainingModeEnum.chunk,
@@ -37,13 +32,11 @@ async function handler(
     ...body
   } = req.body;

-  await connectToDatabase();
-
   const { teamId, tmbId, dataset } = await authDataset({
     req,
     authToken: true,
     authApiKey: true,
-    per: 'w',
+    per: WritePermissionVal,
     datasetId: body.datasetId
   });
@@ -137,13 +130,10 @@ async function handler(
       }
     );

+    // remove buffer
+    await MongoRawTextBuffer.deleteOne({ sourceId: fileId });
     return collectionId;
   });

-  // remove buffer
-  await MongoRawTextBuffer.deleteOne({ sourceId: fileId });
-
-  jsonRes(res);
 }

 export default NextAPI(handler);
@@ -1,11 +1,6 @@
-/*
-  Create one dataset collection
-*/
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
+import type { NextApiRequest } from 'next';
 import type { LinkCreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api.d';
-import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
 import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
 import {
   TrainingModeEnum,
@@ -18,83 +13,75 @@ import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants'
 import { getLLMModel, getVectorModel } from '@fastgpt/service/core/ai/model';
 import { reloadCollectionChunks } from '@fastgpt/service/core/dataset/collection/utils';
 import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
+import { NextAPI } from '@/service/middleware/entry';
+import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';

-export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  try {
-    await connectToDatabase();
-    const {
-      link,
-      trainingType = TrainingModeEnum.chunk,
-      chunkSize = 512,
-      chunkSplitter,
-      qaPrompt,
-      ...body
-    } = req.body as LinkCreateDatasetCollectionParams;
-
-    const { teamId, tmbId, dataset } = await authDataset({
-      req,
-      authToken: true,
-      authApiKey: true,
-      datasetId: body.datasetId,
-      per: 'w'
-    });
-
-    // 1. check dataset limit
-    await checkDatasetLimit({
-      teamId,
-      insertLen: predictDataLimitLength(trainingType, new Array(10))
-    });
-
-    const { _id: collectionId } = await mongoSessionRun(async (session) => {
-      // 2. create collection
-      const collection = await createOneCollection({
-        ...body,
-        name: link,
-        teamId,
-        tmbId,
-        type: DatasetCollectionTypeEnum.link,
-
-        trainingType,
-        chunkSize,
-        chunkSplitter,
-        qaPrompt,
-
-        rawLink: link,
-        session
-      });
-
-      // 3. create bill and start sync
-      const { billId } = await createTrainingUsage({
-        teamId,
-        tmbId,
-        appName: 'core.dataset.collection.Sync Collection',
-        billSource: UsageSourceEnum.training,
-        vectorModel: getVectorModel(dataset.vectorModel).name,
-        agentModel: getLLMModel(dataset.agentModel).name,
-        session
-      });
-
-      // load
-      await reloadCollectionChunks({
-        collection: {
-          ...collection.toObject(),
-          datasetId: dataset
-        },
-        tmbId,
-        billId,
-        session
-      });
-
-      return collection;
-    });
-
-    jsonRes(res, {
-      data: { collectionId }
-    });
-  } catch (err) {
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
-  }
-}
+async function handler(req: NextApiRequest) {
+  const {
+    link,
+    trainingType = TrainingModeEnum.chunk,
+    chunkSize = 512,
+    chunkSplitter,
+    qaPrompt,
+    ...body
+  } = req.body as LinkCreateDatasetCollectionParams;
+
+  const { teamId, tmbId, dataset } = await authDataset({
+    req,
+    authToken: true,
+    authApiKey: true,
+    datasetId: body.datasetId,
+    per: WritePermissionVal
+  });
+
+  // 1. check dataset limit
+  await checkDatasetLimit({
+    teamId,
+    insertLen: predictDataLimitLength(trainingType, new Array(10))
+  });
+
+  await mongoSessionRun(async (session) => {
+    // 2. create collection
+    const collection = await createOneCollection({
+      ...body,
+      name: link,
+      teamId,
+      tmbId,
+      type: DatasetCollectionTypeEnum.link,
+
+      trainingType,
+      chunkSize,
+      chunkSplitter,
+      qaPrompt,
+
+      rawLink: link,
+      session
+    });
+
+    // 3. create bill and start sync
+    const { billId } = await createTrainingUsage({
+      teamId,
+      tmbId,
+      appName: 'core.dataset.collection.Sync Collection',
+      billSource: UsageSourceEnum.training,
+      vectorModel: getVectorModel(dataset.vectorModel).name,
+      agentModel: getLLMModel(dataset.agentModel).name,
+      session
+    });
+
+    // load
+    await reloadCollectionChunks({
+      collection: {
+        ...collection.toObject(),
+        datasetId: dataset
+      },
+      tmbId,
+      billId,
+      session
+    });
+
+    return collection;
+  });
+}
+
+export default NextAPI(handler);
@@ -1,8 +1,7 @@
 import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
 import { uploadFile } from '@fastgpt/service/common/file/gridfs/controller';
 import { getUploadModel } from '@fastgpt/service/common/file/multer';
-import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
 import { FileCreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api';
 import { removeFilesByPaths } from '@fastgpt/service/common/file/utils';
 import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
@@ -23,6 +22,7 @@ import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
 import { MongoImage } from '@fastgpt/service/common/file/image/schema';
 import { readRawTextByLocalFile } from '@fastgpt/service/common/file/read/utils';
 import { NextAPI } from '@/service/middleware/entry';
+import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';

 async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
   /**
@@ -49,7 +49,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
     req,
     authToken: true,
     authApiKey: true,
-    per: 'w',
+    per: WritePermissionVal,
     datasetId: data.datasetId
   });

@@ -168,9 +168,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
       };
     });

-    jsonRes(res, {
-      data: { collectionId, results: insertResults }
-    });
+    return { collectionId, results: insertResults };
   } catch (error) {
     removeFilesByPaths(filePaths);
@@ -1,11 +1,6 @@
-/*
-  Create one dataset collection
-*/
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
+import type { NextApiRequest } from 'next';
 import type { TextCreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api.d';
-import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
 import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
 import {
   TrainingModeEnum,
@@ -20,102 +15,94 @@ import { createTrainingUsage } from '@fastgpt/service/support/wallet/usage/contr
 import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
 import { getLLMModel, getVectorModel } from '@fastgpt/service/core/ai/model';
 import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
+import { NextAPI } from '@/service/middleware/entry';
+import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
+
+async function handler(req: NextApiRequest) {
+  const {
+    name,
+    text,
+    trainingType = TrainingModeEnum.chunk,
+    chunkSize = 512,
+    chunkSplitter,
+    qaPrompt,
+    ...body
+  } = req.body as TextCreateDatasetCollectionParams;
+
+  const { teamId, tmbId, dataset } = await authDataset({
+    req,
+    authToken: true,
+    authApiKey: true,
+    datasetId: body.datasetId,
+    per: WritePermissionVal
+  });
+
+  // 1. split text to chunks
+  const { chunks } = splitText2Chunks({
+    text,
+    chunkLen: chunkSize,
+    overlapRatio: trainingType === TrainingModeEnum.chunk ? 0.2 : 0,
+    customReg: chunkSplitter ? [chunkSplitter] : []
+  });
+
+  // 2. check dataset limit
+  await checkDatasetLimit({
+    teamId,
+    insertLen: predictDataLimitLength(trainingType, chunks)
+  });
+
+  const createResult = await mongoSessionRun(async (session) => {
+    // 3. create collection
+    const { _id: collectionId } = await createOneCollection({
+      ...body,
+      teamId,
+      tmbId,
+      type: DatasetCollectionTypeEnum.virtual,
+
+      name,
+      trainingType,
+      chunkSize,
+      chunkSplitter,
+      qaPrompt,
-export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  try {
-    await connectToDatabase();
-    const {
-      name,
-      text,
-      trainingType = TrainingModeEnum.chunk,
-      chunkSize = 512,
-      chunkSplitter,
-      qaPrompt,
-      ...body
-    } = req.body as TextCreateDatasetCollectionParams;
|
|
||||||
const { teamId, tmbId, dataset } = await authDataset({
|
hashRawText: hashStr(text),
|
||||||
req,
|
rawTextLength: text.length,
|
||||||
authToken: true,
|
session
|
||||||
authApiKey: true,
|
|
||||||
datasetId: body.datasetId,
|
|
||||||
per: 'w'
|
|
||||||
});
|
});
|
||||||
|
|
||||||
// 1. split text to chunks
|
// 4. create training bill
|
||||||
const { chunks } = splitText2Chunks({
|
const { billId } = await createTrainingUsage({
|
||||||
text,
|
|
||||||
chunkLen: chunkSize,
|
|
||||||
overlapRatio: trainingType === TrainingModeEnum.chunk ? 0.2 : 0,
|
|
||||||
customReg: chunkSplitter ? [chunkSplitter] : []
|
|
||||||
});
|
|
||||||
|
|
||||||
// 2. check dataset limit
|
|
||||||
await checkDatasetLimit({
|
|
||||||
teamId,
|
teamId,
|
||||||
insertLen: predictDataLimitLength(trainingType, chunks)
|
tmbId,
|
||||||
|
appName: name,
|
||||||
|
billSource: UsageSourceEnum.training,
|
||||||
|
vectorModel: getVectorModel(dataset.vectorModel)?.name,
|
||||||
|
agentModel: getLLMModel(dataset.agentModel)?.name,
|
||||||
|
session
|
||||||
});
|
});
|
||||||
|
|
||||||
const createResult = await mongoSessionRun(async (session) => {
|
// 5. push chunks to training queue
|
||||||
// 3. create collection
|
const insertResults = await pushDataListToTrainingQueue({
|
||||||
const { _id: collectionId } = await createOneCollection({
|
teamId,
|
||||||
...body,
|
tmbId,
|
||||||
teamId,
|
datasetId: dataset._id,
|
||||||
tmbId,
|
collectionId,
|
||||||
type: DatasetCollectionTypeEnum.virtual,
|
agentModel: dataset.agentModel,
|
||||||
|
vectorModel: dataset.vectorModel,
|
||||||
name,
|
trainingMode: trainingType,
|
||||||
trainingType,
|
prompt: qaPrompt,
|
||||||
chunkSize,
|
billId,
|
||||||
chunkSplitter,
|
data: chunks.map((text, index) => ({
|
||||||
qaPrompt,
|
q: text,
|
||||||
|
chunkIndex: index
|
||||||
hashRawText: hashStr(text),
|
})),
|
||||||
rawTextLength: text.length,
|
session
|
||||||
session
|
|
||||||
});
|
|
||||||
|
|
||||||
// 4. create training bill
|
|
||||||
const { billId } = await createTrainingUsage({
|
|
||||||
teamId,
|
|
||||||
tmbId,
|
|
||||||
appName: name,
|
|
||||||
billSource: UsageSourceEnum.training,
|
|
||||||
vectorModel: getVectorModel(dataset.vectorModel)?.name,
|
|
||||||
agentModel: getLLMModel(dataset.agentModel)?.name,
|
|
||||||
session
|
|
||||||
});
|
|
||||||
|
|
||||||
// 5. push chunks to training queue
|
|
||||||
const insertResults = await pushDataListToTrainingQueue({
|
|
||||||
teamId,
|
|
||||||
tmbId,
|
|
||||||
datasetId: dataset._id,
|
|
||||||
collectionId,
|
|
||||||
agentModel: dataset.agentModel,
|
|
||||||
vectorModel: dataset.vectorModel,
|
|
||||||
trainingMode: trainingType,
|
|
||||||
prompt: qaPrompt,
|
|
||||||
billId,
|
|
||||||
data: chunks.map((text, index) => ({
|
|
||||||
q: text,
|
|
||||||
chunkIndex: index
|
|
||||||
})),
|
|
||||||
session
|
|
||||||
});
|
|
||||||
|
|
||||||
return { collectionId, results: insertResults };
|
|
||||||
});
|
});
|
||||||
|
|
||||||
jsonRes(res, {
|
return { collectionId, results: insertResults };
|
||||||
data: createResult
|
});
|
||||||
});
|
|
||||||
} catch (err) {
|
return createResult;
|
||||||
jsonRes(res, {
|
|
||||||
code: 500,
|
|
||||||
error: err
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export const config = {
|
export const config = {
|
||||||
@ -125,3 +112,5 @@ export const config = {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
export default NextAPI(handler);
|
||||||
|
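
Every multi-document write in the new handlers goes through mongoSessionRun, so the collection insert, the training bill, and the queue push above either all commit or all roll back. The helper is imported from @fastgpt/service/common/mongo/sessionRun and its body is not in this diff; a sketch of the standard mongoose transaction wrapper it presumably resembles:

    import mongoose, { ClientSession } from 'mongoose';

    // Sketch, assuming a replica-set connection (MongoDB transactions
    // require one). Run fn inside a transaction; commit on success,
    // abort and rethrow on any error.
    export async function mongoSessionRun<T>(fn: (session: ClientSession) => Promise<T>): Promise<T> {
      const session = await mongoose.startSession();
      session.startTransaction();
      try {
        const result = await fn(session);
        await session.commitTransaction();
        return result;
      } catch (error) {
        await session.abortTransaction();
        throw error;
      } finally {
        session.endSession();
      }
    }
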
@@ -1,50 +1,42 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
+import type { NextApiRequest } from 'next';
 import { findCollectionAndChild } from '@fastgpt/service/core/dataset/collection/utils';
 import { delCollectionAndRelatedSources } from '@fastgpt/service/core/dataset/collection/controller';
-import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
 import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
+import { NextAPI } from '@/service/middleware/entry';
+import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
+import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';

-export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  try {
-    await connectToDatabase();
-
-    const { id: collectionId } = req.query as { id: string };
-
-    if (!collectionId) {
-      throw new Error('CollectionIdId is required');
-    }
+async function handler(req: NextApiRequest) {
+  const { id: collectionId } = req.query as { id: string };
+
+  if (!collectionId) {
+    return Promise.reject(CommonErrEnum.missingParams);
+  }

   const { teamId, collection } = await authDatasetCollection({
     req,
     authToken: true,
     authApiKey: true,
     collectionId,
-    per: 'w'
+    per: WritePermissionVal
   });

   // find all delete id
   const collections = await findCollectionAndChild({
     teamId,
     datasetId: collection.datasetId._id,
     collectionId,
     fields: '_id teamId datasetId fileId metadata'
   });

   // delete
   await mongoSessionRun((session) =>
     delCollectionAndRelatedSources({
       collections,
       session
     })
   );
-
-  jsonRes(res);
-  } catch (err) {
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
-  }
 }
+
+export default NextAPI(handler);

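
Note the shape of the new guard: the handler rejects with a bare enum value (CommonErrEnum.missingParams) rather than throwing an Error with a hand-written message. Presumably the NextAPI error path looks the rejected value up in the error-code tables this commit extends; something along the lines of this sketch, whose lookup details are assumptions:

    import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';

    // Sketch: map a rejected statusText to its registered i18n message.
    // The numeric codes and exact lookup are not shown in this diff.
    function toErrorBody(err: unknown, errMap: Record<string, { code: number; message: string }>) {
      if (typeof err === 'string' && errMap[err]) {
        return errMap[err]; // e.g. CommonErrEnum.missingParams -> 'error.missingParams'
      }
      return { code: 500, message: String(err) };
    }
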
@@ -1,50 +1,43 @@
 /*
   Get one dataset collection detail
 */
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
-import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
-import { DatasetCollectionItemType } from '@fastgpt/global/core/dataset/type';
+import type { NextApiRequest } from 'next';
+import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
 import { BucketNameEnum } from '@fastgpt/global/common/file/constants';
 import { getFileById } from '@fastgpt/service/common/file/gridfs/controller';
 import { getCollectionSourceData } from '@fastgpt/global/core/dataset/collection/utils';
+import { NextAPI } from '@/service/middleware/entry';
+import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
+import { DatasetCollectionItemType } from '@fastgpt/global/core/dataset/type';
+import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';

-export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  try {
-    await connectToDatabase();
-    const { id } = req.query as { id: string };
+async function handler(req: NextApiRequest): Promise<DatasetCollectionItemType> {
+  const { id } = req.query as { id: string };

   if (!id) {
-    throw new Error('Id is required');
+    return Promise.reject(CommonErrEnum.missingParams);
   }

   // Verify credentials
-  const { collection, canWrite } = await authDatasetCollection({
+  const { collection, permission } = await authDatasetCollection({
     req,
     authToken: true,
     authApiKey: true,
     collectionId: id,
-    per: 'r'
+    per: ReadPermissionVal
   });

   // get file
   const file = collection?.fileId
     ? await getFileById({ bucketName: BucketNameEnum.dataset, fileId: collection.fileId })
     : undefined;

-  jsonRes<DatasetCollectionItemType>(res, {
-    data: {
-      ...collection,
-      canWrite,
-      ...getCollectionSourceData(collection),
-      file
-    }
-  });
-  } catch (err) {
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
-  }
+  return {
+    ...collection,
+    ...getCollectionSourceData(collection),
+    permission,
+    file
+  };
 }
+
+export default NextAPI(handler);

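
The detail response now carries the whole permission object instead of a precomputed canWrite boolean, leaving the client to derive what the current member may do. A sketch of how the frontend side might consume it (the real Permission type ships elsewhere in the PR; the field names and bit values here are illustrative assumptions):

    // Illustrative wrapper only; not the PR's actual class.
    class Permission {
      constructor(private readonly value: number) {}
      get hasReadPer() { return (this.value & 0b100) === 0b100; }
      get hasWritePer() { return (this.value & 0b110) === 0b110; }
    }

    const permission = new Permission(0b110);
    if (permission.hasWritePer) {
      // e.g. enable the edit/delete actions on a CollectionCard
    }
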
@@ -1,179 +1,167 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
+import type { NextApiRequest } from 'next';
 import { DatasetTrainingCollectionName } from '@fastgpt/service/core/dataset/training/schema';
 import { Types } from '@fastgpt/service/common/mongo';
 import type { DatasetCollectionsListItemType } from '@/global/core/dataset/type.d';
 import type { GetDatasetCollectionsProps } from '@/global/core/api/datasetReq';
-import { PagingData } from '@/types';
 import { MongoDatasetCollection } from '@fastgpt/service/core/dataset/collection/schema';
 import { DatasetCollectionTypeEnum } from '@fastgpt/global/core/dataset/constants';
-import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
 import { DatasetDataCollectionName } from '@fastgpt/service/core/dataset/data/schema';
 import { startTrainingQueue } from '@/service/core/dataset/training/utils';
+import { NextAPI } from '@/service/middleware/entry';
+import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';

-export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  try {
-    await connectToDatabase();
-
+async function handler(req: NextApiRequest) {
   let {
     pageNum = 1,
     pageSize = 10,
     datasetId,
     parentId = null,
     searchText = '',
     selectFolder = false,
     simple = false
   } = req.body as GetDatasetCollectionsProps;
   searchText = searchText?.replace(/'/g, '');
   pageSize = Math.min(pageSize, 30);

   // auth dataset and get my role
-  const { teamId, tmbId, canWrite } = await authDataset({
+  const { teamId, permission } = await authDataset({
     req,
     authToken: true,
     authApiKey: true,
     datasetId,
-    per: 'r'
+    per: ReadPermissionVal
   });

   const match = {
     teamId: new Types.ObjectId(teamId),
     datasetId: new Types.ObjectId(datasetId),
     parentId: parentId ? new Types.ObjectId(parentId) : null,
     ...(selectFolder ? { type: DatasetCollectionTypeEnum.folder } : {}),
     ...(searchText
       ? {
           name: new RegExp(searchText, 'i')
         }
       : {})
   };

   // not count data amount
   if (simple) {
     const collections = await MongoDatasetCollection.find(match, '_id parentId type name')
       .sort({
         updateTime: -1
       })
       .lean();
-    return jsonRes<PagingData<DatasetCollectionsListItemType>>(res, {
-      data: {
-        pageNum,
-        pageSize,
-        data: await Promise.all(
-          collections.map(async (item) => ({
-            ...item,
-            dataAmount: 0,
-            trainingAmount: 0,
-            canWrite // admin or team owner can write
-          }))
-        ),
-        total: await MongoDatasetCollection.countDocuments(match)
-      }
-    });
+    return {
+      pageNum,
+      pageSize,
+      data: await Promise.all(
+        collections.map(async (item) => ({
+          ...item,
+          dataAmount: 0,
+          trainingAmount: 0,
+          permission
+        }))
+      ),
+      total: await MongoDatasetCollection.countDocuments(match)
+    };
   }

   const [collections, total]: [DatasetCollectionsListItemType[], number] = await Promise.all([
     MongoDatasetCollection.aggregate([
       {
         $match: match
       },
       {
         $sort: { updateTime: -1 }
       },
       {
         $skip: (pageNum - 1) * pageSize
       },
       {
         $limit: pageSize
       },
       // count training data
       {
         $lookup: {
           from: DatasetTrainingCollectionName,
           let: { id: '$_id', team_id: match.teamId, dataset_id: match.datasetId },
           pipeline: [
             {
               $match: {
                 $expr: {
                   $and: [{ $eq: ['$teamId', '$$team_id'] }, { $eq: ['$collectionId', '$$id'] }]
                 }
               }
             },
             { $count: 'count' }
           ],
           as: 'trainingCount'
         }
       },
       // count collection total data
       {
         $lookup: {
           from: DatasetDataCollectionName,
           let: { id: '$_id', team_id: match.teamId, dataset_id: match.datasetId },
           pipeline: [
             {
               $match: {
                 $expr: {
                   $and: [
                     { $eq: ['$teamId', '$$team_id'] },
                     { $eq: ['$datasetId', '$$dataset_id'] },
                     { $eq: ['$collectionId', '$$id'] }
                   ]
                 }
               }
             },
             { $count: 'count' }
           ],
           as: 'dataCount'
         }
       },
       {
         $project: {
           _id: 1,
           parentId: 1,
           tmbId: 1,
           name: 1,
           type: 1,
           status: 1,
           updateTime: 1,
           fileId: 1,
           rawLink: 1,
           dataAmount: {
             $ifNull: [{ $arrayElemAt: ['$dataCount.count', 0] }, 0]
           },
           trainingAmount: {
             $ifNull: [{ $arrayElemAt: ['$trainingCount.count', 0] }, 0]
           }
         }
       }
     ]),
     MongoDatasetCollection.countDocuments(match)
   ]);

   const data = await Promise.all(
-    collections.map(async (item, i) => ({
+    collections.map(async (item) => ({
       ...item,
-      canWrite: String(item.tmbId) === tmbId || canWrite
+      permission
     }))
   );

   if (data.find((item) => item.trainingAmount > 0)) {
     startTrainingQueue();
   }

   // count collections
-  jsonRes<PagingData<DatasetCollectionsListItemType>>(res, {
-    data: {
-      pageNum,
-      pageSize,
-      data,
-      total
-    }
-  });
-  } catch (err) {
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
-  }
+  return {
+    pageNum,
+    pageSize,
+    data,
+    total
+  };
 }
+
+export default NextAPI(handler);

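
Each aggregated row decorates a collection with two counts computed by $lookup sub-pipelines that end in { $count: 'count' }, so the joined field is either a one-element array or empty; $ifNull plus $arrayElemAt then collapse it to a plain number. In plain TypeScript the same collapse looks like this (values illustrative):

    // One raw row before $project flattens the lookup arrays.
    const row = {
      name: 'FAQ.md',
      trainingCount: [{ count: 3 }], // three chunks still queued for training
      dataCount: [] as { count: number }[] // no indexed data yet
    };
    const trainingAmount = row.trainingCount[0]?.count ?? 0; // 3
    const dataAmount = row.dataCount[0]?.count ?? 0; // 0
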
@@ -1,34 +1,24 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
-import type { ParentTreePathItemType } from '@fastgpt/global/common/parentFolder/type.d';
+import type { NextApiRequest } from 'next';
 import { getDatasetCollectionPaths } from '@fastgpt/service/core/dataset/collection/utils';
-import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
+import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';

-export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  try {
-    await connectToDatabase();
-
-    const { parentId } = req.query as { parentId: string };
+export default async function handler(req: NextApiRequest) {
+  const { parentId } = req.query as { parentId: string };

-    if (!parentId) {
-      return jsonRes(res, {
-        data: []
-      });
-    }
+  if (!parentId) {
+    return [];
+  }

-    await authDatasetCollection({ req, authToken: true, collectionId: parentId, per: 'r' });
-    const paths = await getDatasetCollectionPaths({
-      parentId
-    });
-
-    jsonRes<ParentTreePathItemType[]>(res, {
-      data: paths
-    });
-  } catch (err) {
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
-  }
+  await authDatasetCollection({
+    req,
+    authToken: true,
+    collectionId: parentId,
+    per: ReadPermissionVal
+  });
+  const paths = await getDatasetCollectionPaths({
+    parentId
+  });
+
+  return paths;
 }

@@ -1,9 +1,10 @@
-import type { ApiRequestProps, ApiResponseType } from '@fastgpt/service/type/next';
+import type { ApiRequestProps } from '@fastgpt/service/type/next';
 import { NextAPI } from '@/service/middleware/entry';
-import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
 import { DatasetCollectionTypeEnum } from '@fastgpt/global/core/dataset/constants';
 import { createFileToken } from '@fastgpt/service/support/permission/controller';
 import { BucketNameEnum, ReadFileBaseUrl } from '@fastgpt/global/common/file/constants';
+import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';

 export type readCollectionSourceQuery = {
   collectionId: string;
@@ -17,15 +18,14 @@ export type readCollectionSourceResponse = {
 };

 async function handler(
-  req: ApiRequestProps<readCollectionSourceBody, readCollectionSourceQuery>,
-  res: ApiResponseType<any>
+  req: ApiRequestProps<readCollectionSourceBody, readCollectionSourceQuery>
 ): Promise<readCollectionSourceResponse> {
   const { collection, teamId, tmbId } = await authDatasetCollection({
     req,
     authToken: true,
     authApiKey: true,
     collectionId: req.query.collectionId,
-    per: 'r'
+    per: ReadPermissionVal
   });

   const sourceUrl = await (async () => {

@ -1,7 +1,5 @@
|
|||||||
import type { NextApiRequest, NextApiResponse } from 'next';
|
import type { NextApiRequest } from 'next';
|
||||||
import { jsonRes } from '@fastgpt/service/common/response';
|
import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
|
||||||
import { connectToDatabase } from '@/service/mongo';
|
|
||||||
import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
|
|
||||||
import {
|
import {
|
||||||
getCollectionAndRawText,
|
getCollectionAndRawText,
|
||||||
reloadCollectionChunks
|
reloadCollectionChunks
|
||||||
@ -17,98 +15,90 @@ import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants'
|
|||||||
import { getLLMModel, getVectorModel } from '@fastgpt/service/core/ai/model';
|
import { getLLMModel, getVectorModel } from '@fastgpt/service/core/ai/model';
|
||||||
import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
|
import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
|
||||||
import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
|
import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
|
||||||
|
import { NextAPI } from '@/service/middleware/entry';
|
||||||
|
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
|
||||||
|
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
|
||||||
|
|
||||||
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
|
async function handler(req: NextApiRequest) {
|
||||||
try {
|
const { collectionId } = req.body as { collectionId: string };
|
||||||
await connectToDatabase();
|
|
||||||
|
|
||||||
const { collectionId } = req.body as { collectionId: string };
|
if (!collectionId) {
|
||||||
|
return Promise.reject(CommonErrEnum.missingParams);
|
||||||
if (!collectionId) {
|
|
||||||
throw new Error('CollectionIdId is required');
|
|
||||||
}
|
|
||||||
|
|
||||||
const { collection, tmbId } = await authDatasetCollection({
|
|
||||||
req,
|
|
||||||
authToken: true,
|
|
||||||
collectionId,
|
|
||||||
per: 'w'
|
|
||||||
});
|
|
||||||
|
|
||||||
if (collection.type !== DatasetCollectionTypeEnum.link || !collection.rawLink) {
|
|
||||||
return Promise.reject(DatasetErrEnum.unLinkCollection);
|
|
||||||
}
|
|
||||||
|
|
||||||
const { title, rawText, isSameRawText } = await getCollectionAndRawText({
|
|
||||||
collection
|
|
||||||
});
|
|
||||||
|
|
||||||
if (isSameRawText) {
|
|
||||||
return jsonRes(res, {
|
|
||||||
data: DatasetCollectionSyncResultEnum.sameRaw
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Not the same original text, create and reload */
|
|
||||||
|
|
||||||
const vectorModelData = getVectorModel(collection.datasetId.vectorModel);
|
|
||||||
const agentModelData = getLLMModel(collection.datasetId.agentModel);
|
|
||||||
|
|
||||||
await mongoSessionRun(async (session) => {
|
|
||||||
// create training bill
|
|
||||||
const { billId } = await createTrainingUsage({
|
|
||||||
teamId: collection.teamId,
|
|
||||||
tmbId,
|
|
||||||
appName: 'core.dataset.collection.Sync Collection',
|
|
||||||
billSource: UsageSourceEnum.training,
|
|
||||||
vectorModel: vectorModelData.name,
|
|
||||||
agentModel: agentModelData.name,
|
|
||||||
session
|
|
||||||
});
|
|
||||||
|
|
||||||
// create a collection and delete old
|
|
||||||
const newCol = await createOneCollection({
|
|
||||||
teamId: collection.teamId,
|
|
||||||
tmbId: collection.tmbId,
|
|
||||||
parentId: collection.parentId,
|
|
||||||
datasetId: collection.datasetId._id,
|
|
||||||
name: title || collection.name,
|
|
||||||
type: collection.type,
|
|
||||||
trainingType: collection.trainingType,
|
|
||||||
chunkSize: collection.chunkSize,
|
|
||||||
fileId: collection.fileId,
|
|
||||||
rawLink: collection.rawLink,
|
|
||||||
metadata: collection.metadata,
|
|
||||||
createTime: collection.createTime,
|
|
||||||
session
|
|
||||||
});
|
|
||||||
|
|
||||||
// start load
|
|
||||||
await reloadCollectionChunks({
|
|
||||||
collection: {
|
|
||||||
...newCol.toObject(),
|
|
||||||
datasetId: collection.datasetId
|
|
||||||
},
|
|
||||||
tmbId,
|
|
||||||
billId,
|
|
||||||
rawText,
|
|
||||||
session
|
|
||||||
});
|
|
||||||
|
|
||||||
// delete old collection
|
|
||||||
await delCollectionAndRelatedSources({
|
|
||||||
collections: [collection],
|
|
||||||
session
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
jsonRes(res, {
|
|
||||||
data: DatasetCollectionSyncResultEnum.success
|
|
||||||
});
|
|
||||||
} catch (err) {
|
|
||||||
jsonRes(res, {
|
|
||||||
code: 500,
|
|
||||||
error: err
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const { collection, tmbId } = await authDatasetCollection({
|
||||||
|
req,
|
||||||
|
authToken: true,
|
||||||
|
collectionId,
|
||||||
|
per: WritePermissionVal
|
||||||
|
});
|
||||||
|
|
||||||
|
if (collection.type !== DatasetCollectionTypeEnum.link || !collection.rawLink) {
|
||||||
|
return Promise.reject(DatasetErrEnum.unLinkCollection);
|
||||||
|
}
|
||||||
|
|
||||||
|
const { title, rawText, isSameRawText } = await getCollectionAndRawText({
|
||||||
|
collection
|
||||||
|
});
|
||||||
|
|
||||||
|
if (isSameRawText) {
|
||||||
|
return DatasetCollectionSyncResultEnum.sameRaw;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Not the same original text, create and reload */
|
||||||
|
|
||||||
|
const vectorModelData = getVectorModel(collection.datasetId.vectorModel);
|
||||||
|
const agentModelData = getLLMModel(collection.datasetId.agentModel);
|
||||||
|
|
||||||
|
await mongoSessionRun(async (session) => {
|
||||||
|
// create training bill
|
||||||
|
const { billId } = await createTrainingUsage({
|
||||||
|
teamId: collection.teamId,
|
||||||
|
tmbId,
|
||||||
|
appName: 'core.dataset.collection.Sync Collection',
|
||||||
|
billSource: UsageSourceEnum.training,
|
||||||
|
vectorModel: vectorModelData.name,
|
||||||
|
agentModel: agentModelData.name,
|
||||||
|
session
|
||||||
|
});
|
||||||
|
|
||||||
|
// create a collection and delete old
|
||||||
|
const newCol = await createOneCollection({
|
||||||
|
teamId: collection.teamId,
|
||||||
|
tmbId: collection.tmbId,
|
||||||
|
parentId: collection.parentId,
|
||||||
|
datasetId: collection.datasetId._id,
|
||||||
|
name: title || collection.name,
|
||||||
|
type: collection.type,
|
||||||
|
trainingType: collection.trainingType,
|
||||||
|
chunkSize: collection.chunkSize,
|
||||||
|
fileId: collection.fileId,
|
||||||
|
rawLink: collection.rawLink,
|
||||||
|
metadata: collection.metadata,
|
||||||
|
createTime: collection.createTime,
|
||||||
|
session
|
||||||
|
});
|
||||||
|
|
||||||
|
// start load
|
||||||
|
await reloadCollectionChunks({
|
||||||
|
collection: {
|
||||||
|
...newCol.toObject(),
|
||||||
|
datasetId: collection.datasetId
|
||||||
|
},
|
||||||
|
tmbId,
|
||||||
|
billId,
|
||||||
|
rawText,
|
||||||
|
session
|
||||||
|
});
|
||||||
|
|
||||||
|
// delete old collection
|
||||||
|
await delCollectionAndRelatedSources({
|
||||||
|
collections: [collection],
|
||||||
|
session
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
return DatasetCollectionSyncResultEnum.success;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export default NextAPI(handler);
|
||||||
|
@@ -1,43 +1,36 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
+import type { NextApiRequest } from 'next';
 import type { UpdateDatasetCollectionParams } from '@/global/core/api/datasetReq.d';
 import { MongoDatasetCollection } from '@fastgpt/service/core/dataset/collection/schema';
 import { getCollectionUpdateTime } from '@fastgpt/service/core/dataset/collection/utils';
-import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
+import { NextAPI } from '@/service/middleware/entry';
+import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
+import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';

-export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  try {
-    await connectToDatabase();
-    const { id, parentId, name } = req.body as UpdateDatasetCollectionParams;
+async function handler(req: NextApiRequest) {
+  const { id, parentId, name } = req.body as UpdateDatasetCollectionParams;

   if (!id) {
-    throw new Error('Missing parameters');
+    return Promise.reject(CommonErrEnum.missingParams);
   }

   // Verify credentials
   await authDatasetCollection({
     req,
     authToken: true,
     authApiKey: true,
     collectionId: id,
-    per: 'w'
+    per: WritePermissionVal
   });

   const updateFields: Record<string, any> = {
     ...(parentId !== undefined && { parentId: parentId || null }),
     ...(name && { name, updateTime: getCollectionUpdateTime({ name }) })
   };

   await MongoDatasetCollection.findByIdAndUpdate(id, {
     $set: updateFields
   });
-
-  jsonRes(res);
-  } catch (err) {
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
-  }
 }
+
+export default NextAPI(handler);

@@ -1,6 +1,4 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
+import type { NextApiRequest } from 'next';
 import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
 import type { CreateDatasetParams } from '@/global/core/dataset/api.d';
 import { createDefaultCollection } from '@fastgpt/service/core/dataset/collection/controller';
@@ -8,62 +6,59 @@ import { authUserPer } from '@fastgpt/service/support/permission/user/auth';
 import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
 import { getLLMModel, getVectorModel, getDatasetModel } from '@fastgpt/service/core/ai/model';
 import { checkTeamDatasetLimit } from '@fastgpt/service/support/permission/teamLimit';
-import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
+import { NullPermission, WritePermissionVal } from '@fastgpt/global/support/permission/constant';
+import { NextAPI } from '@/service/middleware/entry';

-export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  try {
-    await connectToDatabase();
-    const {
-      parentId,
-      name,
-      type = DatasetTypeEnum.dataset,
-      avatar,
-      vectorModel = global.vectorModels[0].model,
-      agentModel = getDatasetModel().model
-    } = req.body as CreateDatasetParams;
+async function handler(req: NextApiRequest) {
+  const {
+    parentId,
+    name,
+    type = DatasetTypeEnum.dataset,
+    avatar,
+    vectorModel = global.vectorModels[0].model,
+    agentModel = getDatasetModel().model,
+    defaultPermission = NullPermission
+  } = req.body as CreateDatasetParams;

   // auth
   const { teamId, tmbId } = await authUserPer({
     req,
     authToken: true,
     authApiKey: true,
     per: WritePermissionVal
   });

   // check model valid
   const vectorModelStore = getVectorModel(vectorModel);
   const agentModelStore = getLLMModel(agentModel);
   if (!vectorModelStore || !agentModelStore) {
-    throw new Error('vectorModel or qaModel is invalid');
+    throw new Error('vectorModel or qaModel is invalid'); // TODO: use enum code
   }

   // check limit
   await checkTeamDatasetLimit(teamId);

   const { _id } = await MongoDataset.create({
     name,
     teamId,
     tmbId,
     vectorModel,
     agentModel,
     avatar,
     parentId: parentId || null,
-    type
+    type,
+    defaultPermission
   });

   if (type === DatasetTypeEnum.dataset) {
     await createDefaultCollection({
       datasetId: _id,
       teamId,
       tmbId
     });
   }

-  jsonRes(res, { data: _id });
-  } catch (err) {
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
-  }
+  return _id;
 }
+
+export default NextAPI(handler);

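
The create route is where the PR's new defaultPermission parameter enters: it is stored on the dataset document and falls back to NullPermission when the caller omits it. A hedged example of the request body (the model names are placeholders, not values from this diff):

    import type { CreateDatasetParams } from '@/global/core/dataset/api.d';
    import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
    import { NullPermission } from '@fastgpt/global/support/permission/constant';

    const body: CreateDatasetParams = {
      name: 'Support KB',
      type: DatasetTypeEnum.dataset,
      vectorModel: 'text-embedding-model', // placeholder
      agentModel: 'llm-model', // placeholder
      defaultPermission: NullPermission // optional; this is also the server-side default
    };
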
@@ -1,32 +1,31 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { authDatasetData } from '@/service/support/permission/auth/dataset';
+import type { NextApiRequest } from 'next';
+import { authDatasetData } from '@fastgpt/service/support/permission/dataset/auth';
 import { deleteDatasetData } from '@/service/core/dataset/data/controller';
 import { NextAPI } from '@/service/middleware/entry';
+import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
+import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';

-async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
+async function handler(req: NextApiRequest) {
   const { id: dataId } = req.query as {
     id: string;
   };

   if (!dataId) {
-    throw new Error('dataId is required');
+    Promise.reject(CommonErrEnum.missingParams);
   }

   // Verify credentials
-  const { teamId, datasetData } = await authDatasetData({
+  const { datasetData } = await authDatasetData({
     req,
     authToken: true,
     authApiKey: true,
     dataId,
-    per: 'w'
+    per: WritePermissionVal
   });

   await deleteDatasetData(datasetData);

-  jsonRes(res, {
-    data: 'success'
-  });
+  return 'success';
 }

 export default NextAPI(handler);

@@ -1,8 +1,7 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
-import { authDatasetData } from '@/service/support/permission/auth/dataset';
+import type { NextApiRequest } from 'next';
 import { NextAPI } from '@/service/middleware/entry';
+import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
+import { authDatasetData } from '@fastgpt/service/support/permission/dataset/auth';

 export type Response = {
   id: string;
@@ -11,7 +10,7 @@ export type Response = {
   source: string;
 };

-async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
+async function handler(req: NextApiRequest) {
   const { id: dataId } = req.query as {
     id: string;
   };
@@ -22,12 +21,10 @@ async function handler(req: NextApiRequest) {
     authToken: true,
     authApiKey: true,
     dataId,
-    per: 'r'
+    per: ReadPermissionVal
   });

-  jsonRes(res, {
-    data: datasetData
-  });
+  return datasetData;
 }

 export default NextAPI(handler);

@@ -2,30 +2,30 @@
   insert one data to dataset (immediately insert)
   manual input or mark data
 */
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
+import type { NextApiRequest } from 'next';
 import { countPromptTokens } from '@fastgpt/service/common/string/tiktoken/index';
 import { getVectorModel } from '@fastgpt/service/core/ai/model';
 import { hasSameValue } from '@/service/core/dataset/data/utils';
 import { insertData2Dataset } from '@/service/core/dataset/data/controller';
-import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
 import { getCollectionWithDataset } from '@fastgpt/service/core/dataset/controller';
 import { pushGenerateVectorUsage } from '@/service/support/wallet/usage/push';
 import { InsertOneDatasetDataProps } from '@/global/core/dataset/api';
 import { simpleText } from '@fastgpt/global/common/string/tools';
 import { checkDatasetLimit } from '@fastgpt/service/support/permission/teamLimit';
 import { NextAPI } from '@/service/middleware/entry';
+import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
+import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';

-async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
+async function handler(req: NextApiRequest) {
   const { collectionId, q, a, indexes } = req.body as InsertOneDatasetDataProps;

   if (!q) {
-    throw new Error('q is required');
+    Promise.reject(CommonErrEnum.missingParams);
   }

   if (!collectionId) {
-    throw new Error('collectionId is required');
+    Promise.reject(CommonErrEnum.missingParams);
   }

   // Verify credentials
@@ -34,7 +34,7 @@ async function handler(req: NextApiRequest) {
     authToken: true,
     authApiKey: true,
     collectionId,
-    per: 'w'
+    per: WritePermissionVal
   });

   await checkDatasetLimit({
@@ -93,9 +93,7 @@ async function handler(req: NextApiRequest) {
     model: vectorModelData.model
   });

-  jsonRes<string>(res, {
-    data: insertId
-  });
+  return insertId;
 }

 export default NextAPI(handler);

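
One wrinkle worth noting in the two insert-data guards above (and in the data delete route earlier): Promise.reject(CommonErrEnum.missingParams) is created but not returned, so the handler continues past a missing q or collectionId and the rejection goes unhandled. The surrounding routes use the short-circuiting form, which here would read:

    if (!q || !collectionId) {
      return Promise.reject(CommonErrEnum.missingParams);
    }
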
@@ -1,15 +1,12 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
-import type { DatasetDataListItemType } from '@/global/core/dataset/type.d';
+import type { NextApiRequest } from 'next';
 import type { GetDatasetDataListProps } from '@/global/core/api/datasetReq';
-import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
 import { MongoDatasetData } from '@fastgpt/service/core/dataset/data/schema';
-import { PagingData } from '@/types';
 import { replaceRegChars } from '@fastgpt/global/common/string/tools';
 import { NextAPI } from '@/service/middleware/entry';
+import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';

-async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
+async function handler(req: NextApiRequest) {
   let {
     pageNum = 1,
     pageSize = 10,
@@ -25,7 +22,7 @@ async function handler(req: NextApiRequest) {
     authToken: true,
     authApiKey: true,
     collectionId,
-    per: 'r'
+    per: ReadPermissionVal
   });

   searchText = replaceRegChars(searchText).replace(/'/g, '');
@@ -50,14 +47,12 @@ async function handler(req: NextApiRequest) {
     MongoDatasetData.countDocuments(match)
   ]);

-  jsonRes<PagingData<DatasetDataListItemType>>(res, {
-    data: {
-      pageNum,
-      pageSize,
-      data,
-      total
-    }
-  });
+  return {
+    pageNum,
+    pageSize,
+    data,
+    total
+  };
 }

 export default NextAPI(handler);

@@ -5,11 +5,12 @@ import type {
   PushDatasetDataProps,
   PushDatasetDataResponse
 } from '@fastgpt/global/core/dataset/api.d';
-import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
 import { checkDatasetLimit } from '@fastgpt/service/support/permission/teamLimit';
 import { predictDataLimitLength } from '@fastgpt/global/core/dataset/utils';
 import { pushDataListToTrainingQueue } from '@fastgpt/service/core/dataset/training/controller';
 import { NextAPI } from '@/service/middleware/entry';
+import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';

 async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
   const body = req.body as PushDatasetDataProps;
@@ -29,7 +30,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
     authToken: true,
     authApiKey: true,
     collectionId,
-    per: 'w'
+    per: WritePermissionVal
   });

   // auth dataset limit

@@ -1,14 +1,13 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
+import type { NextApiRequest } from 'next';
 import { updateData2Dataset } from '@/service/core/dataset/data/controller';
-import { authDatasetData } from '@/service/support/permission/auth/dataset';
 import { pushGenerateVectorUsage } from '@/service/support/wallet/usage/push';
 import { UpdateDatasetDataProps } from '@/global/core/dataset/api';
 import { checkDatasetLimit } from '@fastgpt/service/support/permission/teamLimit';
 import { NextAPI } from '@/service/middleware/entry';
+import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
+import { authDatasetData } from '@fastgpt/service/support/permission/dataset/auth';

-async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
+async function handler(req: NextApiRequest) {
   const { id, q = '', a, indexes = [] } = req.body as UpdateDatasetDataProps;

   // auth data permission
@@ -23,7 +22,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
     authToken: true,
     authApiKey: true,
     dataId: id,
-    per: 'w'
+    per: WritePermissionVal
   });

   // auth team balance
@@ -46,8 +45,6 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
     tokens,
     model: vectorModel
   });

-  jsonRes(res);
 }

 export default NextAPI(handler);
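The string permissions 'r' / 'w' / 'owner' used by the old auth calls are replaced everywhere by numeric values from @fastgpt/global/support/permission/constant. A plausible sketch of those constants, assuming bit-mask semantics (the exact values are an assumption, not shown in this commit):

// Assumed bit-mask layout; the authoritative values live in
// @fastgpt/global/support/permission/constant.
export const ReadPermissionVal = 0b100; // read only
export const WritePermissionVal = 0b110; // write implies read
export const ManagePermissionVal = 0b111; // manage implies write and read
export const OwnerPermissionVal = ~0 >>> 0; // owner passes every mask test

// A permission check then reduces to a mask comparison:
export const checkPer = (current: number, required: number): boolean =>
  (current & required) === required;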
@@ -1,54 +1,47 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
-import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
+import type { NextApiRequest } from 'next';
+import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
 import { delDatasetRelevantData } from '@fastgpt/service/core/dataset/controller';
 import { findDatasetAndAllChildren } from '@fastgpt/service/core/dataset/controller';
 import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
 import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
+import { NextAPI } from '@/service/middleware/entry';
+import { OwnerPermissionVal } from '@fastgpt/global/support/permission/constant';
+import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';

-export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  try {
-    await connectToDatabase();
+async function handler(req: NextApiRequest) {
   const { id: datasetId } = req.query as {
     id: string;
   };

   if (!datasetId) {
-    throw new Error('缺少参数');
+    return Promise.reject(CommonErrEnum.missingParams);
   }

   // auth owner
   const { teamId } = await authDataset({
     req,
     authToken: true,
     authApiKey: true,
     datasetId,
-    per: 'owner'
+    per: OwnerPermissionVal
   });

   const datasets = await findDatasetAndAllChildren({
     teamId,
     datasetId
   });

   // delete all dataset.data and pg data
   await mongoSessionRun(async (session) => {
     // delete dataset data
     await delDatasetRelevantData({ datasets, session });
     await MongoDataset.deleteMany(
       {
         _id: { $in: datasets.map((d) => d._id) }
       },
       { session }
     );
   });
-
-  jsonRes(res);
-  } catch (err) {
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
-  }
 }
+
+export default NextAPI(handler);
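Deleting a dataset touches the dataset documents, all of their children, and the related vector data, so the handler wraps everything in mongoSessionRun. A sketch of what such a helper typically does, assuming it wraps a mongoose transaction (the real helper is in @fastgpt/service/common/mongo/sessionRun):

import mongoose, { ClientSession } from 'mongoose';

// Hypothetical sketch: run the callback inside a transaction, commit on
// success, abort and rethrow on failure, and always release the session.
export async function mongoSessionRun<T>(
  fn: (session: ClientSession) => Promise<T>
): Promise<T> {
  const session = await mongoose.startSession();
  try {
    session.startTransaction();
    const result = await fn(session);
    await session.commitTransaction();
    return result;
  } catch (error) {
    await session.abortTransaction();
    throw error;
  } finally {
    await session.endSession();
  }
}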
@@ -1,43 +1,39 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
 import { getLLMModel, getVectorModel } from '@fastgpt/service/core/ai/model';
-import type { DatasetItemType } from '@fastgpt/global/core/dataset/type.d';
-import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
+import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
+import { NextAPI } from '@/service/middleware/entry';
+import { DatasetItemType } from '@fastgpt/global/core/dataset/type';
+import { ApiRequestProps } from '@fastgpt/service/type/next';
+import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';

-export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  try {
-    await connectToDatabase();
+type Query = {
+  id: string;
+};
+
+async function handler(req: ApiRequestProps<Query>): Promise<DatasetItemType> {
   const { id: datasetId } = req.query as {
     id: string;
   };

   if (!datasetId) {
-    throw new Error('缺少参数');
+    return Promise.reject(CommonErrEnum.missingParams);
   }

   // 凭证校验
-  const { dataset, canWrite, isOwner } = await authDataset({
+  const { dataset, permission } = await authDataset({
     req,
     authToken: true,
     authApiKey: true,
     datasetId,
-    per: 'r'
+    per: ReadPermissionVal
   });

-  jsonRes<DatasetItemType>(res, {
-    data: {
-      ...dataset,
-      vectorModel: getVectorModel(dataset.vectorModel),
-      agentModel: getLLMModel(dataset.agentModel),
-      canWrite,
-      isOwner
-    }
-  });
-  } catch (err) {
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
-  }
+  return {
+    ...dataset,
+    permission,
+    vectorModel: getVectorModel(dataset.vectorModel),
+    agentModel: getLLMModel(dataset.agentModel)
+  };
 }
+
+export default NextAPI(handler);
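With the detail endpoint returning a permission object in place of the old canWrite / isOwner booleans, consumers read capabilities off that object. A hedged client-side sketch (the hasWritePer / hasManagePer / isOwner fields match the checks used elsewhere in this diff):

import type { DatasetItemType } from '@fastgpt/global/core/dataset/type';

// Assumed consumer: derive UI capabilities from the permission object
// instead of the removed booleans.
export function getDatasetActions(dataset: DatasetItemType) {
  return {
    canEdit: dataset.permission.hasWritePer,
    canManageCollaborators: dataset.permission.hasManagePer,
    canDelete: dataset.permission.isOwner
  };
}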
@@ -1,7 +1,7 @@
 import type { NextApiRequest, NextApiResponse } from 'next';
 import { responseWriteController } from '@fastgpt/service/common/response';
 import { addLog } from '@fastgpt/service/common/system/log';
-import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
 import { MongoDatasetData } from '@fastgpt/service/core/dataset/data/schema';
 import { findDatasetAndAllChildren } from '@fastgpt/service/core/dataset/controller';
 import {
@@ -9,6 +9,8 @@ import {
   updateExportDatasetLimit
 } from '@fastgpt/service/support/user/utils';
 import { NextAPI } from '@/service/middleware/entry';
+import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
+import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';

 async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
   let { datasetId } = req.query as {
@@ -16,11 +18,16 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
   };

   if (!datasetId || !global.pgClient) {
-    throw new Error('缺少参数');
+    return Promise.reject(CommonErrEnum.missingParams);
   }

   // 凭证校验
-  const { teamId } = await authDataset({ req, authToken: true, datasetId, per: 'w' });
+  const { teamId } = await authDataset({
+    req,
+    authToken: true,
+    datasetId,
+    per: WritePermissionVal
+  });

   await checkExportDatasetLimit({
     teamId,
@@ -1,24 +1,14 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
-import { authDatasetFile } from '@fastgpt/service/support/permission/auth/dataset';
-import { DatasetFileSchema } from '@fastgpt/global/core/dataset/type.d';
+import type { NextApiRequest } from 'next';
+import { authDatasetFile } from '@fastgpt/service/support/permission/dataset/auth';
+import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
+import { NextAPI } from '@/service/middleware/entry';

-export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  try {
-    await connectToDatabase();
-
+async function handler(req: NextApiRequest) {
   const { fileId } = req.query as { fileId: string };
   // 凭证校验
-  const { file } = await authDatasetFile({ req, authToken: true, fileId, per: 'r' });
+  const { file } = await authDatasetFile({ req, authToken: true, fileId, per: ReadPermissionVal });

-  jsonRes<DatasetFileSchema>(res, {
-    data: file
-  });
-  } catch (err) {
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
-  }
+  return file;
 }
+
+export default NextAPI(handler);
@@ -1,10 +1,10 @@
-import type { NextApiResponse } from 'next';
-import { authFile } from '@fastgpt/service/support/permission/auth/file';
+import { authDatasetFile } from '@fastgpt/service/support/permission/dataset/auth';
 import { DatasetSourceReadTypeEnum } from '@fastgpt/global/core/dataset/constants';
 import { rawText2Chunks, readDatasetSourceRawText } from '@fastgpt/service/core/dataset/read';
 import { authCert } from '@fastgpt/service/support/permission/auth/common';
 import { NextAPI } from '@/service/middleware/entry';
 import { ApiRequestProps } from '@fastgpt/service/type/next';
+import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';

 export type PostPreviewFilesChunksProps = {
   type: DatasetSourceReadTypeEnum;
@@ -21,8 +21,7 @@ export type PreviewChunksResponse = {
 }[];

 async function handler(
-  req: ApiRequestProps<PostPreviewFilesChunksProps>,
-  res: NextApiResponse<any>
+  req: ApiRequestProps<PostPreviewFilesChunksProps>
 ): Promise<PreviewChunksResponse> {
   const { type, sourceId, chunkSize, customSplitChar, overlapRatio, selector, isQAImport } =
     req.body;
@@ -36,7 +35,13 @@ async function handler(

   const { teamId } = await (async () => {
     if (type === DatasetSourceReadTypeEnum.fileLocal) {
-      return authFile({ req, authToken: true, authApiKey: true, fileId: sourceId });
+      return authDatasetFile({
+        req,
+        authToken: true,
+        authApiKey: true,
+        fileId: sourceId,
+        per: ReadPermissionVal
+      });
     }
     return authCert({ req, authApiKey: true, authToken: true });
   })();
@@ -1,34 +1,68 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
+import type { NextApiRequest } from 'next';
 import type { DatasetListItemType } from '@fastgpt/global/core/dataset/type.d';
 import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
 import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
-import { mongoRPermission } from '@fastgpt/global/support/permission/utils';
 import { authUserPer } from '@fastgpt/service/support/permission/user/auth';
 import { getVectorModel } from '@fastgpt/service/core/ai/model';
 import { NextAPI } from '@/service/middleware/entry';
-import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
+import { DatasetPermission } from '@fastgpt/global/support/permission/dataset/controller';
+import {
+  PerResourceTypeEnum,
+  ReadPermissionVal
+} from '@fastgpt/global/support/permission/constant';
+import { MongoResourcePermission } from '@fastgpt/service/support/permission/schema';
+import { parseParentIdInMongo } from '@fastgpt/global/common/parentFolder/utils';

-async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
+async function handler(req: NextApiRequest) {
   const { parentId, type } = req.query as { parentId?: string; type?: DatasetTypeEnum };
   // 凭证校验
-  const { teamId, tmbId, permission } = await authUserPer({
+  const {
+    teamId,
+    tmbId,
+    permission: tmbPer
+  } = await authUserPer({
     req,
     authToken: true,
     authApiKey: true,
     per: ReadPermissionVal
   });

-  const datasets = await MongoDataset.find({
-    ...mongoRPermission({ teamId, tmbId, permission }),
-    ...(parentId !== undefined && { parentId: parentId || null }),
-    ...(type && { type })
-  })
-    .sort({ updateTime: -1 })
-    .lean();
+  const [myDatasets, rpList] = await Promise.all([
+    MongoDataset.find({
+      teamId,
+      ...parseParentIdInMongo(parentId),
+      ...(type && { type })
+    })
+      .sort({
+        updateTime: -1
+      })
+      .lean(),
+    MongoResourcePermission.find({
+      resourceType: PerResourceTypeEnum.dataset,
+      teamId,
+      tmbId
+    }).lean()
+  ]);
+
+  const filterDatasets = myDatasets
+    .map((dataset) => {
+      const perVal = rpList.find(
+        (item) => String(item.resourceId) === String(dataset._id)
+      )?.permission;
+      const Per = new DatasetPermission({
+        per: perVal ?? dataset.defaultPermission,
+        isOwner: String(dataset.tmbId) === tmbId || tmbPer.isOwner
+      });
+
+      return {
+        ...dataset,
+        permission: Per
+      };
+    })
+    .filter((app) => app.permission.hasReadPer);

   const data = await Promise.all(
-    datasets.map<DatasetListItemType>((item) => ({
+    filterDatasets.map<DatasetListItemType>((item) => ({
       _id: item._id,
       parentId: item.parentId,
       avatar: item.avatar,
@@ -36,15 +70,12 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
       intro: item.intro,
       type: item.type,
       permission: item.permission,
-      canWrite: permission.hasWritePer,
-      isOwner: permission.isOwner || String(item.tmbId) === tmbId,
-      vectorModel: getVectorModel(item.vectorModel)
+      vectorModel: getVectorModel(item.vectorModel),
+      defaultPermission: item.defaultPermission
     }))
   );

-  jsonRes<DatasetListItemType[]>(res, {
-    data
-  });
+  return data;
 }

 export default NextAPI(handler);
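The rewritten list endpoint merges three sources of authority: team-level ownership, a per-member resource-permission record, and the dataset's defaultPermission. Isolated as a sketch, the resolution order implemented above is:

// Sketch of the resolution above: an explicit collaborator record wins over
// the dataset's defaultPermission, and owners bypass both.
export function resolveDatasetPer({
  collaboratorPer,
  defaultPermission,
  isDatasetOwner,
  isTeamOwner
}: {
  collaboratorPer?: number;
  defaultPermission: number;
  isDatasetOwner: boolean;
  isTeamOwner: boolean;
}) {
  return {
    per: collaboratorPer ?? defaultPermission,
    isOwner: isDatasetOwner || isTeamOwner
  };
}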
@@ -1,33 +1,20 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
+import type { NextApiRequest } from 'next';
 import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
 import type { ParentTreePathItemType } from '@fastgpt/global/common/parentFolder/type.d';
-import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
+import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
+import { NextAPI } from '@/service/middleware/entry';

-export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  try {
-    await connectToDatabase();
-
+async function handler(req: NextApiRequest) {
   const { parentId } = req.query as { parentId: string };

   if (!parentId) {
-    return jsonRes(res, {
-      data: []
-    });
+    return [];
   }

-  await authDataset({ req, authToken: true, datasetId: parentId, per: 'r' });
+  await authDataset({ req, authToken: true, datasetId: parentId, per: ReadPermissionVal });

-  jsonRes<ParentTreePathItemType[]>(res, {
-    data: await getParents(parentId)
-  });
-  } catch (err) {
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
-  }
+  return await getParents(parentId);
 }

 async function getParents(parentId?: string): Promise<ParentTreePathItemType[]> {
@@ -44,3 +31,5 @@ async function getParents(parentId?: string): Promise<ParentTreePathItemType[]>

   return paths;
 }
+
+export default NextAPI(handler);
@@ -1,7 +1,6 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import type { SearchTestProps, SearchTestResponse } from '@/global/core/dataset/api.d';
-import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
+import type { NextApiRequest } from 'next';
+import type { SearchTestProps } from '@/global/core/dataset/api.d';
+import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
 import { pushGenerateVectorUsage } from '@/service/support/wallet/usage/push';
 import { searchDatasetData } from '@fastgpt/service/core/dataset/search/controller';
 import { updateApiKeyUsage } from '@fastgpt/service/support/openapi/tools';
@@ -13,8 +12,10 @@ import {
   checkTeamReRankPermission
 } from '@fastgpt/service/support/permission/teamLimit';
 import { NextAPI } from '@/service/middleware/entry';
+import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
+import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';

-async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
+async function handler(req: NextApiRequest) {
   const {
     datasetId,
     text,
@@ -29,8 +30,9 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
   } = req.body as SearchTestProps;

   if (!datasetId || !text) {
-    throw new Error('缺少参数');
+    return Promise.reject(CommonErrEnum.missingParams);
   }

   const start = Date.now();

   // auth dataset role
@@ -39,7 +41,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
     authToken: true,
     authApiKey: true,
     datasetId,
-    per: 'r'
+    per: ReadPermissionVal
   });
   // auth balance
   await checkTeamAIPoints(teamId);
@@ -88,14 +90,12 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
     });
   }

-  jsonRes<SearchTestResponse>(res, {
-    data: {
-      list: searchRes,
-      duration: `${((Date.now() - start) / 1000).toFixed(3)}s`,
-      queryExtensionModel: aiExtensionResult?.model,
-      ...result
-    }
-  });
+  return {
+    list: searchRes,
+    duration: `${((Date.now() - start) / 1000).toFixed(3)}s`,
+    queryExtensionModel: aiExtensionResult?.model,
+    ...result
+  };
 }

 export default NextAPI(handler);
@@ -1,10 +1,9 @@
-import type { ApiRequestProps, ApiResponseType } from '@fastgpt/service/type/next';
+import type { ApiRequestProps } from '@fastgpt/service/type/next';
 import { NextAPI } from '@/service/middleware/entry';
-import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
 import { MongoDatasetData } from '@fastgpt/service/core/dataset/data/schema';
 import { MongoDatasetTraining } from '@fastgpt/service/core/dataset/training/schema';
-type Props = {};
+import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';

 export type getDatasetTrainingQueueResponse = {
   rebuildingCount: number;
@@ -12,8 +11,7 @@ export type getDatasetTrainingQueueResponse = {
 };

 async function handler(
-  req: ApiRequestProps<any, { datasetId: string }>,
-  res: ApiResponseType<any>
+  req: ApiRequestProps<any, { datasetId: string }>
 ): Promise<getDatasetTrainingQueueResponse> {
   const { datasetId } = req.query;

@@ -22,7 +20,7 @@ async function handler(
     authToken: true,
     authApiKey: true,
     datasetId,
-    per: 'r'
+    per: ReadPermissionVal
   });

   const [rebuildingCount, trainingCount] = await Promise.all([
@@ -1,46 +1,37 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
+import type { NextApiRequest } from 'next';
 import { MongoDatasetTraining } from '@fastgpt/service/core/dataset/training/schema';
 import { authCert } from '@fastgpt/service/support/permission/auth/common';
 import { GetTrainingQueueProps } from '@/global/core/dataset/api';
+import { NextAPI } from '@/service/middleware/entry';

-export default async function handler(req: NextApiRequest, res: NextApiResponse) {
-  try {
-    await connectToDatabase();
+async function handler(req: NextApiRequest) {
   await authCert({ req, authToken: true });
   const { vectorModel, agentModel } = req.query as GetTrainingQueueProps;

   // get queue data
   // 分别统计 model = vectorModel和agentModel的数量
   const data = await MongoDatasetTraining.aggregate([
     {
       $match: {
         lockTime: { $lt: new Date('2040/1/1') },
         $or: [{ model: { $eq: vectorModel } }, { model: { $eq: agentModel } }]
       }
     },
     {
       $group: {
         _id: '$model',
         count: { $sum: 1 }
       }
     }
   ]);

   const vectorTrainingCount = data.find((item) => item._id === vectorModel)?.count || 0;
   const agentTrainingCount = data.find((item) => item._id === agentModel)?.count || 0;

-  jsonRes(res, {
-    data: {
-      vectorTrainingCount,
-      agentTrainingCount
-    }
-  });
-  } catch (err) {
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
-  }
+  return {
+    vectorTrainingCount,
+    agentTrainingCount
+  };
 }
+
+export default NextAPI(handler);
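For reference, the $group stage above yields one bucket per model, which the two find() calls fold into the response. Illustrative values only (model names are hypothetical):

// Hypothetical aggregate output:
const data = [
  { _id: 'text-embedding-ada-002', count: 42 }, // vector-training queue
  { _id: 'gpt-3.5-turbo', count: 7 } // agent (QA) training queue
];
const vectorTrainingCount = data.find((item) => item._id === 'text-embedding-ada-002')?.count || 0;
const agentTrainingCount = data.find((item) => item._id === 'gpt-3.5-turbo')?.count || 0;
// -> { vectorTrainingCount: 42, agentTrainingCount: 7 }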
@@ -1,5 +1,5 @@
 import { NextAPI } from '@/service/middleware/entry';
-import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
 import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
 import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
 import { MongoDatasetData } from '@fastgpt/service/core/dataset/data/schema';
@@ -8,7 +8,8 @@ import { createTrainingUsage } from '@fastgpt/service/support/wallet/usage/contr
 import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
 import { getLLMModel, getVectorModel } from '@fastgpt/service/core/ai/model';
 import { TrainingModeEnum } from '@fastgpt/global/core/dataset/constants';
-import { ApiRequestProps, ApiResponseType } from '@fastgpt/service/type/next';
+import { ApiRequestProps } from '@fastgpt/service/type/next';
+import { OwnerPermissionVal } from '@fastgpt/global/support/permission/constant';

 export type rebuildEmbeddingBody = {
   datasetId: string;
@@ -17,10 +18,7 @@ export type rebuildEmbeddingBody = {

 export type Response = {};

-async function handler(
-  req: ApiRequestProps<rebuildEmbeddingBody>,
-  res: ApiResponseType<any>
-): Promise<Response> {
+async function handler(req: ApiRequestProps<rebuildEmbeddingBody>): Promise<Response> {
   const { datasetId, vectorModel } = req.body;

   const { teamId, tmbId, dataset } = await authDataset({
@@ -28,7 +26,7 @@ async function handler(
     authToken: true,
     authApiKey: true,
     datasetId,
-    per: 'owner'
+    per: OwnerPermissionVal
   });

   // check vector model
@@ -1,58 +1,56 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
+import type { NextApiRequest } from 'next';
 import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
 import type { DatasetUpdateBody } from '@fastgpt/global/core/dataset/api.d';
-import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
+import { NextAPI } from '@/service/middleware/entry';
+import {
+  OwnerPermissionVal,
+  WritePermissionVal
+} from '@fastgpt/global/support/permission/constant';
+import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';

-export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  try {
-    await connectToDatabase();
+async function handler(req: NextApiRequest) {
   const {
     id,
     parentId,
     name,
     avatar,
     intro,
-    permission,
     agentModel,
     websiteConfig,
     externalReadUrl,
+    defaultPermission,
     status
   } = req.body as DatasetUpdateBody;

   if (!id) {
-    throw new Error('缺少参数');
+    return Promise.reject(CommonErrEnum.missingParams);
   }

-  if (permission) {
-    await authDataset({ req, authToken: true, datasetId: id, per: 'owner' });
-  } else {
-    await authDataset({ req, authToken: true, datasetId: id, per: 'w' });
-  }
+  if (defaultPermission) {
+    await authDataset({ req, authToken: true, datasetId: id, per: OwnerPermissionVal });
+  } else {
+    await authDataset({ req, authToken: true, datasetId: id, per: WritePermissionVal });
+  }
+
+  console.log('update dataset', req.body);

   await MongoDataset.findOneAndUpdate(
     {
       _id: id
     },
     {
       ...(parentId !== undefined && { parentId: parentId || null }),
       ...(name && { name }),
       ...(avatar && { avatar }),
-      ...(permission && { permission }),
       ...(agentModel && { agentModel: agentModel.model }),
       ...(websiteConfig && { websiteConfig }),
       ...(status && { status }),
       ...(intro && { intro }),
-      ...(externalReadUrl && { externalReadUrl })
+      ...(externalReadUrl && { externalReadUrl }),
+      defaultPermission
     }
   );
-
-  jsonRes(res);
-  } catch (err) {
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
-  }
 }
+
+export default NextAPI(handler);
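The update handler now enforces two tiers: changing defaultPermission requires owner permission, while any other field only needs write permission. A hypothetical caller (the endpoint URL is assumed from the file layout, not shown in this diff):

// Hypothetical client call: ordinary edits omit defaultPermission so the
// cheaper WritePermissionVal branch is taken.
export async function renameDataset(id: string, name: string): Promise<void> {
  await fetch('/api/core/dataset/update', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ id, name })
  });
}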
@@ -1,9 +1,10 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
+import type { NextApiRequest } from 'next';
+import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
 import { checkExportDatasetLimit } from '@fastgpt/service/support/user/utils';
 import { NextAPI } from '@/service/middleware/entry';
+import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';

-async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
+async function handler(req: NextApiRequest) {
   const { datasetId } = req.query as {
     datasetId: string;
   };
@@ -13,7 +14,12 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
   }

   // 凭证校验
-  const { teamId } = await authDataset({ req, authToken: true, datasetId, per: 'w' });
+  const { teamId } = await authDataset({
+    req,
+    authToken: true,
+    datasetId,
+    per: WritePermissionVal
+  });

   await checkExportDatasetLimit({
     teamId,
@@ -1,41 +1,33 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
+import type { NextApiRequest } from 'next';
 import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
 import { CreateTrainingUsageProps } from '@fastgpt/global/support/wallet/usage/api.d';
 import { getLLMModel, getVectorModel } from '@fastgpt/service/core/ai/model';
 import { createTrainingUsage } from '@fastgpt/service/support/wallet/usage/controller';
-import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
+import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
+import { NextAPI } from '@/service/middleware/entry';

-export default async function handler(req: NextApiRequest, res: NextApiResponse) {
-  try {
-    await connectToDatabase();
+async function handler(req: NextApiRequest) {
   const { name, datasetId } = req.body as CreateTrainingUsageProps;

   const { teamId, tmbId, dataset } = await authDataset({
     req,
     authToken: true,
     authApiKey: true,
     datasetId,
-    per: 'w'
+    per: WritePermissionVal
   });

   const { billId } = await createTrainingUsage({
     teamId,
     tmbId,
     appName: name,
     billSource: UsageSourceEnum.training,
     vectorModel: getVectorModel(dataset.vectorModel).name,
     agentModel: getLLMModel(dataset.agentModel).name
   });

-  jsonRes<string>(res, {
-    data: billId
-  });
-  } catch (err) {
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
-  }
+  return billId;
 }
+
+export default NextAPI(handler);
@@ -14,7 +14,6 @@ import { AppUpdateParams } from '@/global/core/app/api';
 import dynamic from 'next/dynamic';
 import { useI18n } from '@/web/context/I18n';
 import { AppTypeEnum } from '@fastgpt/global/core/app/constants';

 const MoveModal = dynamic(() => import('@/components/common/folder/MoveModal'));

 type AppListContextType = {
@@ -36,7 +36,7 @@ const EditFolderModal = ({
       if (!val) return Promise.resolve('');
       return editCallback(val);
     },
-    onSuccess: (res) => {
+    onSuccess: () => {
       onClose();
     }
   });
projects/app/src/pages/dataset/component/MemberManager.tsx (new file, 46 lines)
@@ -0,0 +1,46 @@
+import { Box, Button, Flex } from '@chakra-ui/react';
+import React from 'react';
+import CollaboratorContextProvider, {
+  MemberManagerInputPropsType
+} from '@/components/support/permission/MemberManager/context';
+import MyIcon from '@fastgpt/web/components/common/Icon';
+import { useTranslation } from 'next-i18next';
+
+function MemberManager({ managePer }: { managePer: MemberManagerInputPropsType }) {
+  const { t } = useTranslation();
+  return (
+    <Box mt={4}>
+      <CollaboratorContextProvider {...managePer}>
+        {({ MemberListCard, onOpenManageModal, onOpenAddMember }) => {
+          return (
+            <>
+              <Flex alignItems="center" flexDirection="row" justifyContent="space-between" w="full">
+                <Flex flexDirection="row" gap="2">
+                  <Button
+                    size="sm"
+                    variant="whitePrimary"
+                    leftIcon={<MyIcon w="4" name="common/settingLight" />}
+                    onClick={onOpenManageModal}
+                  >
+                    {t('permission.Manage')}
+                  </Button>
+                  <Button
+                    size="sm"
+                    variant="whitePrimary"
+                    leftIcon={<MyIcon w="4" name="support/permission/collaborator" />}
+                    onClick={onOpenAddMember}
+                  >
+                    {t('common.Add')}
+                  </Button>
+                </Flex>
+              </Flex>
+              <MemberListCard mt={2} p={1.5} bg="myGray.100" borderRadius="md" />
+            </>
+          );
+        }}
+      </CollaboratorContextProvider>
+    </Box>
+  );
+}
+
+export default MemberManager;
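The new component delegates all state to CollaboratorContextProvider and consumes it through a render prop. The contract assumed by the usage above, reduced to a type sketch (the authoritative type lives in MemberManager/context):

import type { ReactNode } from 'react';

// Inferred shape of the render-prop children used by MemberManager above.
export type CollaboratorChildren = (utils: {
  MemberListCard: (props: Record<string, unknown>) => ReactNode;
  onOpenManageModal: () => void;
  onOpenAddMember: () => void;
}) => ReactNode;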
@@ -26,8 +26,6 @@ import EditFolderModal, { useEditFolder } from '../../../component/EditFolderMod
 import { TabEnum } from '../../index';
 import ParentPath from '@/components/common/ParentPaths';
 import dynamic from 'next/dynamic';
-import { useUserStore } from '@/web/support/user/useUserStore';
-import { TeamMemberRoleEnum } from '@fastgpt/global/support/user/team/constant';

 import { ImportDataSourceEnum } from '@fastgpt/global/core/dataset/constants';
 import { useContextSelector } from 'use-context-selector';
@@ -40,7 +38,6 @@ const Header = ({}: {}) => {
   const { t } = useTranslation();
   const theme = useTheme();
   const { setLoading } = useSystemStore();
-  const { userInfo } = useUserStore();
   const datasetDetail = useContextSelector(DatasetPageContext, (v) => v.datasetDetail);

   const router = useRouter();
@@ -189,7 +186,7 @@ const Header = ({}: {}) => {
       )}

       {/* diff collection button */}
-      {userInfo?.team?.role !== TeamMemberRoleEnum.visitor && (
+      {datasetDetail.permission.hasWritePer && (
         <>
           {datasetDetail?.type === DatasetTypeEnum.dataset && (
             <MyMenu
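Across these frontend hunks the team-role check (userInfo?.team?.role !== TeamMemberRoleEnum.visitor) gives way to a capability check on the resource's own permission object. The pattern, as a self-contained sketch:

import React from 'react';
import { Button } from '@chakra-ui/react';

// Sketch: gate mutating controls on the dataset's permission object rather
// than on the member's team role.
export function WriteGate({
  hasWritePer,
  onCreate
}: {
  hasWritePer: boolean;
  onCreate: () => void;
}) {
  if (!hasWritePer) return null;
  return <Button onClick={onCreate}>Create</Button>;
}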
@@ -37,8 +37,6 @@ import { useDrag } from '@/web/common/hooks/useDrag';
 import SelectCollections from '@/web/core/dataset/components/SelectCollections';
 import { useToast } from '@fastgpt/web/hooks/useToast';
 import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
-import { useUserStore } from '@/web/support/user/useUserStore';
-import { TeamMemberRoleEnum } from '@fastgpt/global/support/user/team/constant';
 import { DatasetCollectionSyncResultEnum } from '@fastgpt/global/core/dataset/constants';
 import MyBox from '@fastgpt/web/components/common/MyBox';
 import { useContextSelector } from 'use-context-selector';
@@ -53,7 +51,6 @@ const CollectionCard = () => {
   const router = useRouter();
   const { toast } = useToast();
   const { t } = useTranslation();
-  const { userInfo } = useUserStore();
   const { datasetDetail, loadDatasetDetail } = useContextSelector(DatasetPageContext, (v) => v);

   const { openConfirm: openDeleteConfirm, ConfirmModal: ConfirmDeleteModal } = useConfirm({
@@ -213,7 +210,7 @@ const CollectionCard = () => {
               }
               bg={dragTargetId === collection._id ? 'primary.100' : ''}
               userSelect={'none'}
-              onDragStart={(e) => {
+              onDragStart={() => {
                 setDragStartId(collection._id);
               }}
               onDragOver={(e) => {
@@ -296,7 +293,7 @@ const CollectionCard = () => {
                 </Box>
               </Td>
               <Td onClick={(e) => e.stopPropagation()}>
-                {collection.canWrite && userInfo?.team?.role !== TeamMemberRoleEnum.visitor && (
+                {collection.permission.hasWritePer && (
                   <MyMenu
                     width={100}
                     offset={[-70, 5]}
@@ -35,8 +35,6 @@ import InputDataModal from '../components/InputDataModal';
 import RawSourceBox from '@/components/core/dataset/RawSourceBox';
 import type { DatasetDataListItemType } from '@/global/core/dataset/type.d';
 import { TabEnum } from '..';
-import { useUserStore } from '@/web/support/user/useUserStore';
-import { TeamMemberRoleEnum } from '@fastgpt/global/support/user/team/constant';
 import { useSystemStore } from '@/web/common/system/useSystemStore';
 import { DatasetCollectionTypeMap, TrainingTypeMap } from '@fastgpt/global/core/dataset/constants';
 import { formatTime2YMDHM } from '@fastgpt/global/common/string/time';
@@ -47,18 +45,21 @@ import { usePagination } from '@fastgpt/web/hooks/usePagination';
 import { getCollectionSourceData } from '@fastgpt/global/core/dataset/collection/utils';
 import { useI18n } from '@/web/context/I18n';
 import EmptyTip from '@fastgpt/web/components/common/EmptyTip';
+import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContext';
+import { useContextSelector } from 'use-context-selector';

 const DataCard = () => {
   const BoxRef = useRef<HTMLDivElement>(null);
   const theme = useTheme();
   const lastSearch = useRef('');
   const router = useRouter();
-  const { userInfo } = useUserStore();
   const { isPc } = useSystemStore();
   const { collectionId = '', datasetId } = router.query as {
     collectionId: string;
     datasetId: string;
   };
+  const datasetDetail = useContextSelector(DatasetPageContext, (v) => v.datasetDetail);
+
   const { Loading, setIsLoading } = useLoading({ defaultLoading: true });
   const { t } = useTranslation();
   const { datasetT } = useI18n();
@@ -101,7 +102,7 @@ const DataCard = () => {
       getData(1);
       lastSearch.current = searchText;
     }, 300),
-    []
+    [searchText]
   );

   // get file info
@@ -119,10 +120,7 @@ const DataCard = () => {
     }
   );

-  const canWrite = useMemo(
-    () => userInfo?.team?.role !== TeamMemberRoleEnum.visitor && !!collection?.canWrite,
-    [collection?.canWrite, userInfo?.team?.role]
-  );
+  const canWrite = useMemo(() => datasetDetail.permission.hasWritePer, [datasetDetail]);

   const metadataList = useMemo(() => {
     if (!collection) return [];
@@ -291,7 +289,7 @@ const DataCard = () => {
           gridTemplateColumns={['1fr', 'repeat(2,1fr)', 'repeat(3,1fr)', 'repeat(4,1fr)']}
           gridGap={4}
         >
-          {datasetDataList.map((item, index) => (
+          {datasetDataList.map((item) => (
             <Card
               key={item._id}
               cursor={'pointer'}
|||||||
@ -1,4 +1,4 @@
|
|||||||
import React, { useState, useMemo } from 'react';
|
import React from 'react';
|
||||||
import { useRouter } from 'next/router';
|
import { useRouter } from 'next/router';
|
||||||
import { Box, Flex, Button, IconButton, Input, Textarea, HStack } from '@chakra-ui/react';
|
import { Box, Flex, Button, IconButton, Input, Textarea, HStack } from '@chakra-ui/react';
|
||||||
 import { DeleteIcon } from '@chakra-ui/icons';
@@ -11,7 +11,6 @@ import type { DatasetItemType } from '@fastgpt/global/core/dataset/type.d';
 import Avatar from '@/components/Avatar';
 import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
 import { useTranslation } from 'next-i18next';
-import PermissionRadio from '@/components/support/permission/Radio';
 import { useSystemStore } from '@/web/common/system/useSystemStore';
 import { useRequest } from '@fastgpt/web/hooks/useRequest';
 import { MongoImageTypeEnum } from '@fastgpt/global/common/file/image/constants';
@@ -25,10 +24,21 @@ import MyDivider from '@fastgpt/web/components/common/MyDivider/index';
 import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
 import QuestionTip from '@fastgpt/web/components/common/MyTooltip/QuestionTip';
 import FormLabel from '@fastgpt/web/components/common/MyBox/FormLabel';
+import DefaultPermissionList from '@/components/support/permission/DefaultPerList';
+import {
+  DatasetDefaultPermission,
+  DatasetPermissionList
+} from '@fastgpt/global/support/permission/dataset/constant';
+import MemberManager from '../../component/MemberManager';
+import {
+  getCollaboratorList,
+  postUpdateDatasetCollaborators,
+  deleteDatasetCollaborators
+} from '@/web/core/dataset/api/collaborator';
+
 const Info = ({ datasetId }: { datasetId: string }) => {
   const { t } = useTranslation();
-  const { datasetT } = useI18n();
+  const { datasetT, commonT } = useI18n();
   const { datasetDetail, loadDatasetDetail, updateDataset, rebuildingCount, trainingCount } =
     useContextSelector(DatasetPageContext, (v) => v);

@@ -44,7 +54,7 @@ const Info = ({ datasetId }: { datasetId: string }) => {
   const avatar = watch('avatar');
   const vectorModel = watch('vectorModel');
   const agentModel = watch('agentModel');
-  const permission = watch('permission');
+  const defaultPermission = watch('defaultPermission');

   const { datasetModelList, vectorModelList } = useSystemStore();

@@ -233,20 +243,46 @@ const Info = ({ datasetId }: { datasetId: string }) => {
         <FormLabel flex={['0 0 90px', '0 0 160px']}>{t('common.Intro')}</FormLabel>
         <Textarea flex={[1, '0 0 320px']} {...register('intro')} placeholder={t('common.Intro')} />
       </Flex>
-      {datasetDetail.isOwner && (
-        <Flex mt={5} alignItems={'center'} w={'100%'} flexWrap={'wrap'}>
-          <FormLabel flex={['0 0 90px', '0 0 160px']} w={0}>
-            {t('user.Permission')}
-          </FormLabel>
-          <Box>
-            <PermissionRadio
-              value={permission}
-              onChange={(e) => {
-                setValue('permission', e);
-              }}
-            />
-          </Box>
-        </Flex>
+      {datasetDetail.permission.hasManagePer && (
+        <>
+          <Flex mt={5} alignItems={'center'} w={'100%'} flexWrap={'wrap'} maxW="500px">
+            <FormLabel flex={['0 0 90px', '0 0 160px']} w={0}>
+              {commonT('permission.Default permission')}
+            </FormLabel>
+            <DefaultPermissionList
+              w="320px"
+              per={defaultPermission}
+              defaultPer={DatasetDefaultPermission}
+              onChange={(v) => setValue('defaultPermission', v)}
+            />
+          </Flex>
+
+          <Flex mt={5} alignItems={'center'} w={'100%'} flexWrap={'wrap'} maxW="500px">
+            <FormLabel flex={['0 0 90px', '0 0 160px']} w={0}>
+              {commonT('permission.Collaborator')}
+            </FormLabel>
+            <Box flex={1}>
+              <MemberManager
+                managePer={{
+                  permission: datasetDetail.permission,
+                  onGetCollaboratorList: () => getCollaboratorList(datasetId),
+                  permissionList: DatasetPermissionList,
+                  onUpdateCollaborators: (body) =>
+                    postUpdateDatasetCollaborators({
+                      ...body,
+                      datasetId
+                    }),
+                  onDelOneCollaborator: (tmbId) =>
+                    deleteDatasetCollaborators({
+                      datasetId,
+                      tmbId
+                    })
+                }}
+              />
+            </Box>
+          </Flex>
+        </>
       )}

       <Flex mt={5} w={'100%'} alignItems={'flex-end'}>
@@ -259,7 +295,7 @@ const Info = ({ datasetId }: { datasetId: string }) => {
         >
           {t('common.Save')}
         </Button>
-        {datasetDetail.isOwner && (
+        {datasetDetail.permission.isOwner && (
          <IconButton
            isLoading={btnLoading}
            icon={<DeleteIcon />}
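The hunks above replace loose boolean flags (`isOwner`, a `permission` radio value) with a permission object exposing `hasManagePer`, `hasWritePer` and `isOwner` getters, plus a numeric `defaultPermission`. A minimal sketch of how such a bit-flag permission object can behave; the flag values and class shape here are assumptions for illustration, not the actual `DatasetPermission` implementation from `@fastgpt/global/support/permission/dataset/controller`:

// Illustrative sketch only: flag values are assumed.
// Convention assumed here: write implies read, manage implies write.
const ReadPermVal = 0b100;
const WritePermVal = 0b110;
const ManagePermVal = 0b111;

class PermissionSketch {
  value: number;
  isOwner: boolean;

  constructor({ per = 0, isOwner = false }: { per?: number; isOwner?: boolean } = {}) {
    this.isOwner = isOwner;
    this.value = isOwner ? ManagePermVal : per; // owners hold every flag
  }

  private check(val: number) {
    return (this.value & val) === val; // every bit of `val` must be set
  }

  get hasReadPer() {
    return this.check(ReadPermVal);
  }
  get hasWritePer() {
    return this.check(WritePermVal);
  }
  get hasManagePer() {
    return this.check(ManagePermVal);
  }
}

// A collaborator granted write access can edit but not manage:
const per = new PermissionSketch({ per: WritePermVal });
console.log(per.hasWritePer, per.hasManagePer); // true false

This is why the UI can gate the collaborator manager on `hasManagePer` alone instead of combining team-level checks with an `isOwner` flag.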
@@ -365,9 +365,11 @@ const InputDataModal = ({
         <Button variant={'whiteBase'} mr={3} onClick={onClose}>
           {t('common.Close')}
         </Button>
-        <MyTooltip label={collection.canWrite ? '' : t('dataset.data.Can not edit')}>
+        <MyTooltip
+          label={collection.permission.hasWritePer ? '' : t('dataset.data.Can not edit')}
+        >
           <Button
-            isDisabled={!collection.canWrite}
+            isDisabled={!collection.permission.hasWritePer}
             // @ts-ignore
             onClick={handleSubmit(dataId ? onUpdateData : sureImportData)}
           >
@@ -1,7 +1,5 @@
 import React, { useCallback } from 'react';
 import { useTranslation } from 'next-i18next';
-import { useDatasetStore } from '@/web/core/dataset/store/dataset';
-import { useUserStore } from '@/web/support/user/useUserStore';
 import { Box, Flex, IconButton, useTheme, Progress } from '@chakra-ui/react';
 import { useSystemStore } from '@/web/common/system/useSystemStore';
 import Avatar from '@/components/Avatar';
@@ -29,7 +27,6 @@ const Slider = ({ currentTab }: { currentTab: TabEnum }) => {
   const { datasetT } = useI18n();
   const router = useRouter();
   const query = router.query;
-  const { userInfo } = useUserStore();
   const { isPc } = useSystemStore();
   const { datasetDetail, vectorTrainingMap, agentTrainingMap, rebuildingCount } =
     useContextSelector(DatasetPageContext, (v) => v);
@@ -41,7 +38,7 @@ const Slider = ({ currentTab }: { currentTab: TabEnum }) => {
       icon: 'common/overviewLight'
     },
     { label: t('core.dataset.test.Search Test'), id: TabEnum.test, icon: 'kbTest' },
-    ...(userInfo?.team.permission.hasManagePer || datasetDetail.isOwner
+    ...(datasetDetail.permission.hasManagePer
       ? [{ label: t('common.Config'), id: TabEnum.info, icon: 'common/settingLight' }]
       : [])
   ];
@@ -1,4 +1,4 @@
-import React, { useCallback, useState } from 'react';
+import React, { useCallback } from 'react';
 import { Box, Flex, Button, ModalFooter, ModalBody, Input } from '@chakra-ui/react';
 import { useSelectFile } from '@/web/common/file/hooks/useSelectFile';
 import { useForm } from 'react-hook-form';
@@ -20,6 +20,7 @@ import { MongoImageTypeEnum } from '@fastgpt/global/common/file/image/constants'
 import AIModelSelector from '@/components/Select/AIModelSelector';
 import { useI18n } from '@/web/context/I18n';
 import QuestionTip from '@fastgpt/web/components/common/MyTooltip/QuestionTip';
+import { DatasetDefaultPermission } from '@fastgpt/global/support/permission/dataset/constant';

 const CreateModal = ({ onClose, parentId }: { onClose: () => void; parentId?: string }) => {
   const { t } = useTranslation();
@@ -38,7 +39,8 @@ const CreateModal = ({ onClose, parentId }: { onClose: () => void; parentId?: st
       name: '',
       intro: '',
       vectorModel: filterNotHiddenVectorModelList[0].model,
-      agentModel: datasetModelList[0].model
+      agentModel: datasetModelList[0].model,
+      defaultPermission: DatasetDefaultPermission
     }
   });
   const avatar = watch('avatar');
projects/app/src/pages/dataset/list/component/List.tsx (Normal file, 496 lines)
@@ -0,0 +1,496 @@
+import { useDrag } from '@/web/common/hooks/useDrag';
+import { delDatasetById, getDatasetById, putDatasetById } from '@/web/core/dataset/api';
+import { useDatasetStore } from '@/web/core/dataset/store/dataset';
+import { Box, Flex, Grid } from '@chakra-ui/react';
+import { DatasetTypeEnum, DatasetTypeMap } from '@fastgpt/global/core/dataset/constants';
+import MyMenu from '@fastgpt/web/components/common/MyMenu';
+import MyIcon from '@fastgpt/web/components/common/Icon';
+import React, { useMemo, useRef, useState } from 'react';
+import { useRouter } from 'next/router';
+import PermissionIconText from '@/components/support/permission/IconText';
+import DatasetTypeTag from '@/components/core/dataset/DatasetTypeTag';
+import Avatar from '@/components/Avatar';
+import { useConfirm } from '@fastgpt/web/hooks/useConfirm';
+import { useRequest } from '@fastgpt/web/hooks/useRequest';
+import { DatasetItemType } from '@fastgpt/global/core/dataset/type';
+import { useSystemStore } from '@/web/common/system/useSystemStore';
+import { useToast } from '@fastgpt/web/hooks/useToast';
+import { checkTeamExportDatasetLimit } from '@/web/support/user/team/api';
+import { downloadFetch } from '@/web/common/system/utils';
+import { useTranslation } from 'next-i18next';
+import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
+import dynamic from 'next/dynamic';
+import { EditResourceInfoFormType } from '@/components/common/Modal/EditResourceModal';
+import { useContextSelector } from 'use-context-selector';
+import { DatasetContext } from '../context';
+import {
+  DatasetDefaultPermission,
+  DatasetPermissionList
+} from '@fastgpt/global/support/permission/dataset/constant';
+import ConfigPerModal from '@/components/support/permission/ConfigPerModal';
+import {
+  deleteDatasetCollaborators,
+  getCollaboratorList,
+  postUpdateDatasetCollaborators
+} from '@/web/core/dataset/api/collaborator';
+import FolderSlideCard from '@/components/common/folder/SlideCard';
+import { useQuery } from '@tanstack/react-query';
+import EmptyTip from '@fastgpt/web/components/common/EmptyTip';
+
+const MoveModal = dynamic(() => import('./MoveModal'), { ssr: false });
+
+function List() {
+  const { setLoading, isPc } = useSystemStore();
+  const { toast } = useToast();
+  const { t } = useTranslation();
+  const { refetch } = useContextSelector(DatasetContext, (v) => v);
+  const [editPerDatasetIndex, setEditPerDatasetIndex] = useState<number>();
+  const { myDatasets, loadMyDatasets, setMyDatasets } = useDatasetStore();
+
+  const editPerDataset = useMemo(
+    () => (editPerDatasetIndex !== undefined ? myDatasets[editPerDatasetIndex] : undefined),
+    [editPerDatasetIndex, myDatasets]
+  );
+
+  const router = useRouter();
+
+  const { parentId } = router.query as { parentId: string };
+
+  const { data: folderDetail, refetch: refetchFolderDetail } = useQuery(
+    ['folderDetail', parentId, myDatasets],
+    () => (parentId ? getDatasetById(parentId) : undefined)
+  );
+
+  const { mutate: exportDataset } = useRequest({
+    mutationFn: async (dataset: DatasetItemType) => {
+      setLoading(true);
+      await checkTeamExportDatasetLimit(dataset._id);
+
+      await downloadFetch({
+        url: `/api/core/dataset/exportAll?datasetId=${dataset._id}`,
+        filename: `${dataset.name}.csv`
+      });
+    },
+    onSuccess() {
+      toast({
+        status: 'success',
+        title: t('core.dataset.Start export')
+      });
+    },
+    onSettled() {
+      setLoading(false);
+    },
+    errorToast: t('dataset.Export Dataset Limit Error')
+  });
+
+  const { mutate: onclickDelDataset } = useRequest({
+    mutationFn: async (id: string) => {
+      setLoading(true);
+      await delDatasetById(id);
+      return id;
+    },
+    onSuccess(id: string) {
+      setMyDatasets(myDatasets.filter((item) => item._id !== id));
+    },
+    onSettled() {
+      setLoading(false);
+    },
+    successToast: t('common.Delete Success'),
+    errorToast: t('dataset.Delete Dataset Error')
+  });
+
+  const EditResourceModal = dynamic(() => import('@/components/common/Modal/EditResourceModal'));
+  const [editedDataset, setEditedDataset] = useState<EditResourceInfoFormType>();
+
+  const DeleteTipsMap = useRef({
+    [DatasetTypeEnum.folder]: t('dataset.deleteFolderTips'),
+    [DatasetTypeEnum.dataset]: t('core.dataset.Delete Confirm'),
+    [DatasetTypeEnum.websiteDataset]: t('core.dataset.Delete Confirm'),
+    [DatasetTypeEnum.externalFile]: t('core.dataset.Delete Confirm')
+  });
+
+  const { moveDataId, setMoveDataId, dragStartId, setDragStartId, dragTargetId, setDragTargetId } =
+    useDrag();
+
+  const formatDatasets = useMemo(
+    () =>
+      myDatasets.map((item) => {
+        return {
+          ...item,
+          label: DatasetTypeMap[item.type]?.label,
+          icon: DatasetTypeMap[item.type]?.icon
+        };
+      }),
+    [myDatasets]
+  );
+
+  const { openConfirm, ConfirmModal } = useConfirm({
+    type: 'delete'
+  });
+
+  const onDeleteDataset = (id: string) => {
+    openConfirm(
+      () => onclickDelDataset(id),
+      undefined,
+      DeleteTipsMap.current[DatasetTypeEnum.dataset]
+    )();
+  };
+
+  return (
+    <>
+      <Flex>
+        {formatDatasets.length > 0 && (
+          <Grid
+            flexGrow={1}
+            py={5}
+            gridTemplateColumns={['1fr', 'repeat(2,1fr)', 'repeat(3,1fr)', 'repeat(4,1fr)']}
+            gridGap={5}
+            userSelect={'none'}
+          >
+            {formatDatasets.map((dataset, index) => (
+              <MyTooltip
+                key={dataset._id}
+                label={
+                  <Flex flexDirection={'column'} alignItems={'center'}>
+                    <Box fontSize={'xs'} color={'myGray.500'}>
+                      {dataset.type === DatasetTypeEnum.folder ? 'Open folder' : 'Open dataset'}
+                    </Box>
+                  </Flex>
+                }
+              >
+                <Box
+                  display={'flex'}
+                  flexDirection={'column'}
+                  py={3}
+                  px={5}
+                  cursor={'pointer'}
+                  borderWidth={1.5}
+                  borderColor={dragTargetId === dataset._id ? 'primary.600' : 'borderColor.low'}
+                  bg={'white'}
+                  borderRadius={'md'}
+                  minH={'130px'}
+                  position={'relative'}
+                  data-drag-id={dataset.type === DatasetTypeEnum.folder ? dataset._id : undefined}
+                  draggable
+                  onDragStart={() => {
+                    setDragStartId(dataset._id);
+                  }}
+                  onDragOver={(e) => {
+                    e.preventDefault();
+                    const targetId = e.currentTarget.getAttribute('data-drag-id');
+                    if (!targetId) return;
+                    DatasetTypeEnum.folder && setDragTargetId(targetId);
+                  }}
+                  onDragLeave={(e) => {
+                    e.preventDefault();
+                    setDragTargetId(undefined);
+                  }}
+                  onDrop={async (e) => {
+                    e.preventDefault();
+                    if (!dragTargetId || !dragStartId || dragTargetId === dragStartId) return;
+                    // update parentId
+                    try {
+                      await putDatasetById({
+                        id: dragStartId,
+                        parentId: dragTargetId
+                      });
+                      refetch();
+                    } catch (error) {}
+                    setDragTargetId(undefined);
+                  }}
+                  _hover={{
+                    borderColor: 'primary.300',
+                    boxShadow: '1.5',
+                    '& .delete': {
+                      display: 'block'
+                    },
+                    '& .more': {
+                      display: 'flex'
+                    }
+                  }}
+                  onClick={() => {
+                    if (dataset.type === DatasetTypeEnum.folder) {
+                      router.push({
+                        pathname: '/dataset/list',
+                        query: {
+                          parentId: dataset._id
+                        }
+                      });
+                    } else {
+                      router.push({
+                        pathname: '/dataset/detail',
+                        query: {
+                          datasetId: dataset._id
+                        }
+                      });
+                    }
+                  }}
+                >
+                  {dataset.permission.hasWritePer && (
+                    <Box
+                      className="more"
+                      display="none"
+                      position={'absolute'}
+                      top={3}
+                      right={3}
+                      borderRadius={'md'}
+                      _hover={{
+                        color: 'primary.500',
+                        '& .icon': {
+                          bg: 'myGray.100'
+                        }
+                      }}
+                      onClick={(e) => {
+                        e.stopPropagation();
+                      }}
+                    >
+                      <MyMenu
+                        width={120}
+                        Button={
+                          <Box w={'22px'} h={'22px'}>
+                            <MyIcon
+                              className="icon"
+                              name={'more'}
+                              h={'16px'}
+                              w={'16px'}
+                              px={1}
+                              py={1}
+                              borderRadius={'md'}
+                              cursor={'pointer'}
+                            />
+                          </Box>
+                        }
+                        menuList={[
+                          {
+                            children: [
+                              {
+                                icon: 'edit',
+                                label: 'Edit info',
+                                onClick: () =>
+                                  setEditedDataset({
+                                    id: dataset._id,
+                                    name: dataset.name,
+                                    intro: dataset.intro,
+                                    avatar: dataset.avatar
+                                  })
+                              },
+                              {
+                                icon: 'common/file/move',
+                                label: t('Move'),
+                                onClick: () => setMoveDataId(dataset._id)
+                              },
+                              {
+                                icon: 'export',
+                                label: t('Export'),
+                                onClick: () => {
+                                  exportDataset(dataset);
+                                }
+                              },
+                              ...(dataset.permission.hasManagePer
+                                ? [
+                                    {
+                                      icon: 'support/team/key',
+                                      label: t('permission.Permission'),
+                                      onClick: () => setEditPerDatasetIndex(index)
+                                    }
+                                  ]
+                                : [])
+                            ]
+                          },
+                          ...(dataset.permission.hasManagePer
+                            ? [
+                                {
+                                  children: [
+                                    {
+                                      icon: 'delete',
+                                      label: t('common.Delete'),
+                                      type: 'danger' as 'danger',
+                                      onClick: () => {
+                                        openConfirm(
+                                          () => onclickDelDataset(dataset._id),
+                                          undefined,
+                                          DeleteTipsMap.current[dataset.type]
+                                        )();
+                                      }
+                                    }
+                                  ]
+                                }
+                              ]
+                            : [])
+                        ]}
+                      />
+                    </Box>
+                  )}
+
+                  <Flex alignItems={'center'} h={'38px'}>
+                    <Avatar src={dataset.avatar} borderRadius={'md'} w={'28px'} />
+                    <Box mx={3} className="textEllipsis3">
+                      {dataset.name}
+                    </Box>
+                  </Flex>
+                  <Box
+                    flex={1}
+                    className={'textEllipsis3'}
+                    py={1}
+                    wordBreak={'break-all'}
+                    fontSize={'xs'}
+                    color={'myGray.500'}
+                  >
+                    {dataset.intro ||
+                      (dataset.type === DatasetTypeEnum.folder
+                        ? t('core.dataset.Folder placeholder')
+                        : t('core.dataset.Intro Placeholder'))}
+                  </Box>
+                  <Flex alignItems={'center'} fontSize={'sm'}>
+                    <Box flex={1}>
+                      <PermissionIconText
+                        defaultPermission={dataset.defaultPermission}
+                        color={'myGray.600'}
+                      />
+                    </Box>
+                    {dataset.type !== DatasetTypeEnum.folder && (
+                      <DatasetTypeTag type={dataset.type} py={1} px={2} />
+                    )}
+                  </Flex>
+                </Box>
+              </MyTooltip>
+            ))}
+          </Grid>
+        )}
+        {myDatasets.length === 0 && (
+          <EmptyTip pt={'35vh'} text={t('core.dataset.Empty Dataset Tips')} flexGrow="1"></EmptyTip>
+        )}
+
+        {!!folderDetail && isPc && (
+          <Box pt={[4, 6]} ml={[4, 6]}>
+            <FolderSlideCard
+              refreshDeps={[folderDetail._id]}
+              name={folderDetail.name}
+              intro={folderDetail.intro}
+              onEdit={() => {
+                setEditedDataset({
+                  id: folderDetail._id,
+                  name: folderDetail.name,
+                  intro: folderDetail.intro
+                });
+              }}
+              onMove={() => setMoveDataId(folderDetail._id)}
+              deleteTip={t('dataset.deleteFolderTips')}
+              onDelete={() => onDeleteDataset(folderDetail._id)}
+              defaultPer={{
+                value: folderDetail.defaultPermission,
+                defaultValue: DatasetDefaultPermission,
+                onChange: (e) => {
+                  return putDatasetById({
+                    id: folderDetail._id,
+                    defaultPermission: e
+                  });
+                }
+              }}
+              managePer={{
+                permission: folderDetail.permission,
+                onGetCollaboratorList: () => getCollaboratorList(folderDetail._id),
+                permissionList: DatasetPermissionList,
+                onUpdateCollaborators: ({
+                  tmbIds,
+                  permission
+                }: {
+                  tmbIds: string[];
+                  permission: number;
+                }) => {
+                  return postUpdateDatasetCollaborators({
+                    tmbIds,
+                    permission,
+                    datasetId: folderDetail._id
+                  });
+                },
+                onDelOneCollaborator: (tmbId: string) =>
+                  deleteDatasetCollaborators({
+                    datasetId: folderDetail._id,
+                    tmbId
+                  })
+              }}
+            />
+          </Box>
+        )}
+      </Flex>
+
+      <ConfirmModal />
+
+      {editedDataset && (
+        <EditResourceModal
+          {...editedDataset}
+          title={''}
+          onClose={() => setEditedDataset(undefined)}
+          onEdit={async (data) => {
+            await putDatasetById({
+              id: editedDataset.id,
+              name: data.name,
+              intro: data.intro,
+              avatar: data.avatar
+            });
+            loadMyDatasets(parentId);
+            refetchFolderDetail();
+            setEditedDataset(undefined);
+          }}
+        />
+      )}
+
+      {!!moveDataId && (
+        <MoveModal
+          moveDataId={moveDataId}
+          onClose={() => setMoveDataId('')}
+          onSuccess={() => {
+            refetch();
+            refetchFolderDetail();
+            setMoveDataId('');
+          }}
+        />
+      )}
+
+      {!!editPerDataset && (
+        <ConfigPerModal
+          avatar={editPerDataset.avatar}
+          name={editPerDataset.name}
+          defaultPer={{
+            value: editPerDataset.defaultPermission,
+            defaultValue: DatasetDefaultPermission,
+            onChange: async (e) => {
+              await putDatasetById({
+                id: editPerDataset._id,
+                defaultPermission: e
+              });
+              refetch();
+            }
+          }}
+          managePer={{
+            permission: editPerDataset.permission,
+            onGetCollaboratorList: () => getCollaboratorList(editPerDataset._id),
+            permissionList: DatasetPermissionList,
+            onUpdateCollaborators: ({
+              tmbIds,
+              permission
+            }: {
+              tmbIds: string[];
+              permission: number;
+            }) => {
+              return postUpdateDatasetCollaborators({
+                tmbIds,
+                permission,
+                datasetId: editPerDataset._id
+              });
+            },
+            onDelOneCollaborator: (tmbId: string) =>
+              deleteDatasetCollaborators({
+                datasetId: editPerDataset._id,
+                tmbId
+              })
+          }}
+          onClose={() => setEditPerDatasetIndex(undefined)}
+        />
+      )}
+    </>
+  );
+}
+
+export default List;
projects/app/src/pages/dataset/list/context.tsx (Normal file, 58 lines)
@@ -0,0 +1,58 @@
+import { getDatasetPaths } from '@/web/core/dataset/api';
+import { useDatasetStore } from '@/web/core/dataset/store/dataset';
+import { getErrText } from '@fastgpt/global/common/error/utils';
+import { ParentTreePathItemType } from '@fastgpt/global/common/parentFolder/type';
+import { useToast } from '@fastgpt/web/hooks/useToast';
+import { useQuery } from '@tanstack/react-query';
+import { useTranslation } from 'next-i18next';
+import { useRouter } from 'next/router';
+import React from 'react';
+import { createContext } from 'use-context-selector';
+
+export type DatasetContextType = {
+  refetch: () => void;
+  isFetching: boolean;
+  paths: ParentTreePathItemType[];
+};
+
+export const DatasetContext = createContext<DatasetContextType>({
+  refetch: () => {},
+  isFetching: false,
+  paths: []
+});
+
+function DatasetContextProvider({ children }: { children: React.ReactNode }) {
+  const router = useRouter();
+  const { toast } = useToast();
+  const { t } = useTranslation();
+
+  const { parentId } = router.query as { parentId: string };
+  const { loadMyDatasets } = useDatasetStore();
+
+  const { data, refetch, isFetching } = useQuery(
+    ['loadDataset', parentId],
+    () => {
+      return Promise.all([loadMyDatasets(parentId), getDatasetPaths(parentId)]);
+    },
+    {
+      onError(err) {
+        toast({
+          status: 'error',
+          title: t(getErrText(err))
+        });
+      }
+    }
+  );
+
+  const paths = data?.[1] || [];
+
+  const contextValue = {
+    refetch,
+    isFetching,
+    paths
+  };
+
+  return <DatasetContext.Provider value={contextValue}>{children}</DatasetContext.Provider>;
+}
+
+export default DatasetContextProvider;
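This provider owns the dataset-list query, so `refetch`, `isFetching` and `paths` are shared with the list page and its children without prop drilling, and `use-context-selector` lets each consumer subscribe to just the slice it reads. A minimal consumer sketch; the component name here is hypothetical:

import { useContextSelector } from 'use-context-selector';
import { DatasetContext } from './context';

// Hypothetical consumer: re-renders only when `isFetching` changes,
// not when `paths` or `refetch` receive new identities.
function FetchingIndicator() {
  const isFetching = useContextSelector(DatasetContext, (v) => v.isFetching);
  return isFetching ? <span>Loading…</span> : null;
}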
@@ -1,144 +1,42 @@
-import React, { useMemo, useRef, useState } from 'react';
-import { Box, Flex, Grid, useDisclosure, Image, Button } from '@chakra-ui/react';
+import React from 'react';
+import { Box, Flex, useDisclosure, Image, Button } from '@chakra-ui/react';
 import { useRouter } from 'next/router';
 import PageContainer from '@/components/PageContainer';
-import { useConfirm } from '@fastgpt/web/hooks/useConfirm';
 import { AddIcon } from '@chakra-ui/icons';
-import { useQuery } from '@tanstack/react-query';
-import {
-  delDatasetById,
-  getDatasetPaths,
-  putDatasetById,
-  postCreateDataset
-} from '@/web/core/dataset/api';
-import { checkTeamExportDatasetLimit } from '@/web/support/user/team/api';
+import { postCreateDataset } from '@/web/core/dataset/api';
 import { useTranslation } from 'next-i18next';
-import Avatar from '@/components/Avatar';
 import MyIcon from '@fastgpt/web/components/common/Icon';
 import { serviceSideProps } from '@/web/common/utils/i18n';
 import dynamic from 'next/dynamic';
-import { DatasetTypeEnum, DatasetTypeMap } from '@fastgpt/global/core/dataset/constants';
+import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
 import { FolderImgUrl, FolderIcon } from '@fastgpt/global/common/file/image/constants';
 import MyMenu from '@fastgpt/web/components/common/MyMenu';
-import { useRequest } from '@fastgpt/web/hooks/useRequest';
-import { useSystemStore } from '@/web/common/system/useSystemStore';
-import { useEditTitle } from '@/web/common/hooks/useEditTitle';
 import EditFolderModal, { useEditFolder } from '../component/EditFolderModal';
-import { useDrag } from '@/web/common/hooks/useDrag';
 import { useUserStore } from '@/web/support/user/useUserStore';
-import PermissionIconText from '@/components/support/permission/IconText';
-import { PermissionTypeEnum } from '@fastgpt/global/support/permission/constant';
-import { DatasetItemType } from '@fastgpt/global/core/dataset/type';
 import ParentPaths from '@/components/common/ParentPaths';
-import DatasetTypeTag from '@/components/core/dataset/DatasetTypeTag';
-import { useToast } from '@fastgpt/web/hooks/useToast';
-import { getErrText } from '@fastgpt/global/common/error/utils';
 import { useDatasetStore } from '@/web/core/dataset/store/dataset';
-import { downloadFetch } from '@/web/common/system/utils';
-import EmptyTip from '@fastgpt/web/components/common/EmptyTip';
+import List from './component/List';
+import { DatasetContext } from './context';
+import DatasetContextProvider from './context';
+import { useContextSelector } from 'use-context-selector';

 const CreateModal = dynamic(() => import('./component/CreateModal'), { ssr: false });
-const MoveModal = dynamic(() => import('./component/MoveModal'), { ssr: false });

 const Dataset = () => {
   const { t } = useTranslation();
-  const { toast } = useToast();
   const router = useRouter();
-  const { parentId } = router.query as { parentId: string };
-  const { setLoading } = useSystemStore();
   const { userInfo } = useUserStore();
-  const { myDatasets, loadMyDatasets, setMyDatasets } = useDatasetStore();
+  const { myDatasets } = useDatasetStore();
+  const { parentId } = router.query as { parentId: string };
-  const DeleteTipsMap = useRef({
-    [DatasetTypeEnum.folder]: t('dataset.deleteFolderTips'),
-    [DatasetTypeEnum.dataset]: t('core.dataset.Delete Confirm'),
-    [DatasetTypeEnum.websiteDataset]: t('core.dataset.Delete Confirm'),
-    [DatasetTypeEnum.externalFile]: t('core.dataset.Delete Confirm')
-  });
-
-  const { openConfirm, ConfirmModal } = useConfirm({
-    type: 'delete'
-  });
-  const { onOpenModal: onOpenTitleModal, EditModal: EditTitleModal } = useEditTitle({
-    title: t('Rename')
-  });
-  const { moveDataId, setMoveDataId, dragStartId, setDragStartId, dragTargetId, setDragTargetId } =
-    useDrag();
-
   const {
     isOpen: isOpenCreateModal,
     onOpen: onOpenCreateModal,
     onClose: onCloseCreateModal
   } = useDisclosure();

   const { editFolderData, setEditFolderData } = useEditFolder();
+  const { paths, refetch, isFetching } = useContextSelector(DatasetContext, (v) => v);
-  /* on click delete */
-  const { mutate: onclickDelDataset } = useRequest({
-    mutationFn: async (id: string) => {
-      setLoading(true);
-      await delDatasetById(id);
-      return id;
-    },
-    onSuccess(id: string) {
-      setMyDatasets(myDatasets.filter((item) => item._id !== id));
-    },
-    onSettled() {
-      setLoading(false);
-    },
-    successToast: t('common.Delete Success'),
-    errorToast: t('dataset.Delete Dataset Error')
-  });
-  // check export limit
-  const { mutate: exportDataset } = useRequest({
-    mutationFn: async (dataset: DatasetItemType) => {
-      setLoading(true);
-      await checkTeamExportDatasetLimit(dataset._id);
-
-      await downloadFetch({
-        url: `/api/core/dataset/exportAll?datasetId=${dataset._id}`,
-        filename: `${dataset.name}.csv`
-      });
-    },
-    onSuccess() {
-      toast({
-        status: 'success',
-        title: t('core.dataset.Start export')
-      });
-    },
-    onSettled() {
-      setLoading(false);
-    },
-    errorToast: t('dataset.Export Dataset Limit Error')
-  });
-
-  const { data, refetch, isFetching } = useQuery(
-    ['loadDataset', parentId],
-    () => {
-      return Promise.all([loadMyDatasets(parentId), getDatasetPaths(parentId)]);
-    },
-    {
-      onError(err) {
-        toast({
-          status: 'error',
-          title: t(getErrText(err))
-        });
-      }
-    }
-  );
-
-  const paths = data?.[1] || [];
-
-  const formatDatasets = useMemo(
-    () =>
-      myDatasets.map((item) => {
-        return {
-          ...item,
-          label: DatasetTypeMap[item.type]?.label,
-          icon: DatasetTypeMap[item.type]?.icon
-        };
-      }),
-    [myDatasets]
-  );
-
   return (
     <PageContainer
@@ -148,7 +46,7 @@ const Dataset = () => {
       <Flex pt={[4, '30px']} alignItems={'center'} justifyContent={'space-between'}>
         {/* url path */}
         <ParentPaths
-          paths={paths.map((path, i) => ({
+          paths={paths.map((path) => ({
            parentId: path.parentId,
            parentName: path.parentName
          }))}
@@ -208,254 +106,7 @@ const Dataset = () => {
           />
         )}
       </Flex>
-      <Grid
-        py={5}
-        gridTemplateColumns={['1fr', 'repeat(2,1fr)', 'repeat(3,1fr)', 'repeat(4,1fr)']}
-        gridGap={5}
-        userSelect={'none'}
-      >
-        {formatDatasets.map((dataset) => (
-          <Box
-            display={'flex'}
-            flexDirection={'column'}
-            key={dataset._id}
-            py={3}
-            px={5}
-            cursor={'pointer'}
-            borderWidth={1.5}
-            borderColor={dragTargetId === dataset._id ? 'primary.600' : 'borderColor.low'}
-            bg={'white'}
-            borderRadius={'md'}
-            minH={'130px'}
-            position={'relative'}
-            data-drag-id={dataset.type === DatasetTypeEnum.folder ? dataset._id : undefined}
-            draggable
-            onDragStart={(e) => {
-              setDragStartId(dataset._id);
-            }}
-            onDragOver={(e) => {
-              e.preventDefault();
-              const targetId = e.currentTarget.getAttribute('data-drag-id');
-              if (!targetId) return;
-              DatasetTypeEnum.folder && setDragTargetId(targetId);
-            }}
-            onDragLeave={(e) => {
-              e.preventDefault();
-              setDragTargetId(undefined);
-            }}
-            onDrop={async (e) => {
-              e.preventDefault();
-              if (!dragTargetId || !dragStartId || dragTargetId === dragStartId) return;
-              // update parentId
-              try {
-                await putDatasetById({
-                  id: dragStartId,
-                  parentId: dragTargetId
-                });
-                refetch();
-              } catch (error) {}
-              setDragTargetId(undefined);
-            }}
-            _hover={{
-              borderColor: 'primary.300',
-              boxShadow: '1.5',
-              '& .delete': {
-                display: 'block'
-              }
-            }}
-            onClick={() => {
-              if (dataset.type === DatasetTypeEnum.folder) {
-                router.push({
-                  pathname: '/dataset/list',
-                  query: {
-                    parentId: dataset._id
-                  }
-                });
-              } else {
-                router.push({
-                  pathname: '/dataset/detail',
-                  query: {
-                    datasetId: dataset._id
-                  }
-                });
-              }
-            }}
-          >
-            {userInfo?.team?.permission.hasWritePer && dataset.isOwner && (
-              <Box
-                position={'absolute'}
-                top={3}
-                right={3}
-                borderRadius={'md'}
-                _hover={{
-                  color: 'primary.500',
-                  '& .icon': {
-                    bg: 'myGray.100'
-                  }
-                }}
-                onClick={(e) => {
-                  e.stopPropagation();
-                }}
-              >
-                <MyMenu
-                  Button={
-                    <Box w={'22px'} h={'22px'}>
-                      <MyIcon
-                        className="icon"
-                        name={'more'}
-                        h={'16px'}
-                        w={'16px'}
-                        px={1}
-                        py={1}
-                        borderRadius={'md'}
-                        cursor={'pointer'}
-                      />
-                    </Box>
-                  }
-                  menuList={[
-                    {
-                      children: [
-                        {
-                          label: (
-                            <Flex alignItems={'center'}>
-                              <MyIcon name={'edit'} w={'14px'} mr={2} />
-                              {t('Rename')}
-                            </Flex>
-                          ),
-                          onClick: () =>
-                            onOpenTitleModal({
-                              defaultVal: dataset.name,
-                              onSuccess: (val) => {
-                                if (val === dataset.name || !val) return;
-                                putDatasetById({
-                                  id: dataset._id,
-                                  name: val
-                                });
-                              }
-                            })
-                        },
-                        {
-                          label: (
-                            <Flex alignItems={'center'}>
-                              <MyIcon name={'common/file/move'} w={'14px'} mr={2} />
-                              {t('Move')}
-                            </Flex>
-                          ),
-                          onClick: () => setMoveDataId(dataset._id)
-                        },
-                        {
-                          label: (
-                            <Flex alignItems={'center'}>
-                              <MyIcon name={'export'} w={'14px'} mr={2} />
-                              {t('Export')}
-                            </Flex>
-                          ),
-                          onClick: () => {
-                            exportDataset(dataset);
-                          }
-                        },
-                        ...(dataset.permission === PermissionTypeEnum.private
-                          ? [
-                              {
-                                label: (
-                                  <Flex alignItems={'center'}>
-                                    <MyIcon
-                                      name={'support/permission/publicLight'}
-                                      w={'14px'}
-                                      mr={2}
-                                    />
-                                    {t('permission.Set Public')}
-                                  </Flex>
-                                ),
-                                onClick: () => {
-                                  putDatasetById({
-                                    id: dataset._id,
-                                    permission: PermissionTypeEnum.public
-                                  });
-                                }
-                              }
-                            ]
-                          : [
-                              {
-                                label: (
-                                  <Flex alignItems={'center'}>
-                                    <MyIcon
-                                      name={'support/permission/privateLight'}
-                                      w={'14px'}
-                                      mr={2}
-                                    />
-                                    {t('permission.Set Private')}
-                                  </Flex>
-                                ),
-                                onClick: () => {
-                                  putDatasetById({
-                                    id: dataset._id,
-                                    permission: PermissionTypeEnum.private
-                                  });
-                                }
-                              }
-                            ])
-                      ]
-                    },
-                    {
-                      children: [
-                        {
-                          label: (
-                            <Flex alignItems={'center'}>
-                              <MyIcon name={'delete'} w={'14px'} mr={2} />
-                              {t('common.Delete')}
-                            </Flex>
-                          ),
-                          type: 'danger',
-                          onClick: () => {
-                            openConfirm(
-                              () => onclickDelDataset(dataset._id),
-                              undefined,
-                              DeleteTipsMap.current[dataset.type]
-                            )();
-                          }
-                        }
-                      ]
-                    }
-                  ]}
-                />
-              </Box>
-            )}
-            <Flex alignItems={'center'} h={'38px'}>
-              <Avatar src={dataset.avatar} borderRadius={'md'} w={'28px'} />
-              <Box mx={3} className="textEllipsis3">
-                {dataset.name}
-              </Box>
-            </Flex>
-            <Box
-              flex={1}
-              className={'textEllipsis3'}
-              py={1}
-              wordBreak={'break-all'}
-              fontSize={'xs'}
-              color={'myGray.500'}
-            >
-              {dataset.intro ||
-                (dataset.type === DatasetTypeEnum.folder
-                  ? t('core.dataset.Folder placeholder')
-                  : t('core.dataset.Intro Placeholder'))}
-            </Box>
-            <Flex alignItems={'center'} fontSize={'sm'}>
-              <Box flex={1}>
-                <PermissionIconText permission={dataset.permission} color={'myGray.600'} />
-              </Box>
-              {dataset.type !== DatasetTypeEnum.folder && (
-                <DatasetTypeTag type={dataset.type} py={1} px={2} />
-              )}
-            </Flex>
-          </Box>
-        ))}
-      </Grid>
-      {myDatasets.length === 0 && (
-        <EmptyTip pt={'35vh'} text={t('core.dataset.Empty Dataset Tips')}></EmptyTip>
-      )}
-      <ConfirmModal />
-      <EditTitleModal />
+      <List />
       {isOpenCreateModal && <CreateModal onClose={onCloseCreateModal} parentId={parentId} />}
       {!!editFolderData && (
         <EditFolderModal
@@ -477,16 +128,6 @@ const Dataset = () => {
           isEdit={false}
         />
       )}
-      {!!moveDataId && (
-        <MoveModal
-          moveDataId={moveDataId}
-          onClose={() => setMoveDataId('')}
-          onSuccess={() => {
-            refetch();
-            setMoveDataId('');
-          }}
-        />
-      )}
     </PageContainer>
   );
 };
@@ -499,4 +140,12 @@ export async function getServerSideProps(content: any) {
   };
 }

-export default Dataset;
+function DatasetContextWrapper() {
+  return (
+    <DatasetContextProvider>
+      <Dataset />
+    </DatasetContextProvider>
+  );
+}
+
+export default DatasetContextWrapper;
@@ -1,44 +0,0 @@
-import { DatasetDataItemType } from '@fastgpt/global/core/dataset/type';
-import { MongoDatasetData } from '@fastgpt/service/core/dataset/data/schema';
-import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
-import { AuthModeType } from '@fastgpt/service/support/permission/type';
-
-/* data permission same of collection */
-export async function authDatasetData({
-  dataId,
-  ...props
-}: AuthModeType & {
-  dataId: string;
-}) {
-  // get mongo dataset.data
-  const datasetData = await MongoDatasetData.findById(dataId);
-
-  if (!datasetData) {
-    return Promise.reject('core.dataset.error.Data not found');
-  }
-
-  const result = await authDatasetCollection({
-    ...props,
-    collectionId: datasetData.collectionId
-  });
-
-  const data: DatasetDataItemType = {
-    id: String(datasetData._id),
-    teamId: datasetData.teamId,
-    q: datasetData.q,
-    a: datasetData.a,
-    chunkIndex: datasetData.chunkIndex,
-    indexes: datasetData.indexes,
-    datasetId: String(datasetData.datasetId),
-    collectionId: String(datasetData.collectionId),
-    sourceName: result.collection.name || '',
-    sourceId: result.collection?.fileId || result.collection?.rawLink,
-    isOwner: String(datasetData.tmbId) === result.tmbId,
-    canWrite: result.canWrite
-  };
-
-  return {
-    ...result,
-    datasetData: data
-  };
-}
@@ -11,5 +11,5 @@ export const getCollaboratorList = (appId: string) =>
 export const postUpdateAppCollaborators = (body: UpdateAppCollaboratorBody) =>
   POST('/proApi/core/app/collaborator/update', body);

-export const deleteAppCollaborators = ({ ...params }: AppCollaboratorDeleteParams) =>
-  DELETE('/proApi/core/app/collaborator/delete', { ...params });
+export const deleteAppCollaborators = (params: AppCollaboratorDeleteParams) =>
+  DELETE('/proApi/core/app/collaborator/delete', params);
projects/app/src/web/core/dataset/api/collaborator.ts (Normal file, 15 lines)
@@ -0,0 +1,15 @@
+import {
+  UpdateDatasetCollaboratorBody,
+  DatasetCollaboratorDeleteParams
+} from '@fastgpt/global/core/dataset/collaborator';
+import { DELETE, GET, POST } from '@/web/common/api/request';
+import { CollaboratorItemType } from '@fastgpt/global/support/permission/collaborator';
+
+export const getCollaboratorList = (datasetId: string) =>
+  GET<CollaboratorItemType[]>('/proApi/core/dataset/collaborator/list', { datasetId });
+
+export const postUpdateDatasetCollaborators = (body: UpdateDatasetCollaboratorBody) =>
+  POST('/proApi/core/dataset/collaborator/update', body);
+
+export const deleteDatasetCollaborators = ({ ...params }: DatasetCollaboratorDeleteParams) =>
+  DELETE('/proApi/core/dataset/collaborator/delete', { ...params });
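These thin wrappers keep the pro-API collaborator routes in one place. A sketch of what a round trip might look like from calling code, assuming a valid `datasetId`; the team-member ids are placeholders and the permission value would normally come from `DatasetPermissionList`:

import {
  getCollaboratorList,
  postUpdateDatasetCollaborators,
  deleteDatasetCollaborators
} from '@/web/core/dataset/api/collaborator';

// Hypothetical flow: grant two members a permission value, then revoke one.
async function demoCollaboratorRoundTrip(datasetId: string, permissionValue: number) {
  await postUpdateDatasetCollaborators({
    datasetId,
    tmbIds: ['tmb-a', 'tmb-b'], // placeholder team-member ids
    permission: permissionValue
  });

  const list = await getCollaboratorList(datasetId); // CollaboratorItemType[]
  console.log('collaborators:', list.length);

  await deleteDatasetCollaborators({ datasetId, tmbId: 'tmb-b' });
}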
@@ -8,6 +8,8 @@ import type {
   DatasetCollectionItemType,
   DatasetItemType
 } from '@fastgpt/global/core/dataset/type.d';
+import { DatasetDefaultPermission } from '@fastgpt/global/support/permission/dataset/constant';
+import { DatasetPermission } from '@fastgpt/global/support/permission/dataset/controller';

 export const defaultDatasetDetail: DatasetItemType = {
   _id: '',
@@ -21,11 +23,10 @@ export const defaultDatasetDetail: DatasetItemType = {
   name: '',
   intro: '',
   status: 'active',
-  permission: 'private',
-  isOwner: false,
-  canWrite: false,
+  permission: new DatasetPermission(),
   vectorModel: defaultVectorModels[0],
-  agentModel: defaultQAModels[0]
+  agentModel: defaultQAModels[0],
+  defaultPermission: DatasetDefaultPermission
 };

 export const defaultCollectionDetail: DatasetCollectionItemType = {
@@ -44,20 +45,21 @@ export const defaultCollectionDetail: DatasetCollectionItemType = {
     name: '',
     intro: '',
     status: 'active',
-    permission: 'private',
+    permission: new DatasetPermission(),
     vectorModel: defaultVectorModels[0].model,
-    agentModel: defaultQAModels[0].model
+    agentModel: defaultQAModels[0].model,
+    defaultPermission: DatasetDefaultPermission
   },
   parentId: '',
   name: '',
   type: DatasetCollectionTypeEnum.file,
   updateTime: new Date(),
-  canWrite: false,
   sourceName: '',
   sourceId: '',
   createTime: new Date(),
   trainingType: TrainingModeEnum.chunk,
-  chunkSize: 0
+  chunkSize: 0,
+  permission: new DatasetPermission()
 };

 export enum ImportProcessWayEnum {
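With these defaults, `permission` is always a `DatasetPermission` instance rather than the string 'private', so UI code can read the flag getters even before real data has loaded. A one-line illustration; the import path is assumed from context, and treating a fresh instance as carrying no write flag is an assumption:

import { defaultDatasetDetail } from '@/web/core/dataset/constants'; // path assumed

// Before this commit, callers branched on loose fields like `isOwner` / `canWrite`,
// which could be undefined. Now the default object already carries a permission instance:
const canEdit = defaultDatasetDetail.permission.hasWritePer; // expected: false until data loads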