fix: collection list api old version (#3591)
* fix: collection list api format
* fix: type error of addSourceMember
parent 6f8c6b6ad1
commit 11cfe8a809
@@ -147,8 +147,7 @@ async function handler(
   ]);

   const listWithSourceMember = await addSourceMember({
-    list: list,
-    teamId
+    list: list
   });

   const listWithoutTmbId = list.filter((item) => !item.tmbId);
@@ -203,8 +203,7 @@ async function handler(req: ApiRequestProps<ListAppBody>): Promise<AppListItemTy
     .filter((app) => app.permission.hasReadPer);

   return addSourceMember({
-    list: formatApps,
-    teamId
+    list: formatApps
   });
 }

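Both call sites drop the explicit `teamId` argument, which is the "type error of addSourceMember" named in the commit message. A plausible reading (not confirmed by this diff) is that `addSourceMember` now derives member info from the `tmbId` carried on each list item, so its props type no longer accepts `teamId`. A minimal sketch of the assumed signature:

```ts
// Hypothetical sketch only; the real helper lives in the FastGPT service
// layer and may differ in shape and return type.
type WithTmb = { tmbId?: string };

declare function addSourceMember<T extends WithTmb>(props: {
  list: T[]; // each item's tmbId identifies its source member
}): Promise<(T & { sourceMember?: { name: string; avatar?: string } })[]>;

// After this commit, callers pass only the list:
// const listWithSourceMember = await addSourceMember({ list });
```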
@@ -2,7 +2,6 @@ import type { NextApiRequest } from 'next';
 import { DatasetTrainingCollectionName } from '@fastgpt/service/core/dataset/training/schema';
 import { Types } from '@fastgpt/service/common/mongo';
 import type { DatasetCollectionsListItemType } from '@/global/core/dataset/type.d';
-import type { GetDatasetCollectionsProps } from '@/global/core/api/datasetReq';
 import { MongoDatasetCollection } from '@fastgpt/service/core/dataset/collection/schema';
 import { DatasetCollectionTypeEnum } from '@fastgpt/global/core/dataset/constants';
 import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
@@ -12,23 +11,20 @@ import { NextAPI } from '@/service/middleware/entry';
 import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
 import { readFromSecondary } from '@fastgpt/service/common/mongo/utils';
 import { collectionTagsToTagLabel } from '@fastgpt/service/core/dataset/collection/utils';
-import { PaginationResponse } from '@fastgpt/web/common/fetch/type';
-import { parsePaginationRequest } from '@fastgpt/service/common/api/pagination';

-async function handler(
-  req: NextApiRequest
-): Promise<PaginationResponse<DatasetCollectionsListItemType>> {
+async function handler(req: NextApiRequest) {
   let {
+    pageNum = 1,
+    pageSize = 10,
     datasetId,
     parentId = null,
     searchText = '',
     selectFolder = false,
     filterTags = [],
     simple = false
-  } = req.body as GetDatasetCollectionsProps;
-  let { pageSize, offset } = parsePaginationRequest(req);
-  pageSize = Math.min(pageSize, 30);
+  } = req.body as any;
   searchText = searchText?.replace(/'/g, '');
+  pageSize = Math.min(pageSize, 30);

   // auth dataset and get my role
   const { teamId, permission } = await authDataset({
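This restores the old-format handler: pagination comes from `pageNum`/`pageSize` in the request body again instead of `parsePaginationRequest`, and the typed return annotation is dropped. Since `GetDatasetCollectionsProps` is no longer imported, the body is cast to `any`; the shape it actually reads is the one destructured above. An illustrative (not authoritative) type for that body:

```ts
// Illustrative only: the body shape implied by the destructuring in the
// old-format list handler.
type ListCollectionsBody = {
  pageNum?: number; // 1-based page index, defaults to 1
  pageSize?: number; // defaults to 10, capped at 30 by the handler
  datasetId: string;
  parentId?: string | null;
  searchText?: string;
  selectFolder?: boolean;
  filterTags?: string[];
  simple?: boolean;
};
```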
@@ -80,7 +76,9 @@ async function handler(
       .lean();

     return {
-      list: await Promise.all(
+      pageNum,
+      pageSize,
+      data: await Promise.all(
         collections.map(async (item) => ({
           ...item,
           tags: await collectionTagsToTagLabel({
@@ -105,7 +103,7 @@ async function handler(
       {
         $sort: { updateTime: -1 }
       },
       {
-        $skip: offset
+        $skip: (pageNum - 1) * pageSize
       },
       {
         $limit: pageSize
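The two pagination styles compute the same skip count: the old endpoint derives it from a 1-based page number, while listV2 (added below) receives an absolute `offset`. A quick illustration of the equivalence:

```ts
// Equivalence between the two pagination styles (illustrative values).
const pageNum = 3;
const pageSize = 10;

const skipFromPage = (pageNum - 1) * pageSize; // old list handler: 20
const offset = 20; // what an offset-style (listV2) client sends directly

console.assert(skipFromPage === offset);
```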
@@ -167,7 +165,7 @@ async function handler(
     })
   ]);

-  const list = await Promise.all(
+  const data = await Promise.all(
     collections.map(async (item) => ({
       ...item,
       tags: await collectionTagsToTagLabel({
@@ -178,13 +176,15 @@ async function handler(
     }))
   );

-  if (list.find((item) => item.trainingAmount > 0)) {
+  if (data.find((item) => item.trainingAmount > 0)) {
     startTrainingQueue();
   }

   // count collections
   return {
-    list,
+    pageNum,
+    pageSize,
+    data,
     total
   };
 }
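The revert also restores the old response envelope: the old endpoint returns `{ pageNum, pageSize, data, total }`, while listV2 returns a `PaginationResponse`, which (judging from the listV2 return statements below) is the `{ list, total }` pair. Sketch of the two shapes, with the second one assumed rather than taken from the library source:

```ts
// Response envelopes implied by the two handlers (illustrative types).
type OldListResponse<T> = {
  pageNum: number;
  pageSize: number;
  data: T[];
  total: number;
};

// Assumed shape of PaginationResponse from '@fastgpt/web/common/fetch/type':
type PaginationResponse<T> = {
  list: T[];
  total: number;
};
```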
projects/app/src/pages/api/core/dataset/collection/listV2.ts (new file, 192 lines)
@@ -0,0 +1,192 @@
import type { NextApiRequest } from 'next';
import { DatasetTrainingCollectionName } from '@fastgpt/service/core/dataset/training/schema';
import { Types } from '@fastgpt/service/common/mongo';
import type { DatasetCollectionsListItemType } from '@/global/core/dataset/type.d';
import type { GetDatasetCollectionsProps } from '@/global/core/api/datasetReq';
import { MongoDatasetCollection } from '@fastgpt/service/core/dataset/collection/schema';
import { DatasetCollectionTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { DatasetDataCollectionName } from '@fastgpt/service/core/dataset/data/schema';
import { startTrainingQueue } from '@/service/core/dataset/training/utils';
import { NextAPI } from '@/service/middleware/entry';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
import { readFromSecondary } from '@fastgpt/service/common/mongo/utils';
import { collectionTagsToTagLabel } from '@fastgpt/service/core/dataset/collection/utils';
import { PaginationResponse } from '@fastgpt/web/common/fetch/type';
import { parsePaginationRequest } from '@fastgpt/service/common/api/pagination';

async function handler(
  req: NextApiRequest
): Promise<PaginationResponse<DatasetCollectionsListItemType>> {
  let {
    datasetId,
    parentId = null,
    searchText = '',
    selectFolder = false,
    filterTags = [],
    simple = false
  } = req.body as GetDatasetCollectionsProps;
  let { pageSize, offset } = parsePaginationRequest(req);
  pageSize = Math.min(pageSize, 30);
  searchText = searchText?.replace(/'/g, '');

  // auth dataset and get my role
  const { teamId, permission } = await authDataset({
    req,
    authToken: true,
    authApiKey: true,
    datasetId,
    per: ReadPermissionVal
  });

  const match = {
    teamId: new Types.ObjectId(teamId),
    datasetId: new Types.ObjectId(datasetId),
    parentId: parentId ? new Types.ObjectId(parentId) : null,
    ...(selectFolder ? { type: DatasetCollectionTypeEnum.folder } : {}),
    ...(searchText
      ? {
          name: new RegExp(searchText, 'i')
        }
      : {}),
    ...(filterTags.length ? { tags: { $in: filterTags } } : {})
  };

  const selectField = {
    _id: 1,
    parentId: 1,
    tmbId: 1,
    name: 1,
    type: 1,
    forbid: 1,
    createTime: 1,
    updateTime: 1,
    trainingType: 1,
    fileId: 1,
    rawLink: 1,
    tags: 1,
    externalFileId: 1
  };

  // not count data amount
  if (simple) {
    const collections = await MongoDatasetCollection.find(match, undefined, {
      ...readFromSecondary
    })
      .select(selectField)
      .sort({
        updateTime: -1
      })
      .lean();

    return {
      list: await Promise.all(
        collections.map(async (item) => ({
          ...item,
          tags: await collectionTagsToTagLabel({
            datasetId,
            tags: item.tags
          }),
          dataAmount: 0,
          trainingAmount: 0,
          permission
        }))
      ),
      total: await MongoDatasetCollection.countDocuments(match)
    };
  }

  const [collections, total]: [DatasetCollectionsListItemType[], number] = await Promise.all([
    MongoDatasetCollection.aggregate([
      {
        $match: match
      },
      {
        $sort: { updateTime: -1 }
      },
      {
        $skip: offset
      },
      {
        $limit: pageSize
      },
      // count training data
      {
        $lookup: {
          from: DatasetTrainingCollectionName,
          let: { id: '$_id', team_id: match.teamId, dataset_id: match.datasetId },
          pipeline: [
            {
              $match: {
                $expr: {
                  $and: [{ $eq: ['$teamId', '$$team_id'] }, { $eq: ['$collectionId', '$$id'] }]
                }
              }
            },
            { $count: 'count' }
          ],
          as: 'trainingCount'
        }
      },
      // count collection total data
      {
        $lookup: {
          from: DatasetDataCollectionName,
          let: { id: '$_id', team_id: match.teamId, dataset_id: match.datasetId },
          pipeline: [
            {
              $match: {
                $expr: {
                  $and: [
                    { $eq: ['$teamId', '$$team_id'] },
                    { $eq: ['$datasetId', '$$dataset_id'] },
                    { $eq: ['$collectionId', '$$id'] }
                  ]
                }
              }
            },
            { $count: 'count' }
          ],
          as: 'dataCount'
        }
      },
      {
        $project: {
          ...selectField,
          dataAmount: {
            $ifNull: [{ $arrayElemAt: ['$dataCount.count', 0] }, 0]
          },
          trainingAmount: {
            $ifNull: [{ $arrayElemAt: ['$trainingCount.count', 0] }, 0]
          }
        }
      }
    ]),
    MongoDatasetCollection.countDocuments(match, {
      ...readFromSecondary
    })
  ]);

  const list = await Promise.all(
    collections.map(async (item) => ({
      ...item,
      tags: await collectionTagsToTagLabel({
        datasetId,
        tags: item.tags
      }),
      permission
    }))
  );

  if (list.find((item) => item.trainingAmount > 0)) {
    startTrainingQueue();
  }

  // count collections
  return {
    list,
    total
  };
}

export default NextAPI(handler);
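listV2 keeps the offset-based flow: `parsePaginationRequest` presumably normalizes `pageSize` and `offset` from the request (its actual implementation is not part of this diff), and the two `$lookup` stages count training and data rows per collection without loading them. A hedged sketch of what `parsePaginationRequest` might do, assuming it accepts either an absolute `offset` or a 1-based `pageNum`:

```ts
// Hypothetical sketch of parsePaginationRequest; the real implementation in
// @fastgpt/service/common/api/pagination may differ.
import type { NextApiRequest } from 'next';

function parsePaginationRequestSketch(req: NextApiRequest) {
  const { pageSize = 10, offset, pageNum = 1 } = (req.body ?? {}) as {
    pageSize?: number;
    offset?: number;
    pageNum?: number;
  };
  return {
    pageSize: Number(pageSize),
    // Fall back to pageNum when no absolute offset is given.
    offset: offset ?? (Number(pageNum) - 1) * Number(pageSize)
  };
}
```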
@@ -107,7 +107,7 @@ export const postSearchText = (data: SearchTestProps) =>

 /* ============================= collections ==================================== */
 export const getDatasetCollections = (data: GetDatasetCollectionsProps) =>
-  POST<PaginationResponse<DatasetCollectionsListItemType>>(`/core/dataset/collection/list`, data);
+  POST<PaginationResponse<DatasetCollectionsListItemType>>(`/core/dataset/collection/listV2`, data);
 export const getDatasetCollectionPathById = (parentId: string) =>
   GET<ParentTreePathItemType[]>(`/core/dataset/collection/paths`, { parentId });
 export const getDatasetCollectionById = (id: string) =>
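With the web client now pointed at listV2, a call supplies offset-style pagination alongside the filter fields. A usage sketch, with parameter names taken from the handler above and the cast flagging what this diff does not confirm:

```ts
// Illustrative call through the updated client wrapper (inside an async function).
const res = await getDatasetCollections({
  datasetId: 'datasetId', // placeholder id
  parentId: null,
  searchText: '',
  offset: 0, // listV2 paginates by offset rather than pageNum
  pageSize: 20
} as any); // GetDatasetCollectionsProps may not yet declare offset/pageSize

console.log(res.list.length, res.total);
```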