fix: model test channel id; fix: quote reader (#4123)
* fix: model test channel id
* fix: quote reader
parent d1ce3e2936
commit d052d0de53
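For orientation before the hunks: a minimal TypeScript sketch (not code from the repository) of how the channel id is now threaded from the channel table into the model test request. The type mirrors `testQuery` from the diff below; the fetch-based helper and `buildProxyHeaders` are illustrative stand-ins for the project's own GET wrapper and handler.

// Sketch only: the channel id accompanies the model name on the test request,
// and the server forwards it to the proxy as a header.
type TestQuery = { model: string; channelId?: number }; // mirrors testQuery in the diff

const getTestModelSketch = (data: TestQuery) => {
  const params = new URLSearchParams({ model: data.model });
  if (data.channelId !== undefined) params.set('channelId', String(data.channelId));
  return fetch(`/api/core/ai/model/test?${params.toString()}`).then((res) => res.json());
};

// Server side: a present channelId is stringified into the Aiproxy-Channel header.
const buildProxyHeaders = (channelId?: number): Record<string, string> =>
  channelId ? { 'Aiproxy-Channel': String(channelId) } : {};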
@@ -44,6 +44,7 @@ const ResponseTags = ({
  const chatType = useContextSelector(ChatBoxContext, (v) => v.chatType);
  const appId = useContextSelector(ChatBoxContext, (v) => v.appId);
  const chatId = useContextSelector(ChatBoxContext, (v) => v.chatId);
  const outLinkAuthData = useContextSelector(ChatBoxContext, (v) => v.outLinkAuthData);

  const setQuoteData = useContextSelector(ChatItemContext, (v) => v.setQuoteData);

@@ -65,6 +66,7 @@ const ResponseTags = ({
        ? quoteListRef.current.scrollHeight > (isPc ? 50 : 55)
        : true;

  const isShowReadRawSource = useContextSelector(ChatItemContext, (v) => v.isShowReadRawSource);
  const sourceList = useMemo(() => {
    return Object.values(
      quoteList.reduce((acc: Record<string, SearchDataResponseItemType[]>, cur) => {
@@ -157,18 +159,34 @@ const ResponseTags = ({
              onClick={(e) => {
                e.stopPropagation();

                setQuoteData({
                  rawSearch: quoteList,
                  metadata: {
                    appId,
                    chatId,
                    chatItemDataId: dataId,
                    collectionId: item.collectionId,
                    sourceId: item.sourceId || '',
                    sourceName: item.sourceName,
                    datasetId: item.datasetId
                  }
                });
                if (isShowReadRawSource) {
                  setQuoteData({
                    rawSearch: quoteList,
                    metadata: {
                      appId,
                      chatId,
                      chatItemDataId: dataId,
                      collectionId: item.collectionId,
                      sourceId: item.sourceId || '',
                      sourceName: item.sourceName,
                      datasetId: item.datasetId,
                      outLinkAuthData
                    }
                  });
                } else {
                  setQuoteData({
                    rawSearch: quoteList,
                    metadata: {
                      appId,
                      chatId,
                      chatItemDataId: dataId,
                      collectionIdList: [item.collectionId],
                      sourceId: item.sourceId || '',
                      sourceName: item.sourceName,
                      outLinkAuthData
                    }
                  });
                }
              }}
              height={6}
            >
@@ -230,7 +248,8 @@ const ResponseTags = ({
              appId,
              chatId,
              chatItemDataId: dataId,
              collectionIdList: [...new Set(quoteList.map((item) => item.collectionId))]
              collectionIdList: [...new Set(quoteList.map((item) => item.collectionId))],
              outLinkAuthData
            }
          });
        }}

@@ -34,7 +34,15 @@ type ModelTestItem = {
  duration?: number;
};

const ModelTest = ({ models, onClose }: { models: string[]; onClose: () => void }) => {
const ModelTest = ({
  channelId,
  models,
  onClose
}: {
  channelId: number;
  models: string[];
  onClose: () => void;
}) => {
  const { t } = useTranslation();
  const { toast } = useToast();
  const [testModelList, setTestModelList] = useState<ModelTestItem[]>([]);
@@ -57,6 +65,7 @@ const ModelTest = ({ models, onClose }: { models: string[]; onClose: () => void
        colorSchema: 'red'
      }
    });

  const { loading: loadingModels } = useRequest2(getSystemModelList, {
    manual: false,
    refreshDeps: [models],
@@ -95,7 +104,7 @@ const ModelTest = ({ models, onClose }: { models: string[]; onClose: () => void
      );
      const start = Date.now();
      try {
        await getTestModel(model);
        await getTestModel({ model, channelId });
        const duration = Date.now() - start;
        setTestModelList((prev) =>
          prev.map((item) =>

@@ -74,7 +74,7 @@ const ChannelTable = ({ Tab }: { Tab: React.ReactNode }) => {
    }
  });

  const [testModels, setTestModels] = useState<string[]>();
  const [modelTestData, setTestModelData] = useState<{ channelId: number; models: string[] }>();

  const isLoading =
    loadingChannelList ||
@@ -165,7 +165,11 @@ const ChannelTable = ({ Tab }: { Tab: React.ReactNode }) => {
          {
            icon: 'core/chat/sendLight',
            label: t('account_model:model_test'),
            onClick: () => setTestModels(item.models)
            onClick: () =>
              setTestModelData({
                channelId: item.id,
                models: item.models
              })
          },
          ...(item.status === ChannelStatusEnum.ChannelStatusEnabled
            ? [
@@ -222,7 +226,9 @@ const ChannelTable = ({ Tab }: { Tab: React.ReactNode }) => {
          onSuccess={refreshChannelList}
        />
      )}
      {!!testModels && <ModelTest models={testModels} onClose={() => setTestModels(undefined)} />}
      {!!modelTestData && (
        <ModelTest {...modelTestData} onClose={() => setTestModelData(undefined)} />
      )}
    </>
  );
};

@@ -35,9 +35,11 @@ const CollectionReader = ({
  const { t } = useTranslation();
  const router = useRouter();
  const { userInfo } = useUserStore();

  const { collectionId, datasetId, chatItemDataId, sourceId, sourceName } = metadata;
  const [quoteIndex, setQuoteIndex] = useState(0);

  // Get dataset permission
  const { data: permissionData, loading: isPermissionLoading } = useRequest2(
    async () => await getDatasetDataPermission(datasetId),
    {
@@ -56,6 +58,7 @@ const CollectionReader = ({

  const currentQuoteItem = filterResults[quoteIndex];

  // Get quote list
  const {
    dataList: datasetDataList,
    setDataList: setDatasetDataList,

@@ -22,10 +22,14 @@ const QuoteReader = ({
}) => {
  const { t } = useTranslation();

  const filterRawSearch = useMemo(() => {
    return rawSearch.filter((item) => metadata.collectionIdList.includes(item.collectionId));
  }, [rawSearch, metadata.collectionIdList]);

  const { data: quoteList, loading } = useRequest2(
    async () =>
      await getQuoteDataList({
        datasetDataIdList: rawSearch.map((item) => item.id),
        datasetDataIdList: filterRawSearch.map((item) => item.id),
        collectionIdList: metadata.collectionIdList,
        chatItemDataId: metadata.chatItemDataId,
        appId: metadata.appId,
@@ -33,12 +37,13 @@ const QuoteReader = ({
        ...metadata.outLinkAuthData
      }),
    {
      refreshDeps: [metadata, filterRawSearch],
      manual: false
    }
  );

  const formatedDataList = useMemo(() => {
    return rawSearch
    return filterRawSearch
      .map((searchItem) => {
        const dataItem = quoteList?.find((item) => item._id === searchItem.id);

@@ -57,7 +62,7 @@ const QuoteReader = ({
      .sort((a, b) => {
        return (b.score.primaryScore?.value || 0) - (a.score.primaryScore?.value || 0);
      });
  }, [quoteList, rawSearch]);
  }, [quoteList, filterRawSearch]);

  return (
    <Flex flexDirection={'column'} h={'full'}>
@@ -71,16 +76,48 @@ const QuoteReader = ({
      >
        <Box flex={1} py={4}>
          <Flex gap={2} mr={2} mb={1}>
            <MyIcon name={'core/chat/quoteFill'} w={['1rem', '1.25rem']} color={'primary.600'} />
            <Box
              maxW={['200px', '300px']}
              className={'textEllipsis'}
              wordBreak={'break-all'}
              color={'myGray.900'}
              fontWeight={'medium'}
            >
              {t('common:core.chat.Quote Amount', { amount: rawSearch.length })}
            </Box>
            {metadata.sourceId ? (
              <>
                <MyIcon
                  name={
                    getSourceNameIcon({
                      sourceId: metadata.sourceId,
                      sourceName: metadata.sourceName || ''
                    }) as any
                  }
                  w={['1rem', '1.25rem']}
                  color={'primary.600'}
                />
                <Box
                  ml={1}
                  maxW={['200px', '220px']}
                  className={'textEllipsis'}
                  wordBreak={'break-all'}
                  fontSize={'sm'}
                  color={'myGray.900'}
                  fontWeight={'medium'}
                >
                  {metadata.sourceName || t('common:common.UnKnow Source')}
                </Box>
              </>
            ) : (
              <>
                <MyIcon
                  name={'core/chat/quoteFill'}
                  w={['1rem', '1.25rem']}
                  color={'primary.600'}
                />
                <Box
                  maxW={['200px', '300px']}
                  className={'textEllipsis'}
                  wordBreak={'break-all'}
                  color={'myGray.900'}
                  fontWeight={'medium'}
                >
                  {t('common:core.chat.Quote Amount', { amount: filterRawSearch.length })}
                </Box>
              </>
            )}
          </Flex>
          <Box fontSize={'mini'} color={'myGray.500'}>
            {t('common:core.chat.quote.Quote Tip')}

@@ -1,9 +1,9 @@
import React from 'react';
import { SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type';
import { useContextSelector } from 'use-context-selector';
import { ChatItemContext, GetQuoteProps } from '@/web/core/chat/context/chatItemContext';
import CollectionQuoteReader from './CollectionQuoteReader';
import QuoteReader from './QuoteReader';
import { useContextSelector } from 'use-context-selector';

const ChatQuoteList = ({
  rawSearch = [],
@@ -18,7 +18,7 @@ const ChatQuoteList = ({

  return (
    <>
      {'collectionId' in metadata && isShowReadRawSource && (
      {'collectionId' in metadata && (
        <CollectionQuoteReader rawSearch={rawSearch} metadata={metadata} onClose={onClose} />
      )}
      {'collectionIdList' in metadata && (

@@ -1,7 +1,7 @@
import type { ApiRequestProps, ApiResponseType } from '@fastgpt/service/type/next';
import { NextAPI } from '@/service/middleware/entry';
import { authSystemAdmin } from '@fastgpt/service/support/permission/user/auth';
import { findModelFromAlldata, getReRankModel } from '@fastgpt/service/core/ai/model';
import { findModelFromAlldata } from '@fastgpt/service/core/ai/model';
import {
  EmbeddingModelItemType,
  LLMModelItemType,
@@ -9,7 +9,7 @@ import {
  STTModelType,
  TTSModelType
} from '@fastgpt/global/core/ai/model.d';
import { getAIApi } from '@fastgpt/service/core/ai/config';
import { createChatCompletion, getAIApi } from '@fastgpt/service/core/ai/config';
import { addLog } from '@fastgpt/service/common/system/log';
import { getVectorsByText } from '@fastgpt/service/core/ai/embedding';
import { reRankRecall } from '@fastgpt/service/core/ai/rerank';
@@ -18,7 +18,7 @@ import { isProduction } from '@fastgpt/global/common/system/constants';
import * as fs from 'fs';
import { llmCompletionsBodyFormat } from '@fastgpt/service/core/ai/utils';

export type testQuery = { model: string; channelId?: string };
export type testQuery = { model: string; channelId?: number };

export type testBody = {};

@@ -37,7 +37,7 @@ async function handler(

  const headers: Record<string, string> = channelId
    ? {
        'Aiproxy-Channel': channelId
        'Aiproxy-Channel': String(channelId)
      }
    : {};

@@ -75,26 +75,33 @@ const testLLMModel = async (model: LLMModelItemType, headers: Record<string, str
    },
    model
  );
  const response = await ai.chat.completions.create(requestBody, {
    ...(model.requestUrl ? { path: model.requestUrl } : {}),
    headers: model.requestAuth
      ? {
          Authorization: `Bearer ${model.requestAuth}`,
          ...headers
        }
      : headers
  const { response, isStreamResponse } = await createChatCompletion({
    body: requestBody,
    options: {
      headers: {
        Accept: 'application/json, text/plain, */*',
        ...headers
      }
    }
  });

  for await (const part of response) {
    const content = part.choices?.[0]?.delta?.content || '';
    // @ts-ignore
    const reasoningContent = part.choices?.[0]?.delta?.reasoning_content || '';
    if (content || reasoningContent) {
      response?.controller?.abort();
      return;
  if (isStreamResponse) {
    for await (const part of response) {
      const content = part.choices?.[0]?.delta?.content || '';
      // @ts-ignore
      const reasoningContent = part.choices?.[0]?.delta?.reasoning_content || '';
      if (content || reasoningContent) {
        response?.controller?.abort();
        return;
      }
    }
  } else {
    addLog.info(`Model not stream response`);
    const answer = response.choices?.[0]?.message?.content || '';
    if (answer) {
      return answer;
    }
  }
  addLog.info(`Model not stream response`);

  return Promise.reject('Model response empty');
};

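The rewritten test path above also has to tolerate channels that answer with a single JSON completion instead of a stream. A rough, self-contained sketch of that branch follows; the type shapes are inferred from the hunk above and are not the project's actual types.

// Sketch only. StreamPart, JsonCompletion and ChatCompletionResult are
// hypothetical stand-ins for what createChatCompletion returns per the hunk above.
type StreamPart = { choices?: { delta?: { content?: string; reasoning_content?: string } }[] };
type JsonCompletion = { choices?: { message?: { content?: string } }[] };
type ChatCompletionResult =
  | { isStreamResponse: true; response: AsyncIterable<StreamPart> & { controller?: AbortController } }
  | { isStreamResponse: false; response: JsonCompletion };

async function probeCompletion(result: ChatCompletionResult): Promise<string | void> {
  if (result.isStreamResponse) {
    // Streaming: succeed on the first delta that carries content or reasoning.
    for await (const part of result.response) {
      const delta = part.choices?.[0]?.delta;
      if (delta?.content || delta?.reasoning_content) {
        result.response.controller?.abort();
        return;
      }
    }
  } else {
    // Non-streaming: accept any non-empty message body.
    const answer = result.response.choices?.[0]?.message?.content || '';
    if (answer) return answer;
  }
  return Promise.reject('Model response empty');
}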
@@ -52,7 +52,7 @@ async function handler(

  const limitedPageSize = Math.min(pageSize, 30);

  const [{ chat }, { chatItem }] = await Promise.all([
  const [{ chat, showRawSource }, { chatItem }] = await Promise.all([
    authChatCrud({
      req,
      authToken: true,
@@ -65,6 +65,9 @@ async function handler(
    }),
    authCollectionInChat({ appId, chatId, chatItemDataId, collectionIds: [collectionId] })
  ]);
  if (!showRawSource) {
    return Promise.reject(ChatErrEnum.unAuthChat);
  }
  if (!chat) return Promise.reject(ChatErrEnum.unAuthChat);

  const baseMatch: BaseMatchType = {

@@ -245,16 +245,15 @@ const OutLink = (props: Props) => {
        desc={props.appIntro || data?.app?.intro}
        icon={props.appAvatar || data?.app?.avatar}
      />
      <Flex h={'full'}>
      <Flex
        h={'full'}
        gap={4}
        {...(isEmbed
          ? { p: '0 !important', insertProps: { borderRadius: '0', boxShadow: 'none' } }
          : { p: [0, 5] })}
      >
        {(!quoteData || isPc) && (
        <PageContainer
          flex={'1 0 0'}
          w={0}
          isLoading={loading}
          {...(isEmbed
            ? { p: '0 !important', insertProps: { borderRadius: '0', boxShadow: 'none' } }
            : { p: [0, 5] })}
        >
          <PageContainer flex={'1 0 0'} w={0} isLoading={loading} p={'0 !important'}>
            <Flex h={'100%'} flexDirection={['column', 'row']}>
              {RenderHistoryList}

@@ -302,7 +301,7 @@ const OutLink = (props: Props) => {
        )}

        {quoteData && (
          <PageContainer flex={'1 0 0'} w={0} maxW={'560px'}>
          <PageContainer flex={'1 0 0'} w={0} maxW={'560px'} p={'0 !important'}>
            <ChatQuoteList
              rawSearch={quoteData.rawSearch}
              metadata={quoteData.metadata}

@@ -5,6 +5,7 @@ import type { deleteQuery } from '@/pages/api/core/ai/model/delete';
import type { SystemModelItemType } from '@fastgpt/service/core/ai/type';
import type { updateWithJsonBody } from '@/pages/api/core/ai/model/updateWithJson';
import type { updateDefaultBody } from '@/pages/api/core/ai/model/updateDefault';
import type { testQuery } from '@/pages/api/core/ai/model/test';

export const getSystemModelList = () => GET<listResponse>('/core/ai/model/list');
export const getSystemModelDetail = (model: string) =>
@@ -21,7 +22,7 @@ export const getModelConfigJson = () => GET<string>('/core/ai/model/getConfigJso
export const putUpdateWithJson = (data: updateWithJsonBody) =>
  PUT('/core/ai/model/updateWithJson', data);

export const getTestModel = (model: String) => GET('/core/ai/model/test', { model });
export const getTestModel = (data: testQuery) => GET('/core/ai/model/test', data);

export const putUpdateDefaultModels = (data: updateDefaultBody) =>
  PUT('/core/ai/model/updateDefault', data);

@@ -48,6 +48,8 @@ export type GetCollectionQuoteDataProps = GetQuoteDataBasicProps & {
};
export type GetAllQuoteDataProps = GetQuoteDataBasicProps & {
  collectionIdList: string[];
  sourceId?: string;
  sourceName?: string;
};
export type GetQuoteProps = GetAllQuoteDataProps | GetCollectionQuoteDataProps;
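Taken together, the hunks above turn the quote metadata into a small union, and the reader component is chosen by which key is present. A sketch of that shape follows; the field lists for the basic and collection variants are inferred from the setQuoteData calls in ResponseTags earlier in this commit, not copied from the source file.

// Sketch only; only GetAllQuoteDataProps is shown verbatim in the diff above,
// the other field lists are inferred from the setQuoteData calls.
type GetQuoteDataBasicProps = {
  appId: string;
  chatId: string;
  chatItemDataId: string;
  outLinkAuthData?: Record<string, unknown>;
};

type GetCollectionQuoteDataProps = GetQuoteDataBasicProps & {
  collectionId: string;
  sourceId?: string;
  sourceName?: string;
  datasetId?: string;
};

type GetAllQuoteDataProps = GetQuoteDataBasicProps & {
  collectionIdList: string[];
  sourceId?: string;
  sourceName?: string;
};

type GetQuoteProps = GetAllQuoteDataProps | GetCollectionQuoteDataProps;

// CollectionQuoteReader handles a single collection (raw-source view);
// QuoteReader handles a collection list and filters rawSearch down to it.
const pickReader = (metadata: GetQuoteProps) =>
  'collectionId' in metadata ? 'CollectionQuoteReader' : 'QuoteReader';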