update openrouter models config

duanfuxiang 2025-02-17 15:31:25 +08:00
parent 025dc85c59
commit 8eaafd5e75
7 changed files with 145 additions and 57 deletions

View File

@@ -1,6 +1,6 @@
 import * as Tooltip from '@radix-ui/react-tooltip'
 import { Check, CopyIcon } from 'lucide-react'
-import { useMemo, useState } from 'react'
+import { useEffect, useState } from 'react'
 
 import { ChatAssistantMessage } from '../../types/chat'
 import { calculateLLMCost } from '../../utils/price-calculator'
@@ -44,15 +44,23 @@ function CopyButton({ message }: { message: ChatAssistantMessage }) {
 }
 
 function LLMResponesInfoButton({ message }: { message: ChatAssistantMessage }) {
-	const cost = useMemo<number | null>(() => {
-		if (!message.metadata?.model || !message.metadata?.usage) {
-			return 0
+	const [cost, setCost] = useState<number | null>(0);
+
+	useEffect(() => {
+		async function calculateCost() {
+			if (!message.metadata?.model || !message.metadata?.usage) {
+				setCost(0);
+				return;
+			}
+			const calculatedCost = await calculateLLMCost({
+				model: message.metadata.model,
+				usage: message.metadata.usage,
+			});
+			setCost(calculatedCost);
 		}
-		return calculateLLMCost({
-			model: message.metadata.model,
-			usage: message.metadata.usage,
-		})
-	}, [message])
+
+		calculateCost();
+	}, [message]);
 
 	return (
 		<Tooltip.Provider delayDuration={0}>
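Note on the new effect: because `calculateLLMCost` is now awaited, a stale response could overwrite the cost of a newer `message` if the prop changes while the promise is pending. A minimal sketch of the usual cleanup-flag guard, assuming the same `message`, `setCost`, and `calculateLLMCost` as above (not part of this diff):

// Sketch only: same effect as in the diff, with a cancellation flag so a stale
// calculateLLMCost result cannot overwrite the cost of a newer message.
useEffect(() => {
	let cancelled = false;

	async function calculateCost() {
		if (!message.metadata?.model || !message.metadata?.usage) {
			setCost(0);
			return;
		}
		const calculatedCost = await calculateLLMCost({
			model: message.metadata.model,
			usage: message.metadata.usage,
		});
		if (!cancelled) {
			setCost(calculatedCost);
		}
	}

	calculateCost();
	return () => {
		cancelled = true;
	};
}, [message]);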

View File

@@ -1,17 +1,31 @@
 import * as DropdownMenu from '@radix-ui/react-dropdown-menu'
 import { ChevronDown, ChevronUp } from 'lucide-react'
-import { useMemo, useState } from 'react'
+import { useEffect, useState } from 'react'
 
 import { useSettings } from '../../../contexts/SettingsContext'
 import { GetProviderModelIds } from "../../../utils/api"
 
 export function ModelSelect() {
 	const { settings, setSettings } = useSettings()
 	const [isOpen, setIsOpen] = useState(false)
+	const [chatModelId, setChatModelId] = useState(settings.chatModelId)
+	const [providerModels, setProviderModels] = useState<string[]>([])
+	const [isLoading, setIsLoading] = useState(true)
 
-	const[chatModelId, setChatModelId] = useState(settings.chatModelId)
-
-	const currProviderModels = useMemo(() => {
-		return GetProviderModelIds(settings.chatModelProvider)
+	useEffect(() => {
+		const fetchModels = async () => {
+			setIsLoading(true)
+			try {
+				const models = await GetProviderModelIds(settings.chatModelProvider)
+				setProviderModels(models)
+			} catch (error) {
+				console.error('Failed to fetch provider models:', error)
+			} finally {
+				setIsLoading(false)
+			}
+		}
+		fetchModels()
 	}, [settings.chatModelProvider])
 
 	return (
@@ -29,21 +43,25 @@ export function ModelSelect() {
 			<DropdownMenu.Content
 				className="infio-popover">
 				<ul>
-					{currProviderModels.map((modelId) => (
-						<DropdownMenu.Item
-							key={modelId}
-							onSelect={() => {
-								setChatModelId(modelId)
-								setSettings({
-									...settings,
-									chatModelId: modelId,
-								})
-							}}
-							asChild
-						>
-							<li>{modelId}</li>
-						</DropdownMenu.Item>
-					))}
+					{isLoading ? (
+						<li>Loading...</li>
+					) : (
+						providerModels.map((modelId) => (
+							<DropdownMenu.Item
+								key={modelId}
+								onSelect={() => {
+									setChatModelId(modelId)
+									setSettings({
+										...settings,
+										chatModelId: modelId,
+									})
+								}}
+								asChild
+							>
+								<li>{modelId}</li>
+							</DropdownMenu.Item>
+						))
+					)}
 				</ul>
 			</DropdownMenu.Content>
 		</DropdownMenu.Portal>

View File

@@ -58,14 +58,19 @@ const ControlArea: React.FC<ControlAreaProps> = ({
 	onModelChange,
 	isSubmitting,
 }) => {
-	const currProviderModels = useMemo(() => {
-		return GetProviderModelIds(settings.chatModelProvider)
-			.map((modelId) => (
-				<option key={modelId} value={modelId}>
-					{modelId}
-				</option>
-			))
-	}, [settings])
+	const [providerModels, setProviderModels] = useState<string[]>([]);
+
+	useEffect(() => {
+		const fetchModels = async () => {
+			try {
+				const models = await GetProviderModelIds(settings.chatModelProvider);
+				setProviderModels(models);
+			} catch (error) {
+				console.error("Failed to fetch provider models:", error);
+			}
+		};
+		fetchModels();
+	}, [settings]);
 
 	return (
 		<div className="infio-ai-block-controls">
@@ -75,7 +80,11 @@ const ControlArea: React.FC<ControlAreaProps> = ({
 				onChange={(e) => onModelChange(e.target.value)}
 				disabled={isSubmitting}
 			>
-				{currProviderModels}
+				{providerModels.map((modelId) => (
+					<option key={modelId} value={modelId}>
+						{modelId}
+					</option>
+				))}
 			</select>
 			<button
 				className="infio-ai-block-submit-button"

View File

@@ -34,7 +34,7 @@ class LLMClient {
 	async queryChatModel(messages: RequestMessage[]): Promise<Result<string, Error>> {
 		const data = await this.llm.generateResponse(this.model, {
-			model: this.model.name,
+			model: this.model.modelId,
 			messages: messages,
 			stream: false,
 		})

View File

@@ -170,9 +170,19 @@ export const ComboBoxComponent: React.FC<ComboBoxComponentProps> = ({
 	const providers = isEmbedding ? GetEmbeddingProviders() : GetAllProviders()
 
-	const modelIds = useMemo(() => {
-		return isEmbedding ? GetEmbeddingProviderModelIds(modelProvider) : GetProviderModelIds(modelProvider)
-	}, [modelProvider])
+	const [modelIds, setModelIds] = useState<string[]>([]);
+
+	// Replace useMemo with useEffect for async fetching
+	useEffect(() => {
+		const fetchModelIds = async () => {
+			const ids = isEmbedding
+				? GetEmbeddingProviderModelIds(modelProvider)
+				: await GetProviderModelIds(modelProvider);
+			setModelIds(ids);
+		};
+		fetchModelIds();
+	}, [modelProvider, isEmbedding]);
 
 	const searchableItems = useMemo(() => {
 		return modelIds.map((id) => ({
@@ -182,8 +192,8 @@ export const ComboBoxComponent: React.FC<ComboBoxComponentProps> = ({
 	}, [modelIds])
 
 	// Initialize fuse for fuzzy search (simple config; threshold can be adjusted as needed)
-	const fuse = useMemo(() => {
-		return new Fuse(searchableItems, {
+	const fuse: Fuse<SearchableItem> = useMemo(() => {
+		return new Fuse<SearchableItem>(searchableItems, {
 			keys: ["html"],
 			threshold: 0.6,
 			shouldSort: true,
@@ -200,7 +210,7 @@ export const ComboBoxComponent: React.FC<ComboBoxComponentProps> = ({
 			? highlight(fuse.search(searchTerm))
 			: searchableItems.map(item => ({
 				...item,
-				html: [{ text: item.html, isHighlighted: false }]
+				html: typeof item.html === 'string' ? [{ text: item.html, isHighlighted: false }] : item.html
 			}))
 		return results
 	}, [searchableItems, searchTerm, fuse])
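The effect above relies on an asymmetry introduced by this commit: GetProviderModelIds now returns a Promise, while GetEmbeddingProviderModelIds stays synchronous, so only one branch is awaited. A hypothetical helper, purely to make the two call shapes explicit (loadIds does not exist in the codebase):

// Hypothetical illustration of the two call shapes used in the effect above.
async function loadIds(modelProvider: ApiProvider, isEmbedding: boolean): Promise<string[]> {
	if (isEmbedding) {
		return GetEmbeddingProviderModelIds(modelProvider) // still synchronous
	}
	return await GetProviderModelIds(modelProvider) // async after this commit
}
// Awaiting the synchronous branch would also be harmless, since `await` on a
// non-promise value simply resolves it.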

View File

@@ -1,3 +1,4 @@
+import { OPENROUTER_BASE_URL } from '../constants'
 import { ApiProvider } from '../types/llm/model'
 
 export interface ModelInfo {
@@ -1143,13 +1144,50 @@ export const GetEmbeddingProviders = (): ApiProvider[] => {
 		ApiProvider.AlibabaQwen
 	]
 }
 
+let openRouterModelsCache: Record<string, ModelInfo> | null = null;
+
+async function fetchOpenRouterModels(): Promise<Record<string, ModelInfo>> {
+	if (openRouterModelsCache) {
+		return openRouterModelsCache;
+	}
+
+	try {
+		const response = await fetch(OPENROUTER_BASE_URL + "/models");
+		const data = await response.json();
+		const models: Record<string, ModelInfo> = {};
+
+		if (data?.data) {
+			for (const model of data.data) {
+				models[model.id] = {
+					maxTokens: model.top_provider?.max_completion_tokens ?? model.context_length,
+					contextWindow: model.context_length,
+					supportsImages: model.architecture?.modality?.includes("image") ?? false,
+					supportsPromptCache: false,
+					inputPrice: model.pricing?.prompt ?? 0,
+					outputPrice: model.pricing?.completion ?? 0,
+					description: model.description,
+				};
+			}
+		}
+
+		openRouterModelsCache = models;
+		return models;
+	} catch (error) {
+		console.error('Failed to fetch OpenRouter models:', error);
+		return {
+			[openRouterDefaultModelId]: openRouterDefaultModelInfo
+		};
+	}
+}
+
 // Get all models for a provider
-export const GetProviderModels = (provider: ApiProvider): Record<string, ModelInfo> => {
+export const GetProviderModels = async (provider: ApiProvider): Promise<Record<string, ModelInfo>> => {
 	switch (provider) {
 		case ApiProvider.Infio:
 			return infioModels
 		case ApiProvider.OpenRouter:
-			return {}
+			return await fetchOpenRouterModels()
 		case ApiProvider.OpenAI:
 			return openAiNativeModels
 		case ApiProvider.AlibabaQwen:
@@ -1172,7 +1210,16 @@ export const GetProviderModels = (provider: ApiProvider): Record<string, ModelInfo> => {
 			return {}
 	}
 }
 
-// Get all models for a provider
+// Get all model ids for a provider
+export const GetProviderModelIds = async (provider: ApiProvider): Promise<string[]> => {
+	const models = await GetProviderModels(provider)
+	return Object.keys(models)
+}
+
+/// Embedding models
+
+// Get all embedding models for a provider
 export const GetEmbeddingProviderModels = (provider: ApiProvider): Record<string, EmbeddingModelInfo> => {
 	switch (provider) {
 		case ApiProvider.Google:
@@ -1187,15 +1234,11 @@ export const GetEmbeddingProviderModels = (provider: ApiProvider): Record<string, EmbeddingModelInfo> => {
 			return {}
 	}
 }
 
-// Get all model ids for a provider
-export const GetProviderModelIds = (provider: ApiProvider): string[] => {
-	return Object.keys(GetProviderModels(provider))
-}
-
+// Get all embedding model ids for a provider
 export const GetEmbeddingProviderModelIds = (provider: ApiProvider): string[] => {
 	return Object.keys(GetEmbeddingProviderModels(provider))
 }
 
+// Get embedding model info for a provider and model id
 export const GetEmbeddingModelInfo = (provider: ApiProvider, modelId: string): EmbeddingModelInfo => {
 	const models = GetEmbeddingProviderModels(provider)
 	return models[modelId]
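For callers, the relevant change in this file is that GetProviderModels and GetProviderModelIds are now async, and that the OpenRouter catalog is memoized in openRouterModelsCache for the lifetime of the module. A minimal usage sketch, assuming the exports shown in this diff (listOpenRouterModelIds is a hypothetical caller, not part of this commit):

// Sketch: only the first OpenRouter lookup hits OPENROUTER_BASE_URL + "/models";
// later calls are served from openRouterModelsCache.
async function listOpenRouterModelIds(): Promise<string[]> {
	const models = await GetProviderModels(ApiProvider.OpenRouter)
	return Object.keys(models)
}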

View File

@@ -4,14 +4,14 @@ import { ResponseUsage } from '../types/llm/response'
 import { GetProviderModels } from './api'
 
 // Returns the cost in dollars. Returns null if the model is not supported.
-export const calculateLLMCost = ({
+export const calculateLLMCost = async ({
 	model,
 	usage,
 }: {
 	model: LLMModel
 	usage: ResponseUsage
-}): number | null => {
-	const providerModels = GetProviderModels(model.provider)
+}): Promise<number | null> => {
+	const providerModels = await GetProviderModels(model.provider)
 	if (!providerModels) {
 		return null
 	}
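Since calculateLLMCost is now async, every call site has to await it; the first file in this commit shows the useEffect version. A minimal caller sketch inside an async context, assuming a message metadata shape like the one used there (the logging and formatting are illustrative only):

// Sketch: awaited cost lookup; null means the model is not supported.
const cost = await calculateLLMCost({
	model: message.metadata.model,
	usage: message.metadata.usage,
})
if (cost !== null) {
	console.debug(`LLM cost: $${cost.toFixed(6)}`)
}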