update models selected

parent 57c3efdac3
commit d3590a9b76
@@ -4,7 +4,7 @@ import { t } from '../../lang/helpers';
 import InfioPlugin from "../../main";
 import { ApiProvider } from '../../types/llm/model';
 import { InfioSettings } from '../../types/settings';
-import { GetAllProviders } from '../../utils/api';
+import { GetAllProviders, GetDefaultModelId, GetEmbeddingProviders } from '../../utils/api';
 import { getProviderApiUrl } from '../../utils/provider-urls';

 import { ApiKeyComponent, CustomUrlComponent } from './FormComponents';
@@ -57,7 +57,72 @@ const CustomProviderSettings: React.FC<CustomProviderSettingsProps> = ({ plugin,
 		onSettingsUpdate?.();
 	};

-	const providers = GetAllProviders();
+	const providers = GetAllProviders(); // sorted by importance
+	const embeddingProviders = GetEmbeddingProviders(); // sorted by importance
+
+	// Get the list of providers that already have an API key set
+	const getSettedProviders = (): ApiProvider[] => {
+		return providers.filter(provider => {
+			const providerSetting = getProviderSetting(provider);
+			return providerSetting.apiKey && providerSetting.apiKey.trim() !== '';
+		});
+	};
+
+	// One-click model configuration
+	const handleOneClickConfig = () => {
+		const settedProviders = getSettedProviders();
+
+		if (settedProviders.length === 0) {
+			// Tell the user that no API key has been set
+			alert("No API key has been set yet");
+			return;
+		}
+
+		// Pick the chat/autocomplete provider (the first match in the providers order)
+		const selectedProvider = providers.find(provider => settedProviders.includes(provider));
+
+		// Pick the embedding provider (the first match in the embeddingProviders order)
+		const embeddingProvider = embeddingProviders.find(provider => settedProviders.includes(provider));
+
+		// Prepare the settings object to update
+		const newSettings = { ...settings };
+		let hasUpdates = false;
+
+		if (selectedProvider) {
+			const defaultModels = GetDefaultModelId(selectedProvider);
+
+			// Set the chat and autocomplete models
+			if (defaultModels.chat) {
+				newSettings.chatModelProvider = selectedProvider;
+				newSettings.chatModelId = defaultModels.chat;
+				hasUpdates = true;
+				console.log(`Auto-configured chat model: ${selectedProvider}/${defaultModels.chat}`);
+			}
+			if (defaultModels.autoComplete) {
+				newSettings.applyModelProvider = selectedProvider;
+				newSettings.applyModelId = defaultModels.autoComplete;
+				hasUpdates = true;
+				console.log(`Auto-configured autocomplete model: ${selectedProvider}/${defaultModels.autoComplete}`);
+			}
+		}
+
+		if (embeddingProvider) {
+			const embeddingDefaultModels = GetDefaultModelId(embeddingProvider);
+
+			// Set the embedding model
+			if (embeddingDefaultModels.embedding) {
+				newSettings.embeddingModelProvider = embeddingProvider;
+				newSettings.embeddingModelId = embeddingDefaultModels.embedding;
+				hasUpdates = true;
+				console.log(`Auto-configured embedding model: ${embeddingProvider}/${embeddingDefaultModels.embedding}`);
+			}
+		}
+
+		// Apply all settings updates at once
+		if (hasUpdates) {
+			handleSettingsUpdate(newSettings);
+		}
+	};
+
 	const updateProviderApiKey = (provider: ApiProvider, value: string) => {
 		const providerKey = getProviderSettingKey(provider);
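
The selection above relies on providers and embeddingProviders being sorted by importance: Array.prototype.find returns the first configured entry, so the highest-priority provider with an API key wins. A minimal standalone sketch of that pick, with a simplified provider list and a hypothetical hasApiKey map standing in for the plugin's getProviderSetting lookup:

// Standalone sketch of the priority-based pick used by handleOneClickConfig.
// Provider names mirror the source; `hasApiKey` is a hypothetical stand-in for
// the plugin's per-provider settings lookup (getProviderSetting).
const orderedProviders = ["Infio", "OpenRouter", "Anthropic", "OpenAI", "Google"];

const hasApiKey: Record<string, boolean> = {
	Anthropic: true,
	OpenAI: true,
};

// Equivalent of getSettedProviders(): keep only providers with a key configured.
const configuredProviders = orderedProviders.filter((p) => hasApiKey[p]);

// Equivalent of providers.find(p => settedProviders.includes(p)):
// the first entry in priority order that is also configured.
const selectedProvider = orderedProviders.find((p) => configuredProviders.includes(p));

console.log(selectedProvider); // "Anthropic"

The same find-over-ordered-list pattern is applied separately to embeddingProviders to choose the embedding provider.
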
@@ -232,7 +297,16 @@ const CustomProviderSettings: React.FC<CustomProviderSettingsProps> = ({ plugin,

 			{/* Model selection section */}
 			<div className="model-selection-section">
+				<div className="model-selection-header">
 					<h2 className="section-title">Model Selection</h2>
+					<button
+						className="one-click-config-btn"
+						onClick={handleOneClickConfig}
+						title="Automatically configure the models to the recommended models of providers that already have an API key set"
+					>
+						One-Click Setup
+					</button>
+				</div>

 				<div className="model-selectors">
 					<ComboBoxComponent
@@ -293,6 +367,44 @@ const CustomProviderSettings: React.FC<CustomProviderSettingsProps> = ({ plugin,
 	border-bottom: 1px solid var(--background-modifier-border);
 }

+/* Model selection header styles */
+.model-selection-header {
+	display: flex;
+	align-items: center;
+	justify-content: space-between;
+	margin-bottom: var(--size-4-3);
+}
+
+.model-selection-header .section-title {
+	margin: 0;
+	padding-bottom: 0;
+	border-bottom: none;
+}
+
+/* One-click setup button styles */
+.one-click-config-btn {
+	background: var(--interactive-accent);
+	color: var(--text-on-accent);
+	border: none;
+	border-radius: var(--radius-s);
+	padding: var(--size-2-1) var(--size-4-2);
+	font-size: var(--font-ui-smaller);
+	font-weight: var(--font-weight-medium);
+	cursor: pointer;
+	transition: all 0.15s ease-in-out;
+	box-shadow: 0 1px 3px rgba(0, 0, 0, 0.1);
+}
+
+.one-click-config-btn:hover {
+	background: var(--interactive-accent-hover);
+	box-shadow: 0 2px 6px rgba(0, 0, 0, 0.15);
+}
+
+.one-click-config-btn:active {
+	transform: translateY(1px);
+	box-shadow: 0 1px 3px rgba(0, 0, 0, 0.1);
+}
+
 /* Provider tabs container */
 .provider-tabs {
 	display: flex;
@@ -380,6 +492,12 @@ const CustomProviderSettings: React.FC<CustomProviderSettingsProps> = ({ plugin,
 	padding: var(--size-2-1) var(--size-4-1);
 	font-size: var(--font-ui-smaller);
 }
+
+.model-selection-header {
+	flex-direction: column;
+	align-items: flex-start;
+	gap: var(--size-2-2);
+}
 }

 /* Provider name highlight styles */

src/utils/api.ts
@@ -35,6 +35,8 @@ export interface EmbeddingModelInfo {
 // https://docs.anthropic.com/en/docs/about-claude/models
 export type AnthropicModelId = keyof typeof anthropicModels
 export const anthropicDefaultModelId: AnthropicModelId = "claude-sonnet-4-20250514"
+export const anthropicDefaultAutoCompleteModelId: AnthropicModelId = "claude-3-5-haiku-20241022"
+export const anthropicDefaultEmbeddingModelId: AnthropicModelId = null // embedding is not supported by this provider
 export const anthropicModels = {
 	"claude-sonnet-4-20250514": {
 		maxTokens: 64_000, // Overridden to 8k if `enableReasoningEffort` is false.
@@ -128,7 +130,9 @@ export const anthropicModels = {
 } as const satisfies Record<string, ModelInfo> // as const assertion makes the object

 // Infio
-export const infioDefaultModelId = "deepseek/deepseek-v3" // will always exist
+export const infioDefaultModelId = "anthropic/claude-sonnet-4" // for chat
+export const infioDefaultAutoCompleteModelId = "groq/llama-3.3-70b-versatile" // for auto complete
+export const infioDefaultEmbeddingModelId = "openai/text-embedding-3-small" // for embedding
 export const infioDefaultModelInfo: ModelInfo = {
 	maxTokens: 8192,
 	contextWindow: 65_536,
@@ -210,7 +214,9 @@ export const infioEmbeddingModels = {

 // OpenRouter
 // https://openrouter.ai/models?order=newest&supported_parameters=tools
-export const openRouterDefaultModelId = "anthropic/claude-sonnet-4" // will always exist in openRouterModels
+export const openRouterDefaultModelId = "google/gemini-2.5-pro-preview" // for chat
+export const openRouterDefaultAutoCompleteModelId = "google/gemini-2.5-flash-preview-05-20" // for auto complete
+export const openRouterDefaultEmbeddingModelId = null // embedding is not supported by this provider
 export const openRouterDefaultModelInfo: ModelInfo = {
 	maxTokens: 8192,
 	contextWindow: 200_000,
@@ -262,7 +268,10 @@ async function fetchOpenRouterModels(): Promise<Record<string, ModelInfo>> {
 // Gemini
 // https://ai.google.dev/gemini-api/docs/models/gemini
 export type GeminiModelId = keyof typeof geminiModels
-export const geminiDefaultModelId: GeminiModelId = "gemini-2.5-flash-preview-05-20"
+export const geminiDefaultModelId: GeminiModelId = "gemini-2.5-pro-preview-05-06"
+export const geminiDefaultAutoCompleteModelId: GeminiModelId = "gemini-2.5-flash-preview-05-20"
+export const geminiDefaultEmbeddingModelId: keyof typeof geminiEmbeddingModels = "text-embedding-004"
+
 export const geminiModels = {
 	"gemini-2.5-flash-preview-05-20:thinking": {
 		maxTokens: 65_535,
@@ -489,6 +498,9 @@ export const geminiEmbeddingModels = {
 // https://openai.com/api/pricing/
 export type OpenAiNativeModelId = keyof typeof openAiNativeModels
 export const openAiNativeDefaultModelId: OpenAiNativeModelId = "gpt-4o"
+export const openAiNativeDefaultAutoCompleteModelId: OpenAiNativeModelId = "gpt-4o-mini"
+export const openAiNativeDefaultEmbeddingModelId: keyof typeof openAINativeEmbeddingModels = "text-embedding-3-small"
+
 export const openAiNativeModels = {
 	// don't support tool use yet
 	"o3-mini": {
@@ -594,6 +606,9 @@ export const openAINativeEmbeddingModels = {
 // https://api-docs.deepseek.com/quick_start/pricing
 export type DeepSeekModelId = keyof typeof deepSeekModels
 export const deepSeekDefaultModelId: DeepSeekModelId = "deepseek-chat"
+export const deepSeekDefaultAutoCompleteModelId: DeepSeekModelId = "deepseek-chat"
+export const deepSeekDefaultEmbeddingModelId = null // embedding is not supported by this provider
+
 export const deepSeekModels = {
 	"deepseek-chat": {
 		maxTokens: 8_000,
@@ -620,7 +635,10 @@ export const deepSeekModels = {
 // Qwen
 // https://help.aliyun.com/zh/model-studio/getting-started/
 export type QwenModelId = keyof typeof qwenModels
-export const qwenDefaultModelId: QwenModelId = "qwen-max-latest"
+export const qwenDefaultModelId: QwenModelId = "qwen3-235b-a22b"
+export const qwenDefaultAutoCompleteModelId: QwenModelId = "qwen3-32b"
+export const qwenDefaultEmbeddingModelId: keyof typeof qwenEmbeddingModels = "text-embedding-v3"
+
 export const qwenModels = {
 	"qwen3-235b-a22b": {
 		maxTokens: 129_024,
@@ -916,6 +934,9 @@ export const qwenEmbeddingModels = {
 // https://docs.siliconflow.cn/
 export type SiliconFlowModelId = keyof typeof siliconFlowModels
 export const siliconFlowDefaultModelId: SiliconFlowModelId = "deepseek-ai/DeepSeek-V3"
+export const siliconFlowDefaultAutoCompleteModelId: SiliconFlowModelId = "deepseek-ai/DeepSeek-V3"
+export const siliconFlowDefaultEmbeddingModelId: keyof typeof siliconFlowEmbeddingModels = "BAAI/bge-m3"
+
 export const siliconFlowModels = {
 	"01-ai/Yi-1.5-9B-Chat-16K": {
 		maxTokens: 8192,
@@ -1396,6 +1417,9 @@ export const siliconFlowEmbeddingModels = {
 // https://console.groq.com/docs/overview
 export type GroqModelId = keyof typeof groqModels
 export const groqDefaultModelId: GroqModelId = "llama-3.3-70b-versatile"
+export const groqDefaultAutoCompleteModelId: GroqModelId = "llama-3.3-70b-versatile"
+export const groqDefaultEmbeddingModelId = null // embedding is not supported by this provider
+
 export const groqModels = {
 	"meta-llama/llama-4-scout-17b-16e-instruct": {
 		maxTokens: 8192,
@@ -1554,6 +1578,9 @@ export const groqModels = {
 // https://docs.x.ai/docs/models
 export type GrokModelId = keyof typeof grokModels
 export const grokDefaultModelId: GrokModelId = "grok-3"
+export const grokDefaultAutoCompleteModelId: GrokModelId = "grok-3-mini-fast"
+export const grokDefaultEmbeddingModelId = null // embedding is not supported by this provider
+
 export const grokModels = {
 	"grok-3": {
 		maxTokens: 8192,
@@ -1606,18 +1633,31 @@ export const grokModels = {
 } as const satisfies Record<string, ModelInfo>

 /// helper functions
-// get all providers
+// get all providers, used for the provider dropdown
 export const GetAllProviders = (): ApiProvider[] => {
-	return Object.values(ApiProvider)
+	return [
+		ApiProvider.Infio,
+		ApiProvider.OpenRouter,
+		ApiProvider.Anthropic,
+		ApiProvider.OpenAI,
+		ApiProvider.Google,
+		ApiProvider.Grok,
+		ApiProvider.AlibabaQwen,
+		ApiProvider.SiliconFlow,
+		ApiProvider.Deepseek,
+		ApiProvider.Groq,
+		ApiProvider.Ollama,
+		ApiProvider.OpenAICompatible,
+	]
 }

 export const GetEmbeddingProviders = (): ApiProvider[] => {
 	return [
 		ApiProvider.Infio,
 		ApiProvider.OpenAI,
 		ApiProvider.SiliconFlow,
 		ApiProvider.Google,
 		ApiProvider.AlibabaQwen,
 		ApiProvider.SiliconFlow,
 		ApiProvider.OpenAICompatible,
 		ApiProvider.Ollama,
 	]
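
GetAllProviders now returns a hand-ordered priority list instead of Object.values(ApiProvider), and GetEmbeddingProviders narrows it to providers that expose embedding models; the one-click config walks both lists in order. A hypothetical consistency check, not part of this commit, that every embedding provider is also exposed in the main provider list might look like:

// Hypothetical sanity check (not part of the commit): every provider returned by
// GetEmbeddingProviders should also appear in GetAllProviders, otherwise the
// one-click config could pick an embedding provider the settings UI never lists.
import { GetAllProviders, GetEmbeddingProviders } from "./api"; // path assumed relative to src/utils

const allProviders = new Set(GetAllProviders());
const missing = GetEmbeddingProviders().filter((provider) => !allProviders.has(provider));

if (missing.length > 0) {
	console.warn("Embedding providers missing from GetAllProviders:", missing);
}
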
@@ -1728,3 +1768,69 @@ export const GetEmbeddingModelInfo = (provider: ApiProvider, modelId: string): E
 	const models = GetEmbeddingProviderModels(provider)
 	return models[modelId]
 }
+
+// Get default model id for a provider
+export const GetDefaultModelId = (provider: ApiProvider): { chat: string, autoComplete: string, embedding: string } => {
+	switch (provider) {
+		case ApiProvider.Infio:
+			return {
+				"chat": infioDefaultModelId,
+				"autoComplete": infioDefaultAutoCompleteModelId,
+				"embedding": infioDefaultEmbeddingModelId,
+			}
+		case ApiProvider.OpenRouter:
+			return {
+				"chat": openRouterDefaultModelId,
+				"autoComplete": openRouterDefaultAutoCompleteModelId,
+				"embedding": openRouterDefaultEmbeddingModelId,
+			}
+		case ApiProvider.Anthropic:
+			return {
+				"chat": anthropicDefaultModelId,
+				"autoComplete": anthropicDefaultAutoCompleteModelId,
+				"embedding": anthropicDefaultEmbeddingModelId,
+			}
+		case ApiProvider.Deepseek:
+			return {
+				"chat": deepSeekDefaultModelId,
+				"autoComplete": deepSeekDefaultAutoCompleteModelId,
+				"embedding": deepSeekDefaultEmbeddingModelId,
+			}
+		case ApiProvider.Google:
+			return {
+				"chat": geminiDefaultModelId,
+				"autoComplete": geminiDefaultAutoCompleteModelId,
+				"embedding": geminiDefaultEmbeddingModelId,
+			}
+		case ApiProvider.AlibabaQwen:
+			return {
+				"chat": qwenDefaultModelId,
+				"autoComplete": qwenDefaultAutoCompleteModelId,
+				"embedding": qwenDefaultEmbeddingModelId,
+			}
+		case ApiProvider.SiliconFlow:
+			return {
+				"chat": siliconFlowDefaultModelId,
+				"autoComplete": siliconFlowDefaultAutoCompleteModelId,
+				"embedding": siliconFlowDefaultEmbeddingModelId,
+			}
+		case ApiProvider.Groq:
+			return {
+				"chat": groqDefaultModelId,
+				"autoComplete": groqDefaultAutoCompleteModelId,
+				"embedding": groqDefaultEmbeddingModelId,
+			}
+		case ApiProvider.Grok:
+			return {
+				"chat": grokDefaultModelId,
+				"autoComplete": grokDefaultAutoCompleteModelId,
+				"embedding": grokDefaultEmbeddingModelId,
+			}
+		default:
+			return {
+				"chat": null,
+				"autoComplete": null,
+				"embedding": null,
+			}
+	}
+}
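
GetDefaultModelId centralizes the per-provider defaults so callers such as handleOneClickConfig can null-check each slot; providers without an embedding endpoint simply return null for that field. A minimal usage sketch (import paths are assumptions based on the files shown above):

// Minimal usage sketch of GetDefaultModelId; paths are assumed, not taken from the commit.
import { ApiProvider } from "../types/llm/model"; // assumed path from src/utils/api.ts
import { GetDefaultModelId } from "./api";

const defaults = GetDefaultModelId(ApiProvider.Anthropic);

if (defaults.chat) {
	console.log(`chat default: ${defaults.chat}`); // "claude-sonnet-4-20250514"
}
if (!defaults.embedding) {
	// Anthropic exposes no embedding model, so the embedding slot stays null
	// and the one-click config leaves the embedding settings untouched.
	console.log("no embedding default for this provider");
}
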