diff --git a/src/constants.ts b/src/constants.ts index 9d53df3..1ca64f4 100644 --- a/src/constants.ts +++ b/src/constants.ts @@ -33,6 +33,7 @@ export const OPENROUTER_BASE_URL = 'https://openrouter.ai/api/v1' export const GROK_BASE_URL = 'https://api.x.ai/v1' export const SILICONFLOW_BASE_URL = 'https://api.siliconflow.cn/v1' export const ALIBABA_QWEN_BASE_URL = 'https://dashscope.aliyuncs.com/compatible-mode/v1' +export const MOONSHOT_BASE_URL = 'https://api.moonshot.cn/v1' export const INFIO_BASE_URL = 'https://api.infio.app' export const JINA_BASE_URL = 'https://r.jina.ai' export const SERPER_BASE_URL = 'https://serpapi.com/search' diff --git a/src/core/llm/manager.ts b/src/core/llm/manager.ts index 5dd1ed4..031466c 100644 --- a/src/core/llm/manager.ts +++ b/src/core/llm/manager.ts @@ -1,4 +1,4 @@ -import { ALIBABA_QWEN_BASE_URL, DEEPSEEK_BASE_URL, GROK_BASE_URL, INFIO_BASE_URL, OPENROUTER_BASE_URL, SILICONFLOW_BASE_URL } from '../../constants' +import { ALIBABA_QWEN_BASE_URL, DEEPSEEK_BASE_URL, GROK_BASE_URL, INFIO_BASE_URL, MOONSHOT_BASE_URL, OPENROUTER_BASE_URL, SILICONFLOW_BASE_URL } from '../../constants' import { ApiProvider, LLMModel } from '../../types/llm/model' import { LLMOptions, @@ -39,6 +39,7 @@ class LLMManager implements LLMManagerInterface { private googleProvider: GeminiProvider private groqProvider: GroqProvider private grokProvider: OpenAICompatibleProvider + private moonshotProvider: OpenAICompatibleProvider private infioProvider: OpenAICompatibleProvider private openrouterProvider: OpenAICompatibleProvider private siliconflowProvider: OpenAICompatibleProvider @@ -85,6 +86,12 @@ class LLMManager implements LLMManagerInterface { settings.grokProvider.baseUrl : GROK_BASE_URL ) + this.moonshotProvider = new OpenAICompatibleProvider( + settings.moonshotProvider.apiKey, + settings.moonshotProvider.baseUrl && settings.moonshotProvider.useCustomUrl ? 
+ settings.moonshotProvider.baseUrl + : MOONSHOT_BASE_URL + ) this.ollamaProvider = new OllamaProvider(settings.ollamaProvider.baseUrl) this.openaiCompatibleProvider = new OpenAICompatibleProvider(settings.openaicompatibleProvider.apiKey, settings.openaicompatibleProvider.baseUrl) this.isInfioEnabled = !!settings.infioProvider.apiKey @@ -158,6 +165,12 @@ class LLMManager implements LLMManagerInterface { request, options, ) + case ApiProvider.Moonshot: + return await this.moonshotProvider.generateResponse( + model, + request, + options, + ) case ApiProvider.OpenAICompatible: return await this.openaiCompatibleProvider.generateResponse(model, request, options) default: @@ -195,6 +208,8 @@ class LLMManager implements LLMManagerInterface { return await this.groqProvider.streamResponse(model, request, options) case ApiProvider.Grok: return await this.grokProvider.streamResponse(model, request, options) + case ApiProvider.Moonshot: + return await this.moonshotProvider.streamResponse(model, request, options) case ApiProvider.Ollama: return await this.ollamaProvider.streamResponse(model, request, options) case ApiProvider.OpenAICompatible: diff --git a/src/core/llm/ollama.ts b/src/core/llm/ollama.ts index bce4757..d7ad370 100644 --- a/src/core/llm/ollama.ts +++ b/src/core/llm/ollama.ts @@ -24,7 +24,10 @@ import { OpenAIMessageAdapter } from './openai-message-adapter' export class NoStainlessOpenAI extends OpenAI { defaultHeaders() { + // 获取父类的默认头部,包含 Authorization + const parentHeaders = super.defaultHeaders() return { + ...parentHeaders, Accept: 'application/json', 'Content-Type': 'application/json', } diff --git a/src/core/llm/openai-compatible.ts b/src/core/llm/openai-compatible.ts index 9ce940b..b254f21 100644 --- a/src/core/llm/openai-compatible.ts +++ b/src/core/llm/openai-compatible.ts @@ -1,6 +1,6 @@ import OpenAI from 'openai' -import { ALIBABA_QWEN_BASE_URL } from '../../constants' +import { ALIBABA_QWEN_BASE_URL, MOONSHOT_BASE_URL } from '../../constants' import 
{ LLMModel } from '../../types/llm/model' import { LLMOptions, @@ -14,6 +14,7 @@ import { import { BaseLLMProvider } from './base' import { LLMBaseUrlNotSetException } from './exception' +import { NoStainlessOpenAI } from './ollama' import { OpenAIMessageAdapter } from './openai-message-adapter' export class OpenAICompatibleProvider implements BaseLLMProvider { @@ -23,14 +24,27 @@ export class OpenAICompatibleProvider implements BaseLLMProvider { private baseURL: string constructor(apiKey: string, baseURL: string) { - this.adapter = new OpenAIMessageAdapter() - this.client = new OpenAI({ - apiKey: apiKey, - baseURL: baseURL, - dangerouslyAllowBrowser: true, - }) + this.adapter = new OpenAIMessageAdapter() this.apiKey = apiKey this.baseURL = baseURL + + // 判断是否需要使用 NoStainlessOpenAI 来解决 CORS 问题 + const needsCorsAdapter = baseURL === MOONSHOT_BASE_URL || + baseURL?.includes('api.moonshot.cn') + + if (needsCorsAdapter) { + this.client = new NoStainlessOpenAI({ + apiKey: apiKey, + baseURL: baseURL, + dangerouslyAllowBrowser: true, + }) + } else { + this.client = new OpenAI({ + apiKey: apiKey, + baseURL: baseURL, + dangerouslyAllowBrowser: true, + }) + } } // 检查是否为阿里云Qwen API diff --git a/src/settings/components/ModelProviderSettings.tsx b/src/settings/components/ModelProviderSettings.tsx index 413f9ab..f387303 100644 --- a/src/settings/components/ModelProviderSettings.tsx +++ b/src/settings/components/ModelProviderSettings.tsx @@ -29,6 +29,7 @@ type ProviderSettingKey = | 'googleProvider' | 'groqProvider' | 'grokProvider' + | 'moonshotProvider' | 'ollamaProvider' | 'openaicompatibleProvider' | 'localproviderProvider'; @@ -44,6 +45,7 @@ const keyMap: Record = { 'Google': 'googleProvider', 'Groq': 'groqProvider', 'Grok': 'grokProvider', + 'Moonshot': 'moonshotProvider', 'Ollama': 'ollamaProvider', 'OpenAICompatible': 'openaicompatibleProvider', 'LocalProvider': 'localproviderProvider', diff --git a/src/types/llm/model.ts b/src/types/llm/model.ts index 6a4f974..ccd9500 
100644 --- a/src/types/llm/model.ts +++ b/src/types/llm/model.ts @@ -9,6 +9,7 @@ export enum ApiProvider { Google = "Google", Groq = "Groq", Grok = "Grok", + Moonshot = "Moonshot", Ollama = "Ollama", OpenAICompatible = "OpenAICompatible", LocalProvider = "LocalProvider", diff --git a/src/types/settings.ts b/src/types/settings.ts index e9f19fe..89101d1 100644 --- a/src/types/settings.ts +++ b/src/types/settings.ts @@ -184,6 +184,20 @@ const GrokProviderSchema = z.object({ models: [] }) +const MoonshotProviderSchema = z.object({ + name: z.literal('Moonshot'), + apiKey: z.string().catch(''), + baseUrl: z.string().catch(''), + useCustomUrl: z.boolean().catch(false), + models: z.array(z.string()).catch([]) +}).catch({ + name: 'Moonshot', + apiKey: '', + baseUrl: '', + useCustomUrl: false, + models: [] +}) + const LocalProviderSchema = z.object({ name: z.literal('LocalProvider'), apiKey: z.string().catch(''), @@ -271,6 +285,7 @@ export const InfioSettingsSchema = z.object({ ollamaProvider: OllamaProviderSchema, groqProvider: GroqProviderSchema, grokProvider: GrokProviderSchema, + moonshotProvider: MoonshotProviderSchema, openaicompatibleProvider: OpenAICompatibleProviderSchema, localproviderProvider: LocalProviderSchema, diff --git a/src/utils/api.ts b/src/utils/api.ts index 2ab768e..53f2aea 100644 --- a/src/utils/api.ts +++ b/src/utils/api.ts @@ -1645,6 +1645,89 @@ export const grokModels = { } } as const satisfies Record<string, ModelInfo> +// Moonshot +// https://platform.moonshot.cn/docs/pricing +export type MoonshotModelId = keyof typeof moonshotModels +export const moonshotDefaultModelId: MoonshotModelId = "kimi-k2-0711-preview" +export const moonshotDefaultInsightModelId: MoonshotModelId = "kimi-latest" +export const moonshotDefaultAutoCompleteModelId: MoonshotModelId = "kimi-latest" +export const moonshotDefaultEmbeddingModelId = null // embedding models are not supported by Moonshot + +export const moonshotModels = { + "kimi-k2-0711-preview": { + maxTokens: 8192, + contextWindow: 128_000, 
+ supportsImages: false, + supportsComputerUse: true, + supportsPromptCache: true, + description: "128k context length MoE architecture foundation model with strong coding and Agent capabilities, total parameters 1T, active parameters 32B" + }, + "kimi-latest": { + maxTokens: 8192, + contextWindow: 128_000, + supportsImages: true, + supportsComputerUse: true, + supportsPromptCache: true, + description: "Latest Kimi model version with 128k context length and image understanding capabilities" + }, + "kimi-thinking-preview": { + maxTokens: 8192, + contextWindow: 128_000, + supportsImages: true, + supportsComputerUse: true, + supportsPromptCache: true, + description: "Multimodal reasoning model with 128k context length, excels at deep reasoning tasks" + }, + "moonshot-v1-8k": { + maxTokens: 8192, + contextWindow: 8_000, + supportsImages: false, + supportsComputerUse: true, + supportsPromptCache: true, + description: "8k context length model optimized for short text generation" + }, + "moonshot-v1-32k": { + maxTokens: 8192, + contextWindow: 32_000, + supportsImages: false, + supportsComputerUse: true, + supportsPromptCache: true, + description: "32k context length model optimized for longer text generation" + }, + "moonshot-v1-128k": { + maxTokens: 8192, + contextWindow: 128_000, + supportsImages: false, + supportsComputerUse: true, + supportsPromptCache: true, + description: "128k context length model optimized for very long text generation" + }, + "moonshot-v1-8k-vision-preview": { + maxTokens: 8192, + contextWindow: 8_000, + supportsImages: true, + supportsComputerUse: true, + supportsPromptCache: true, + description: "8k context length vision model with image understanding capabilities" + }, + "moonshot-v1-32k-vision-preview": { + maxTokens: 8192, + contextWindow: 32_000, + supportsImages: true, + supportsComputerUse: true, + supportsPromptCache: true, + description: "32k context length vision model with image understanding capabilities" + }, + 
"moonshot-v1-128k-vision-preview": { + maxTokens: 8192, + contextWindow: 128_000, + supportsImages: true, + supportsComputerUse: true, + supportsPromptCache: true, + description: "128k context length vision model with image understanding capabilities" + } +} as const satisfies Record<string, ModelInfo> + // LocalProvider (本地嵌入模型) export const localProviderDefaultModelId = null // this is not supported for chat/autocomplete export const localProviderDefaultInsightModelId = null // this is not supported for insight @@ -1687,6 +1770,7 @@ export const GetAllProviders = (): ApiProvider[] => { ApiProvider.SiliconFlow, ApiProvider.Deepseek, ApiProvider.Groq, + ApiProvider.Moonshot, ApiProvider.Ollama, ApiProvider.OpenAICompatible, ApiProvider.LocalProvider, @@ -1731,6 +1815,8 @@ export const GetProviderModels = async (provider: ApiProvider, settings?: InfioS return groqModels case ApiProvider.Grok: return grokModels + case ApiProvider.Moonshot: + return moonshotModels case ApiProvider.Ollama: return {} case ApiProvider.OpenAICompatible: @@ -1767,6 +1853,8 @@ export const GetProviderModelsWithSettings = async (provider: ApiProvider, setti return groqModels case ApiProvider.Grok: return grokModels + case ApiProvider.Moonshot: + return moonshotModels case ApiProvider.Ollama: return {} case ApiProvider.OpenAICompatible: @@ -1888,6 +1976,13 @@ export const GetDefaultModelId = (provider: ApiProvider): { chat: string, insigh "autoComplete": grokDefaultAutoCompleteModelId, "embedding": grokDefaultEmbeddingModelId, } + case ApiProvider.Moonshot: + return { + "chat": moonshotDefaultModelId, + "insight": moonshotDefaultInsightModelId, + "autoComplete": moonshotDefaultAutoCompleteModelId, + "embedding": moonshotDefaultEmbeddingModelId, + } case ApiProvider.Ollama: return { "chat": null, // user-configured diff --git a/src/utils/provider-urls.ts b/src/utils/provider-urls.ts index 0952534..82e4bbf 100644 --- a/src/utils/provider-urls.ts +++ b/src/utils/provider-urls.ts @@ -12,6 +12,7 @@ export const 
providerApiUrls: Record = { [ApiProvider.Google]: 'https://aistudio.google.com/apikey', [ApiProvider.Groq]: 'https://console.groq.com/keys', [ApiProvider.Grok]: 'https://console.x.ai/', + [ApiProvider.Moonshot]: 'https://platform.moonshot.cn/console/api-keys', [ApiProvider.Ollama]: '', // Ollama 不需要API Key [ApiProvider.OpenAICompatible]: '', // 自定义兼容API,无固定URL [ApiProvider.LocalProvider]: '', // 本地提供者,无固定URL