Mirror of https://github.com/EthanMarti/infio-copilot.git (synced 2026-01-16 08:21:55 +00:00)
Added Moonshot API support, updated the relevant models and settings, taught the OpenAI-compatible provider to work around CORS issues, and extended model management.
parent 34296e6871
commit c0cd2ccf4d
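For orientation before the diff: Moonshot exposes an OpenAI-style API, which is why this commit wires it through the existing OpenAI-compatible provider. A minimal sketch of calling that endpoint directly with the stock openai SDK (not part of the commit; the model name and prompt are just examples):

import OpenAI from 'openai'

// MOONSHOT_BASE_URL, as introduced in the constants hunk below
const client = new OpenAI({
	apiKey: process.env.MOONSHOT_API_KEY,
	baseURL: 'https://api.moonshot.cn/v1',
})

// Standard OpenAI chat-completions call, served by the Moonshot endpoint
const completion = await client.chat.completions.create({
	model: 'kimi-k2-0711-preview',
	messages: [{ role: 'user', content: 'Hello from infio-copilot' }],
})
console.log(completion.choices[0]?.message?.content)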
@@ -33,6 +33,7 @@ export const OPENROUTER_BASE_URL = 'https://openrouter.ai/api/v1'
 export const GROK_BASE_URL = 'https://api.x.ai/v1'
 export const SILICONFLOW_BASE_URL = 'https://api.siliconflow.cn/v1'
 export const ALIBABA_QWEN_BASE_URL = 'https://dashscope.aliyuncs.com/compatible-mode/v1'
+export const MOONSHOT_BASE_URL = 'https://api.moonshot.cn/v1'
 export const INFIO_BASE_URL = 'https://api.infio.app'
 export const JINA_BASE_URL = 'https://r.jina.ai'
 export const SERPER_BASE_URL = 'https://serpapi.com/search'
@@ -1,4 +1,4 @@
-import { ALIBABA_QWEN_BASE_URL, DEEPSEEK_BASE_URL, GROK_BASE_URL, INFIO_BASE_URL, OPENROUTER_BASE_URL, SILICONFLOW_BASE_URL } from '../../constants'
+import { ALIBABA_QWEN_BASE_URL, DEEPSEEK_BASE_URL, GROK_BASE_URL, INFIO_BASE_URL, MOONSHOT_BASE_URL, OPENROUTER_BASE_URL, SILICONFLOW_BASE_URL } from '../../constants'
 import { ApiProvider, LLMModel } from '../../types/llm/model'
 import {
 	LLMOptions,
@@ -39,6 +39,7 @@ class LLMManager implements LLMManagerInterface {
 	private googleProvider: GeminiProvider
 	private groqProvider: GroqProvider
 	private grokProvider: OpenAICompatibleProvider
+	private moonshotProvider: OpenAICompatibleProvider
 	private infioProvider: OpenAICompatibleProvider
 	private openrouterProvider: OpenAICompatibleProvider
 	private siliconflowProvider: OpenAICompatibleProvider
@@ -85,6 +86,12 @@ class LLMManager implements LLMManagerInterface {
 				settings.grokProvider.baseUrl
 				: GROK_BASE_URL
 		)
+		this.moonshotProvider = new OpenAICompatibleProvider(
+			settings.moonshotProvider.apiKey,
+			settings.moonshotProvider.baseUrl && settings.moonshotProvider.useCustomUrl ?
+				settings.moonshotProvider.baseUrl
+				: MOONSHOT_BASE_URL
+		)
 		this.ollamaProvider = new OllamaProvider(settings.ollamaProvider.baseUrl)
 		this.openaiCompatibleProvider = new OpenAICompatibleProvider(settings.openaicompatibleProvider.apiKey, settings.openaicompatibleProvider.baseUrl)
 		this.isInfioEnabled = !!settings.infioProvider.apiKey
@@ -158,6 +165,12 @@ class LLMManager implements LLMManagerInterface {
 					request,
 					options,
 				)
+			case ApiProvider.Moonshot:
+				return await this.moonshotProvider.generateResponse(
+					model,
+					request,
+					options,
+				)
 			case ApiProvider.OpenAICompatible:
 				return await this.openaiCompatibleProvider.generateResponse(model, request, options)
 			default:
@@ -195,6 +208,8 @@ class LLMManager implements LLMManagerInterface {
 				return await this.groqProvider.streamResponse(model, request, options)
 			case ApiProvider.Grok:
 				return await this.grokProvider.streamResponse(model, request, options)
+			case ApiProvider.Moonshot:
+				return await this.moonshotProvider.streamResponse(model, request, options)
 			case ApiProvider.Ollama:
 				return await this.ollamaProvider.streamResponse(model, request, options)
 			case ApiProvider.OpenAICompatible:
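The constructor hunk above repeats the same base-URL fallback for every provider. As a standalone helper the rule reads like this (an illustrative refactoring sketch, not code from this commit; resolveBaseUrl is a hypothetical name):

// A custom URL is used only when it is non-empty AND the user opted in;
// otherwise the provider's well-known default applies.
function resolveBaseUrl(
	provider: { baseUrl: string; useCustomUrl: boolean },
	defaultUrl: string,
): string {
	return provider.baseUrl && provider.useCustomUrl ? provider.baseUrl : defaultUrl
}

// resolveBaseUrl(settings.moonshotProvider, MOONSHOT_BASE_URL)
// -> 'https://api.moonshot.cn/v1' unless useCustomUrl is true and baseUrl is set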
@@ -24,7 +24,10 @@ import { OpenAIMessageAdapter } from './openai-message-adapter'
 
 export class NoStainlessOpenAI extends OpenAI {
 	defaultHeaders() {
+		// Get the parent class's default headers, which include Authorization
+		const parentHeaders = super.defaultHeaders()
 		return {
+			...parentHeaders,
 			Accept: 'application/json',
 			'Content-Type': 'application/json',
 		}
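Context for the hunk above: the openai SDK normally attaches x-stainless-* metadata headers to every request. In a browser-like environment (an Obsidian plugin) any non-standard request header triggers a CORS preflight, and the preflight only succeeds if the server's Access-Control-Allow-Headers lists that header; Authorization and Content-Type are commonly allowed while x-stainless-* often is not, which appears to be what NoStainlessOpenAI works around. Spreading parentHeaders keeps Authorization intact. A generic sketch of the same idea, stripping the problematic headers from an arbitrary header record (not code from this commit):

// Drop the SDK's x-stainless-* metadata headers while keeping everything else,
// so the request only carries headers the target API is likely to allow in CORS.
function stripStainlessHeaders(headers: Record<string, string>): Record<string, string> {
	return Object.fromEntries(
		Object.entries(headers).filter(([name]) => !name.toLowerCase().startsWith('x-stainless-')),
	)
}

// stripStainlessHeaders({ Authorization: 'Bearer sk-...', 'X-Stainless-OS': 'MacOS' })
// -> { Authorization: 'Bearer sk-...' }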
@@ -1,6 +1,6 @@
 import OpenAI from 'openai'
 
-import { ALIBABA_QWEN_BASE_URL } from '../../constants'
+import { ALIBABA_QWEN_BASE_URL, MOONSHOT_BASE_URL } from '../../constants'
 import { LLMModel } from '../../types/llm/model'
 import {
 	LLMOptions,
@@ -14,6 +14,7 @@ import {
 
 import { BaseLLMProvider } from './base'
 import { LLMBaseUrlNotSetException } from './exception'
+import { NoStainlessOpenAI } from './ollama'
 import { OpenAIMessageAdapter } from './openai-message-adapter'
 
 export class OpenAICompatibleProvider implements BaseLLMProvider {
@@ -23,14 +24,27 @@ export class OpenAICompatibleProvider implements BaseLLMProvider {
 	private baseURL: string
 
 	constructor(apiKey: string, baseURL: string) {
 		this.adapter = new OpenAIMessageAdapter()
-		this.client = new OpenAI({
-			apiKey: apiKey,
-			baseURL: baseURL,
-			dangerouslyAllowBrowser: true,
-		})
 		this.apiKey = apiKey
 		this.baseURL = baseURL
+
+		// Decide whether NoStainlessOpenAI is needed to work around CORS issues
+		const needsCorsAdapter = baseURL === MOONSHOT_BASE_URL ||
+			baseURL?.includes('api.moonshot.cn')
+
+		if (needsCorsAdapter) {
+			this.client = new NoStainlessOpenAI({
+				apiKey: apiKey,
+				baseURL: baseURL,
+				dangerouslyAllowBrowser: true,
+			})
+		} else {
+			this.client = new OpenAI({
+				apiKey: apiKey,
+				baseURL: baseURL,
+				dangerouslyAllowBrowser: true,
+			})
+		}
 	}
 
 	// Check whether this is the Alibaba Cloud Qwen API
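The client-selection rule above, shown as a standalone predicate with example inputs (illustration only; the commit inlines this logic in the constructor):

const MOONSHOT_BASE_URL = 'https://api.moonshot.cn/v1'

// Mirrors the needsCorsAdapter check in the constructor hunk above.
function needsCorsAdapter(baseURL?: string): boolean {
	return baseURL === MOONSHOT_BASE_URL || !!baseURL?.includes('api.moonshot.cn')
}

needsCorsAdapter('https://api.moonshot.cn/v1') // true  -> NoStainlessOpenAI client
needsCorsAdapter('https://api.openai.com/v1')  // false -> stock OpenAI client
needsCorsAdapter(undefined)                    // false -> stock OpenAI client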
@@ -29,6 +29,7 @@ type ProviderSettingKey =
 	| 'googleProvider'
 	| 'groqProvider'
 	| 'grokProvider'
+	| 'moonshotProvider'
 	| 'ollamaProvider'
 	| 'openaicompatibleProvider'
 	| 'localproviderProvider';
@@ -44,6 +45,7 @@ const keyMap: Record<ApiProvider, ProviderSettingKey> = {
 	'Google': 'googleProvider',
 	'Groq': 'groqProvider',
 	'Grok': 'grokProvider',
+	'Moonshot': 'moonshotProvider',
 	'Ollama': 'ollamaProvider',
 	'OpenAICompatible': 'openaicompatibleProvider',
 	'LocalProvider': 'localproviderProvider',
@@ -9,6 +9,7 @@ export enum ApiProvider {
 	Google = "Google",
 	Groq = "Groq",
 	Grok = "Grok",
+	Moonshot = "Moonshot",
 	Ollama = "Ollama",
 	OpenAICompatible = "OpenAICompatible",
 	LocalProvider = "LocalProvider",
@@ -184,6 +184,20 @@ const GrokProviderSchema = z.object({
 	models: []
 })
+
+const MoonshotProviderSchema = z.object({
+	name: z.literal('Moonshot'),
+	apiKey: z.string().catch(''),
+	baseUrl: z.string().catch(''),
+	useCustomUrl: z.boolean().catch(false),
+	models: z.array(z.string()).catch([])
+}).catch({
+	name: 'Moonshot',
+	apiKey: '',
+	baseUrl: '',
+	useCustomUrl: false,
+	models: []
+})
 
 const LocalProviderSchema = z.object({
 	name: z.literal('LocalProvider'),
 	apiKey: z.string().catch(''),
@@ -271,6 +285,7 @@ export const InfioSettingsSchema = z.object({
 	ollamaProvider: OllamaProviderSchema,
 	groqProvider: GroqProviderSchema,
 	grokProvider: GrokProviderSchema,
+	moonshotProvider: MoonshotProviderSchema,
 	openaicompatibleProvider: OpenAICompatibleProviderSchema,
 	localproviderProvider: LocalProviderSchema,
 
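A note on the .catch() calls above: they make settings parsing non-throwing, so a corrupted or missing field degrades to its default instead of breaking plugin load. A self-contained sketch of that behaviour, using the same schema shape (not part of the commit):

import { z } from 'zod'

const MoonshotProviderSchema = z.object({
	name: z.literal('Moonshot'),
	apiKey: z.string().catch(''),
	baseUrl: z.string().catch(''),
	useCustomUrl: z.boolean().catch(false),
	models: z.array(z.string()).catch([])
}).catch({
	name: 'Moonshot',
	apiKey: '',
	baseUrl: '',
	useCustomUrl: false,
	models: []
})

// A bad field falls back to its per-field default:
MoonshotProviderSchema.parse({ name: 'Moonshot', apiKey: 123 })
// -> { name: 'Moonshot', apiKey: '', baseUrl: '', useCustomUrl: false, models: [] }

// A completely missing or invalid object falls back to the outer default:
MoonshotProviderSchema.parse(undefined)
// -> { name: 'Moonshot', apiKey: '', baseUrl: '', useCustomUrl: false, models: [] }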
@@ -1645,6 +1645,89 @@ export const grokModels = {
 	}
 } as const satisfies Record<string, ModelInfo>
 
+// Moonshot
+// https://platform.moonshot.cn/docs/pricing
+export type MoonshotModelId = keyof typeof moonshotModels
+export const moonshotDefaultModelId: MoonshotModelId = "kimi-k2-0711-preview"
+export const moonshotDefaultInsightModelId: MoonshotModelId = "kimi-latest"
+export const moonshotDefaultAutoCompleteModelId: MoonshotModelId = "kimi-latest"
+export const moonshotDefaultEmbeddingModelId = null // embedding models are not supported
+
+export const moonshotModels = {
+	"kimi-k2-0711-preview": {
+		maxTokens: 8192,
+		contextWindow: 128_000,
+		supportsImages: false,
+		supportsComputerUse: true,
+		supportsPromptCache: true,
+		description: "128k context length MoE architecture foundation model with strong coding and Agent capabilities, total parameters 1T, active parameters 32B"
+	},
+	"kimi-latest": {
+		maxTokens: 8192,
+		contextWindow: 128_000,
+		supportsImages: true,
+		supportsComputerUse: true,
+		supportsPromptCache: true,
+		description: "Latest Kimi model version with 128k context length and image understanding capabilities"
+	},
+	"kimi-thinking-preview": {
+		maxTokens: 8192,
+		contextWindow: 128_000,
+		supportsImages: true,
+		supportsComputerUse: true,
+		supportsPromptCache: true,
+		description: "Multimodal reasoning model with 128k context length, excels at deep reasoning tasks"
+	},
+	"moonshot-v1-8k": {
+		maxTokens: 8192,
+		contextWindow: 8_000,
+		supportsImages: false,
+		supportsComputerUse: true,
+		supportsPromptCache: true,
+		description: "8k context length model optimized for short text generation"
+	},
+	"moonshot-v1-32k": {
+		maxTokens: 8192,
+		contextWindow: 32_000,
+		supportsImages: false,
+		supportsComputerUse: true,
+		supportsPromptCache: true,
+		description: "32k context length model optimized for longer text generation"
+	},
+	"moonshot-v1-128k": {
+		maxTokens: 8192,
+		contextWindow: 128_000,
+		supportsImages: false,
+		supportsComputerUse: true,
+		supportsPromptCache: true,
+		description: "128k context length model optimized for very long text generation"
+	},
+	"moonshot-v1-8k-vision-preview": {
+		maxTokens: 8192,
+		contextWindow: 8_000,
+		supportsImages: true,
+		supportsComputerUse: true,
+		supportsPromptCache: true,
+		description: "8k context length vision model with image understanding capabilities"
+	},
+	"moonshot-v1-32k-vision-preview": {
+		maxTokens: 8192,
+		contextWindow: 32_000,
+		supportsImages: true,
+		supportsComputerUse: true,
+		supportsPromptCache: true,
+		description: "32k context length vision model with image understanding capabilities"
+	},
+	"moonshot-v1-128k-vision-preview": {
+		maxTokens: 8192,
+		contextWindow: 128_000,
+		supportsImages: true,
+		supportsComputerUse: true,
+		supportsPromptCache: true,
+		description: "128k context length vision model with image understanding capabilities"
+	}
+} as const satisfies Record<string, ModelInfo>
+
 // LocalProvider (local embedding models)
 export const localProviderDefaultModelId = null // this is not supported for chat/autocomplete
 export const localProviderDefaultInsightModelId = null // this is not supported for insight
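A note on the `as const satisfies Record<string, ModelInfo>` pattern used for the model table above: `satisfies` checks every entry against ModelInfo without widening the object type, and `as const` keeps the literal keys, so MoonshotModelId remains a union of the concrete model names. A trimmed, self-contained illustration (the reduced ModelInfo shape here is an assumption; the real interface lives elsewhere in the repo):

interface ModelInfo {
	maxTokens: number
	contextWindow: number
	supportsImages: boolean
}

const demoModels = {
	"kimi-latest": { maxTokens: 8192, contextWindow: 128_000, supportsImages: true },
	"moonshot-v1-8k": { maxTokens: 8192, contextWindow: 8_000, supportsImages: false },
} as const satisfies Record<string, ModelInfo>

// keyof typeof keeps the literal keys: "kimi-latest" | "moonshot-v1-8k",
// which is exactly how MoonshotModelId is derived above.
type DemoModelId = keyof typeof demoModels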
@@ -1687,6 +1770,7 @@ export const GetAllProviders = (): ApiProvider[] => {
 		ApiProvider.SiliconFlow,
 		ApiProvider.Deepseek,
 		ApiProvider.Groq,
+		ApiProvider.Moonshot,
 		ApiProvider.Ollama,
 		ApiProvider.OpenAICompatible,
 		ApiProvider.LocalProvider,
@@ -1731,6 +1815,8 @@ export const GetProviderModels = async (provider: ApiProvider, settings?: InfioS
 			return groqModels
 		case ApiProvider.Grok:
 			return grokModels
+		case ApiProvider.Moonshot:
+			return moonshotModels
 		case ApiProvider.Ollama:
 			return {}
 		case ApiProvider.OpenAICompatible:
@@ -1767,6 +1853,8 @@ export const GetProviderModelsWithSettings = async (provider: ApiProvider, setti
 			return groqModels
 		case ApiProvider.Grok:
 			return grokModels
+		case ApiProvider.Moonshot:
+			return moonshotModels
 		case ApiProvider.Ollama:
 			return {}
 		case ApiProvider.OpenAICompatible:
@@ -1888,6 +1976,13 @@ export const GetDefaultModelId = (provider: ApiProvider): { chat: string, insigh
 				"autoComplete": grokDefaultAutoCompleteModelId,
 				"embedding": grokDefaultEmbeddingModelId,
 			}
+		case ApiProvider.Moonshot:
+			return {
+				"chat": moonshotDefaultModelId,
+				"insight": moonshotDefaultInsightModelId,
+				"autoComplete": moonshotDefaultAutoCompleteModelId,
+				"embedding": moonshotDefaultEmbeddingModelId,
+			}
 		case ApiProvider.Ollama:
 			return {
 				"chat": null, // user-configured
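Taken together, the model-management hunks above make Moonshot resolvable like any other provider. A usage sketch of what the new branches return (import paths are assumptions; the return shapes follow the signatures shown in the hunks):

import { ApiProvider } from '../types/llm/model'                 // path is an assumption
import { GetDefaultModelId, GetProviderModels } from './models'  // path is an assumption

const defaults = GetDefaultModelId(ApiProvider.Moonshot)
// -> { chat: 'kimi-k2-0711-preview', insight: 'kimi-latest',
//      autoComplete: 'kimi-latest', embedding: null }

const moonshot = await GetProviderModels(ApiProvider.Moonshot)
// -> the moonshotModels table defined earlier in this diff
console.log(Object.keys(moonshot)) // ['kimi-k2-0711-preview', 'kimi-latest', ...]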
@@ -12,6 +12,7 @@ export const providerApiUrls: Record<ApiProvider, string> = {
 	[ApiProvider.Google]: 'https://aistudio.google.com/apikey',
 	[ApiProvider.Groq]: 'https://console.groq.com/keys',
 	[ApiProvider.Grok]: 'https://console.x.ai/',
+	[ApiProvider.Moonshot]: 'https://platform.moonshot.cn/console/api-keys',
 	[ApiProvider.Ollama]: '', // Ollama does not require an API key
 	[ApiProvider.OpenAICompatible]: '', // custom compatible API, no fixed URL
 	[ApiProvider.LocalProvider]: '', // local provider, no fixed URL