add grok

This commit is contained in:
parent 169c687ed3
commit 812aa76376
@@ -29,6 +29,7 @@ export const SUPPORT_EMBEDDING_SIMENTION: number[] = [
 export const OPENAI_BASE_URL = 'https://api.openai.com/v1'
 export const DEEPSEEK_BASE_URL = 'https://api.deepseek.com'
 export const OPENROUTER_BASE_URL = 'https://openrouter.ai/api/v1'
+export const GROK_BASE_URL = 'https://api.x.ai/v1'
 export const SILICONFLOW_BASE_URL = 'https://api.siliconflow.cn/v1'
 export const ALIBABA_QWEN_BASE_URL = 'https://dashscope.aliyuncs.com/compatible-mode/v1'
 export const INFIO_BASE_URL = 'https://api.infio.com/api/raw_message'
@@ -1,4 +1,4 @@
-import { ALIBABA_QWEN_BASE_URL, DEEPSEEK_BASE_URL, OPENROUTER_BASE_URL, SILICONFLOW_BASE_URL } from '../../constants'
+import { ALIBABA_QWEN_BASE_URL, DEEPSEEK_BASE_URL, GROK_BASE_URL, OPENROUTER_BASE_URL, SILICONFLOW_BASE_URL } from '../../constants'
 import { ApiProvider, LLMModel } from '../../types/llm/model'
 import {
 	LLMOptions,
@@ -39,6 +39,7 @@ class LLMManager implements LLMManagerInterface {
 	private anthropicProvider: AnthropicProvider
 	private googleProvider: GeminiProvider
 	private groqProvider: GroqProvider
+	private grokProvider: OpenAICompatibleProvider
 	private infioProvider: InfioProvider
 	private openrouterProvider: OpenAICompatibleProvider
 	private siliconflowProvider: OpenAICompatibleProvider
@@ -77,6 +78,16 @@ class LLMManager implements LLMManagerInterface {
 		this.anthropicProvider = new AnthropicProvider(settings.anthropicProvider.apiKey)
 		this.googleProvider = new GeminiProvider(settings.googleProvider.apiKey)
 		this.groqProvider = new GroqProvider(settings.groqProvider.apiKey)
+		console.log('GrokProvider',
+			settings.grokProvider.apiKey,
+			settings.grokProvider.baseUrl,
+			settings.grokProvider.useCustomUrl
+		)
+		this.grokProvider = new OpenAICompatibleProvider(settings.grokProvider.apiKey,
+			settings.grokProvider.baseUrl && settings.grokProvider.useCustomUrl ?
+				settings.grokProvider.baseUrl
+				: GROK_BASE_URL
+		)
 		this.ollamaProvider = new OllamaProvider(settings.ollamaProvider.baseUrl)
 		this.openaiCompatibleProvider = new OpenAICompatibleProvider(settings.openaicompatibleProvider.apiKey, settings.openaicompatibleProvider.baseUrl)
 		this.isInfioEnabled = !!settings.infioProvider.apiKey
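Note on the hunk above: x.ai exposes an OpenAI-compatible endpoint, so the new grokProvider reuses OpenAICompatibleProvider rather than adding a bespoke client. The inlined ternary only honors settings.grokProvider.baseUrl when useCustomUrl is enabled and the URL is non-empty; otherwise it falls back to GROK_BASE_URL. A minimal sketch of that selection logic, where resolveBaseUrl is a hypothetical helper and not part of this commit:

// Sketch of the base-URL selection the constructor inlines above.
// `resolveBaseUrl` and `ProviderUrlSettings` are illustrative names.
interface ProviderUrlSettings {
	baseUrl: string
	useCustomUrl: boolean
}

const resolveBaseUrl = (s: ProviderUrlSettings, fallback: string): string =>
	s.baseUrl && s.useCustomUrl ? s.baseUrl : fallback

// resolveBaseUrl({ baseUrl: '', useCustomUrl: false }, GROK_BASE_URL)
//   -> 'https://api.x.ai/v1'
// resolveBaseUrl({ baseUrl: 'https://proxy.example/v1', useCustomUrl: true }, GROK_BASE_URL)
//   -> 'https://proxy.example/v1'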
@@ -145,6 +156,12 @@ class LLMManager implements LLMManagerInterface {
 					request,
 					options,
 				)
+			case ApiProvider.Grok:
+				return await this.grokProvider.generateResponse(
+					model,
+					request,
+					options,
+				)
 			case ApiProvider.OpenAICompatible:
 				return await this.openaiCompatibleProvider.generateResponse(model, request, options)
 			default:
@@ -182,6 +199,8 @@ class LLMManager implements LLMManagerInterface {
 				return await this.googleProvider.streamResponse(model, request, options)
 			case ApiProvider.Groq:
 				return await this.groqProvider.streamResponse(model, request, options)
+			case ApiProvider.Grok:
+				return await this.grokProvider.streamResponse(model, request, options)
 			case ApiProvider.Ollama:
 				return await this.ollamaProvider.streamResponse(model, request, options)
 			case ApiProvider.OpenAICompatible:
@@ -22,7 +22,8 @@ export class OpenAICompatibleProvider implements BaseLLMProvider {
 	private baseURL: string
 
 	constructor(apiKey: string, baseURL: string) {
-		this.adapter = new OpenAIMessageAdapter()
+		console.log('OpenAICompatibleProvider constructor', apiKey, baseURL)
+		this.adapter = new OpenAIMessageAdapter()
 		this.client = new OpenAI({
 			apiKey: apiKey,
 			baseURL: baseURL,
@@ -37,6 +38,7 @@ export class OpenAICompatibleProvider implements BaseLLMProvider {
 		request: LLMRequestNonStreaming,
 		options?: LLMOptions,
 	): Promise<LLMResponseNonStreaming> {
+		console.log('OpenAICompatibleProvider generateResponse', this.baseURL, this.apiKey)
 		if (!this.baseURL || !this.apiKey) {
 			throw new LLMBaseUrlNotSetException(
 				'OpenAI Compatible base URL or API key is missing. Please set it in settings menu.',
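Note on the hunk above: generateResponse guards against a missing base URL or API key by throwing LLMBaseUrlNotSetException before any request is made. A minimal sketch of how a caller might surface that guard; only the exception name comes from the diff, the handling itself is illustrative:

// Illustrative caller; `provider`, `model`, and `request` are assumed in scope.
try {
	const res = await provider.generateResponse(model, request)
	console.log(res)
} catch (e) {
	if (e instanceof LLMBaseUrlNotSetException) {
		// e.g. prompt the user to fill in the Grok API key in the settings menu
		console.warn(e.message)
	} else {
		throw e
	}
}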
@@ -23,6 +23,7 @@ type ProviderSettingKey =
 	| 'deepseekProvider'
 	| 'googleProvider'
 	| 'groqProvider'
+	| 'grokProvider'
 	| 'ollamaProvider'
 	| 'openaicompatibleProvider';
 
@@ -36,8 +37,9 @@ const keyMap: Record<ApiProvider, ProviderSettingKey> = {
 	'Deepseek': 'deepseekProvider',
 	'Google': 'googleProvider',
 	'Groq': 'groqProvider',
+	'Grok': 'grokProvider',
 	'Ollama': 'ollamaProvider',
-	'OpenAICompatible': 'openaicompatibleProvider'
+	'OpenAICompatible': 'openaicompatibleProvider',
 };
 
 const getProviderSettingKey = (provider: ApiProvider): ProviderSettingKey => {
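Note on the hunk above: keyMap lets provider-specific settings be fetched generically through getProviderSettingKey. A minimal sketch of the lookup the new 'Grok' entry enables; the settings object shape follows the schema added later in this diff:

// Illustrative usage; `settings` is assumed to be a parsed InfioSettingsSchema value.
const key = getProviderSettingKey(ApiProvider.Grok)  // 'grokProvider'
const grokSettings = settings[key]
// -> { name: 'Grok', apiKey: '', baseUrl: '', useCustomUrl: false } by default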
@@ -8,6 +8,7 @@ export enum ApiProvider {
 	OpenAI = "OpenAI",
 	Google = "Google",
 	Groq = "Groq",
+	Grok = "Grok",
 	Ollama = "Ollama",
 	OpenAICompatible = "OpenAICompatible",
 }
@@ -296,6 +296,12 @@ describe('settings migration', () => {
 			baseUrl: '',
 			useCustomUrl: false,
 		},
+		grokProvider: {
+			name: 'Grok',
+			apiKey: '',
+			baseUrl: '',
+			useCustomUrl: false,
+		},
 		infioProvider: {
 			name: 'Infio',
 			apiKey: '',
@@ -147,6 +147,18 @@ const GroqProviderSchema = z.object({
 	useCustomUrl: false
 })
 
+const GrokProviderSchema = z.object({
+	name: z.literal('Grok'),
+	apiKey: z.string().catch(''),
+	baseUrl: z.string().catch(''),
+	useCustomUrl: z.boolean().catch(false)
+}).catch({
+	name: 'Grok',
+	apiKey: '',
+	baseUrl: '',
+	useCustomUrl: false
+})
+
 const ollamaModelSchema = z.object({
 	baseUrl: z.string().catch(''),
 	model: z.string().catch(''),
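Note on the hunk above: GrokProviderSchema layers field-level zod .catch fallbacks under an object-level .catch, so malformed persisted settings degrade to safe defaults instead of failing the migration. A minimal sketch of that behavior; the inputs are illustrative:

// zod's .catch substitutes the given value whenever validation fails.
GrokProviderSchema.parse({ name: 'Grok', apiKey: 42, baseUrl: null, useCustomUrl: 'yes' })
// field-level catches repair each bad field:
// -> { name: 'Grok', apiKey: '', baseUrl: '', useCustomUrl: false }

GrokProviderSchema.parse(undefined)
// not an object at all, so the object-level .catch returns the full default:
// -> { name: 'Grok', apiKey: '', baseUrl: '', useCustomUrl: false }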
@@ -205,6 +217,7 @@ export const InfioSettingsSchema = z.object({
 	googleProvider: GoogleProviderSchema,
 	ollamaProvider: OllamaProviderSchema,
 	groqProvider: GroqProviderSchema,
+	grokProvider: GrokProviderSchema,
 	openaicompatibleProvider: OpenAICompatibleProviderSchema,
 
 	// Chat Model
src/utils/api.ts (129 lines changed)
@@ -1,4 +1,4 @@
-import { OPENROUTER_BASE_URL } from '../constants'
+import { GROK_BASE_URL, OPENROUTER_BASE_URL } from '../constants'
 import { ApiProvider } from '../types/llm/model'
 
 export interface ModelInfo {
@@ -159,6 +159,40 @@ export const openRouterDefaultModelInfo: ModelInfo = {
 	description:
 		"The new Claude 3.5 Sonnet delivers better-than-Opus capabilities, faster-than-Sonnet speeds, at the same Sonnet prices. Sonnet is particularly good at:\n\n- Coding: New Sonnet scores ~49% on SWE-Bench Verified, higher than the last best score, and without any fancy prompt scaffolding\n- Data science: Augments human data science expertise; navigates unstructured data while using multiple tools for insights\n- Visual processing: excelling at interpreting charts, graphs, and images, accurately transcribing text to derive insights beyond just the text alone\n- Agentic tasks: exceptional tool use, making it great at agentic tasks (i.e. complex, multi-step problem solving tasks that require engaging with other systems)\n\n#multimodal",
 }
+let openRouterModelsCache: Record<string, ModelInfo> | null = null;
+async function fetchOpenRouterModels(): Promise<Record<string, ModelInfo>> {
+	if (openRouterModelsCache) {
+		return openRouterModelsCache;
+	}
+
+	try {
+		const response = await fetch(OPENROUTER_BASE_URL + "/models");
+		const data = await response.json();
+		const models: Record<string, ModelInfo> = {};
+
+		if (data?.data) {
+			for (const model of data.data) {
+				models[model.id] = {
+					maxTokens: model.top_provider?.max_completion_tokens ?? model.context_length,
+					contextWindow: model.context_length,
+					supportsImages: model.architecture?.modality?.includes("image") ?? false,
+					supportsPromptCache: false,
+					inputPrice: model.pricing?.prompt ?? 0,
+					outputPrice: model.pricing?.completion ?? 0,
+					description: model.description,
+				};
+			}
+		}
+
+		openRouterModelsCache = models;
+		return models;
+	} catch (error) {
+		console.error('Failed to fetch OpenRouter models:', error);
+		return {
+			[openRouterDefaultModelId]: openRouterDefaultModelInfo
+		};
+	}
+}
+
 // Gemini
 // https://ai.google.dev/gemini-api/docs/models/gemini
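Note on the hunk above: this is the same fetchOpenRouterModels that the commit removes further down; the function is moved earlier in the file unchanged. The module-level openRouterModelsCache means the /models endpoint is queried at most once per session once a fetch succeeds; a failed fetch is not cached, so a later call retries. A minimal usage sketch:

// Illustrative usage of the caching behavior above.
const first = await fetchOpenRouterModels()   // network request to OPENROUTER_BASE_URL + "/models"
const second = await fetchOpenRouterModels()  // served from openRouterModelsCache
console.log(Object.is(first, second))         // true, provided the first fetch succeeded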
@@ -1130,6 +1164,61 @@ export const groqModels = {
 	},
 } as const satisfies Record<string, ModelInfo>
 
+// Grok
+// https://docs.x.ai/docs/models
+export type GrokModelId = keyof typeof grokModels
+export const grokDefaultModelId: GrokModelId = "grok-3"
+export const grokModels = {
+	"grok-3": {
+		maxTokens: 8192,
+		contextWindow: 131072,
+		supportsImages: false,
+		supportsPromptCache: true,
+		inputPrice: 0,
+		outputPrice: 0,
+	},
+	"grok-3-fast": {
+		maxTokens: 8192,
+		contextWindow: 131072,
+		supportsImages: false,
+		supportsPromptCache: true,
+		inputPrice: 0,
+		outputPrice: 0,
+	},
+	"grok-3-mini": {
+		maxTokens: 8192,
+		contextWindow: 131072,
+		supportsImages: false,
+		supportsPromptCache: true,
+		inputPrice: 0,
+		outputPrice: 0,
+	},
+	"grok-3-mini-fast": {
+		maxTokens: 8192,
+		contextWindow: 131072,
+		supportsImages: false,
+		supportsPromptCache: true,
+		inputPrice: 0,
+		outputPrice: 0,
+	},
+	"grok-2-vision": {
+		maxTokens: 8192,
+		contextWindow: 131072,
+		supportsImages: true,
+		supportsPromptCache: true,
+		inputPrice: 0,
+		outputPrice: 0,
+	},
+	"grok-2-image": {
+		maxTokens: 8192,
+		contextWindow: 131072,
+		supportsImages: true,
+		supportsPromptCache: true,
+		inputPrice: 0,
+		outputPrice: 0,
+	}
+} as const satisfies Record<string, ModelInfo>
+
 /// helper functions
 // get all providers
 export const GetAllProviders = (): ApiProvider[] => {
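Note on the hunk above: every Grok entry shares the same 8192-token output cap and 131072-token context window, inputPrice and outputPrice are left at 0 as placeholders, and only the grok-2-vision and grok-2-image entries advertise image support. A minimal sketch of a lookup against the new table:

// Illustrative lookup; grokModels and grokDefaultModelId come from the hunk above.
const info: ModelInfo = grokModels[grokDefaultModelId]  // "grok-3"
console.log(info.contextWindow)  // 131072
console.log(info.maxTokens)      // 8192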
@@ -1147,42 +1236,6 @@ export const GetEmbeddingProviders = (): ApiProvider[] => {
 	]
 }
 
-let openRouterModelsCache: Record<string, ModelInfo> | null = null;
-
-async function fetchOpenRouterModels(): Promise<Record<string, ModelInfo>> {
-	if (openRouterModelsCache) {
-		return openRouterModelsCache;
-	}
-
-	try {
-		const response = await fetch(OPENROUTER_BASE_URL + "/models");
-		const data = await response.json();
-		const models: Record<string, ModelInfo> = {};
-
-		if (data?.data) {
-			for (const model of data.data) {
-				models[model.id] = {
-					maxTokens: model.top_provider?.max_completion_tokens ?? model.context_length,
-					contextWindow: model.context_length,
-					supportsImages: model.architecture?.modality?.includes("image") ?? false,
-					supportsPromptCache: false,
-					inputPrice: model.pricing?.prompt ?? 0,
-					outputPrice: model.pricing?.completion ?? 0,
-					description: model.description,
-				};
-			}
-		}
-
-		openRouterModelsCache = models;
-		return models;
-	} catch (error) {
-		console.error('Failed to fetch OpenRouter models:', error);
-		return {
-			[openRouterDefaultModelId]: openRouterDefaultModelInfo
-		};
-	}
-}
-
 // Get all models for a provider
 export const GetProviderModels = async (provider: ApiProvider): Promise<Record<string, ModelInfo>> => {
 	switch (provider) {
@@ -1204,6 +1257,8 @@ export const GetProviderModels = async (provider: ApiProvider): Promise<Record<string, ModelInfo>> => {
 			return geminiModels
 		case ApiProvider.Groq:
 			return groqModels
+		case ApiProvider.Grok:
+			return grokModels
 		case ApiProvider.Ollama:
 			return {}
 		case ApiProvider.OpenAICompatible:
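Note on the hunk above: with the new switch arm, Grok models flow through the same async lookup used for every other provider. A minimal usage sketch; the keys come from the grokModels table added earlier:

// Illustrative call against the updated GetProviderModels.
const models = await GetProviderModels(ApiProvider.Grok)
console.log(Object.keys(models))
// -> ['grok-3', 'grok-3-fast', 'grok-3-mini', 'grok-3-mini-fast', 'grok-2-vision', 'grok-2-image']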