simple model config
This commit is contained in:
parent bf29a42baa
commit 025dc85c59
@@ -59,12 +59,14 @@
 		"@radix-ui/react-dialog": "^1.1.2",
 		"@radix-ui/react-dropdown-menu": "^2.1.2",
 		"@radix-ui/react-popover": "^1.1.2",
+		"@radix-ui/react-select": "^2.1.6",
 		"@radix-ui/react-tooltip": "^1.1.3",
 		"@tanstack/react-query": "^5.56.2",
 		"clsx": "^2.1.1",
 		"diff": "^7.0.0",
 		"drizzle-orm": "^0.35.2",
 		"exponential-backoff": "^3.1.1",
+		"fuse.js": "^7.1.0",
 		"fuzzysort": "^3.1.0",
 		"groq-sdk": "^0.7.0",
 		"handlebars": "^4.7.7",
pnpm-lock.yaml — generated, new normal file, 9493 lines.
File diff suppressed because it is too large
@@ -62,7 +62,7 @@ function LLMResponesInfoButton({ message }: { message: ChatAssistantMessage }) {
 				<LLMResponseInfoPopover
 					usage={message.metadata?.usage}
 					estimatedPrice={cost}
-					model={message.metadata?.model?.name}
+					model={message.metadata?.model?.modelId}
 				/>
 			</div>
 		</Tooltip.Trigger>
@@ -279,7 +279,7 @@ const Chat = forwardRef<ChatRef, ChatProps>((props, ref) => {
 			const stream = await streamResponse(
 				chatModel,
 				{
-					model: chatModel.name,
+					model: chatModel.modelId,
 					messages: requestMessages,
 					stream: true,
 				},
@@ -1,14 +1,18 @@
 import * as DropdownMenu from '@radix-ui/react-dropdown-menu'
 import { ChevronDown, ChevronUp } from 'lucide-react'
-import { useState } from 'react'
+import { useMemo, useState } from 'react'
 
 import { useSettings } from '../../../contexts/SettingsContext'
+import { GetProviderModelIds } from "../../../utils/api"
 
 export function ModelSelect() {
 	const { settings, setSettings } = useSettings()
 	const [isOpen, setIsOpen] = useState(false)
 
-	const activeModels = settings.activeModels.filter((model) => model.enabled)
+	const [chatModelId, setChatModelId] = useState(settings.chatModelId)
 
+	const currProviderModels = useMemo(() => {
+		return GetProviderModelIds(settings.chatModelProvider)
+	}, [settings.chatModelProvider])
 
 	return (
 		<DropdownMenu.Root open={isOpen} onOpenChange={setIsOpen}>
@@ -17,11 +21,7 @@ export function ModelSelect() {
 				{isOpen ? <ChevronUp size={12} /> : <ChevronDown size={12} />}
 			</div>
 			<div className="infio-chat-input-model-select__model-name">
-				{
-					activeModels.find(
-						(option) => option.name === settings.chatModelId,
-					)?.name
-				}
+				{chatModelId}
 			</div>
 		</DropdownMenu.Trigger>
 
@@ -29,18 +29,19 @@ export function ModelSelect() {
 		<DropdownMenu.Content
 			className="infio-popover">
 			<ul>
-				{activeModels.map((model) => (
+				{currProviderModels.map((modelId) => (
 					<DropdownMenu.Item
-						key={model.name}
+						key={modelId}
 						onSelect={() => {
+							setChatModelId(modelId)
 							setSettings({
 								...settings,
-								chatModelId: model.name,
+								chatModelId: modelId,
 							})
 						}}
 						asChild
 					>
-						<li>{model.name}</li>
+						<li>{modelId}</li>
 					</DropdownMenu.Item>
 				))}
 			</ul>
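Note: the dropdown above now derives its options from the selected provider instead of the hand-maintained activeModels list. A minimal sketch of the call shape, assuming GetProviderModelIds simply maps a provider to an array of model-id strings (the diff shows only the call site, not the helper's actual declaration in utils/api):

	// Sketch only; the real helper lives in src/utils/api.
	declare function GetProviderModelIds(provider: string): string[]

	const modelIds = GetProviderModelIds('openai') // hypothetical provider value
	modelIds.forEach((modelId) => {
		// each id is rendered directly, e.g. <li>{modelId}</li> in the dropdown
		console.log(modelId)
	})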
@@ -1,10 +1,10 @@
 import { MarkdownView, Plugin } from 'obsidian';
-import React, { useEffect, useRef, useState } from 'react';
+import React, { useEffect, useMemo, useRef, useState } from 'react';
 
 import { APPLY_VIEW_TYPE } from '../../constants';
 import LLMManager from '../../core/llm/manager';
-import { CustomLLMModel } from '../../types/llm/model';
 import { InfioSettings } from '../../types/settings';
+import { GetProviderModelIds } from '../../utils/api';
 import { manualApplyChangesToFile } from '../../utils/apply';
 import { removeAITags } from '../../utils/content-filter';
 import { PromptGenerator } from '../../utils/prompt-generator';
@@ -57,31 +57,35 @@ const ControlArea: React.FC<ControlAreaProps> = ({
 	selectedModel,
 	onModelChange,
 	isSubmitting,
-}) => (
-	<div className="infio-ai-block-controls">
-		<select
-			className="infio-ai-block-model-select"
-			value={selectedModel}
-			onChange={(e) => onModelChange(e.target.value)}
-			disabled={isSubmitting}
-		>
-			{settings.activeModels
-				.filter((model) => !model.isEmbeddingModel && model.enabled)
-				.map((model) => (
-					<option key={model.name} value={model.name}>
-						{model.name}
-					</option>
-				))}
-		</select>
-		<button
-			className="infio-ai-block-submit-button"
-			onClick={onSubmit}
-			disabled={isSubmitting}
-		>
-			{isSubmitting ? "Submitting..." : "Submit"}
-		</button>
-	</div>
-);
+}) => {
+	const currProviderModels = useMemo(() => {
+		return GetProviderModelIds(settings.chatModelProvider)
+			.map((modelId) => (
+				<option key={modelId} value={modelId}>
+					{modelId}
+				</option>
+			))
+	}, [settings])
+
+	return (
+		<div className="infio-ai-block-controls">
+			<select
+				className="infio-ai-block-model-select"
+				value={selectedModel}
+				onChange={(e) => onModelChange(e.target.value)}
+				disabled={isSubmitting}
+			>
+				{currProviderModels}
+			</select>
+			<button
+				className="infio-ai-block-submit-button"
+				onClick={onSubmit}
+				disabled={isSubmitting}
+			>
+				{isSubmitting ? "Submitting..." : "Submit"}
+			</button>
+		</div>);
+};
 
 export const InlineEdit: React.FC<InlineEditProps> = ({
 	source,
@@ -94,14 +98,7 @@ export const InlineEdit: React.FC<InlineEditProps> = ({
 	const [selectedModel, setSelectedModel] = useState(settings.chatModelId);
 	const [isSubmitting, setIsSubmitting] = useState(false);
 
-	const llmManager = new LLMManager({
-		deepseek: settings.deepseekApiKey,
-		openai: settings.openAIApiKey,
-		anthropic: settings.anthropicApiKey,
-		gemini: settings.geminiApiKey,
-		groq: settings.groqApiKey,
-		infio: settings.infioApiKey,
-	});
+	const llmManager = new LLMManager(settings);
 
 	const promptGenerator = new PromptGenerator(
 		async () => {
@@ -171,9 +168,10 @@ export const InlineEdit: React.FC<InlineEditProps> = ({
 			return;
 		}
 
-		const chatModel = settings.activeModels.find(
-			(model) => model.name === selectedModel
-		) as CustomLLMModel;
+		const chatModel = {
+			provider: settings.chatModelProvider,
+			modelId: settings.chatModelId,
+		};
 		if (!chatModel) {
 			setIsSubmitting(false);
 			throw new Error("Invalid chat model");
@@ -193,7 +191,7 @@ export const InlineEdit: React.FC<InlineEditProps> = ({
 		});
 
 		const response = await llmManager.generateResponse(chatModel, {
-			model: chatModel.name,
+			model: chatModel.modelId,
 			messages: requestMessages,
 			stream: false,
 		});
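Across these hunks the registry lookup through settings.activeModels is replaced by a plain value object. A sketch of the shape call sites now build (field names are taken from the hunks above; the actual LLMModel declaration in types/llm/model is not part of this diff, and in the manager hunk further down provider is an ApiProvider enum member rather than a bare string):

	// Assumed from the call sites above, not copied from the repository.
	type LLMModel = {
		provider: string // e.g. settings.chatModelProvider
		modelId: string  // e.g. settings.chatModelId
	}

	const chatModel: LLMModel = { provider: 'openai', modelId: 'gpt-4o' } // hypothetical values
	// Requests now send the id: { model: chatModel.modelId, messages, stream }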
src/constants.ts (135 changed lines)
@@ -1,120 +1,22 @@
-import { CustomLLMModel } from './types/llm/model'
+import { LLMModel } from './types/llm/model'
+// import { ApiProvider } from './utils/api'
 export const CHAT_VIEW_TYPE = 'infio-chat-view'
 export const APPLY_VIEW_TYPE = 'infio-apply-view'
 
-export const DEFAULT_MODELS: CustomLLMModel[] = [
-	{
-		name: 'claude-3.5-sonnet',
-		provider: 'anthropic',
-		enabled: true,
-		isEmbeddingModel: false,
-		isBuiltIn: true,
-	},
-	{
-		name: 'o1-mini',
-		provider: 'openai',
-		enabled: true,
-		isEmbeddingModel: false,
-		isBuiltIn: true,
-	},
-	{
-		name: 'o1-preview',
-		provider: 'openai',
-		enabled: false,
-		isEmbeddingModel: false,
-		isBuiltIn: true,
-	},
-	{
-		name: 'gpt-4o',
-		provider: 'openai',
-		enabled: true,
-		isEmbeddingModel: false,
-		isBuiltIn: true,
-	},
-	{
-		name: 'gpt-4o-mini',
-		provider: 'openai',
-		enabled: false,
-		isEmbeddingModel: false,
-		isBuiltIn: true,
-	},
-	{
-		name: 'deepseek-chat',
-		provider: 'deepseek',
-		enabled: true,
-		isEmbeddingModel: false,
-		isBuiltIn: true,
-	},
-	{
-		name: 'gemini-1.5-pro',
-		provider: 'google',
-		enabled: true,
-		isEmbeddingModel: false,
-		isBuiltIn: true,
-	},
-	{
-		name: 'gemini-2.0-flash-exp',
-		provider: 'google',
-		enabled: true,
-		isEmbeddingModel: false,
-		isBuiltIn: true,
-	},
-	{
-		name: 'gemini-2.0-flash-thinking-exp-1219',
-		provider: 'google',
-		enabled: false,
-		isEmbeddingModel: false,
-		isBuiltIn: true,
-	},
-	{
-		name: 'llama-3.1-70b-versatile',
-		provider: 'groq',
-		enabled: true,
-		isEmbeddingModel: false,
-		isBuiltIn: true,
-	},
-	{
-		name: 'text-embedding-3-small',
-		provider: 'openai',
-		dimension: 1536,
-		enabled: true,
-		isEmbeddingModel: true,
-		isBuiltIn: true,
-	},
-	{
-		name: 'text-embedding-004',
-		provider: 'google',
-		dimension: 768,
-		enabled: true,
-		isEmbeddingModel: true,
-		isBuiltIn: true,
-	},
-	{
-		name: 'nomic-embed-text',
-		provider: 'ollama',
-		dimension: 768,
-		enabled: true,
-		isEmbeddingModel: true,
-		isBuiltIn: true,
-	},
-	{
-		name: 'mxbai-embed-large',
-		provider: 'ollama',
-		dimension: 1024,
-		enabled: true,
-		isEmbeddingModel: true,
-		isBuiltIn: true,
-	},
-	{
-		name: 'bge-m3',
-		provider: 'ollama',
-		dimension: 1024,
-		enabled: true,
-		isEmbeddingModel: true,
-		isBuiltIn: true,
-	}
-]
+export const DEFAULT_MODELS: LLMModel[] = []
+
+// export const PROVIDERS: ApiProvider[] = [
+// 	'Infio',
+// 	'OpenRouter',
+// 	'SiliconFlow',
+// 	'Anthropic',
+// 	'Deepseek',
+// 	'OpenAI',
+// 	'Google',
+// 	'Groq',
+// 	'Ollama',
+// 	'OpenAICompatible',
+// ]
 
 export const SUPPORT_EMBEDDING_SIMENTION: number[] = [
 	384,
@@ -124,7 +26,12 @@ export const SUPPORT_EMBEDDING_SIMENTION: number[] = [
 	1536
 ]
 
+export const OPENAI_BASE_URL = 'https://api.openai.com/v1'
 export const DEEPSEEK_BASE_URL = 'https://api.deepseek.com'
+export const OPENROUTER_BASE_URL = 'https://openrouter.ai/api/v1'
+export const SILICONFLOW_BASE_URL = 'https://api.siliconflow.cn/v1'
+export const ALIBABA_QWEN_BASE_URL = 'https://dashscope.aliyuncs.com/compatible-mode/v1'
+export const INFIO_BASE_URL = 'https://api.infio.com/api/raw_message'
 
 // Pricing in dollars per million tokens
 type ModelPricing = {
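These new base-URL constants are consumed later in the commit when the OpenAI-compatible providers are constructed. A small sketch of that pattern (the two-argument constructor shape is taken from the manager hunk below; the class declaration and the key are stand-ins):

	// Sketch, not the plugin's actual wiring.
	declare class OpenAICompatibleProvider {
		constructor(apiKey: string, baseUrl: string)
	}

	const OPENROUTER_BASE_URL = 'https://openrouter.ai/api/v1'
	const openrouter = new OpenAICompatibleProvider('sk-or-...', OPENROUTER_BASE_URL) // hypothetical key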
@@ -9,7 +9,7 @@ import {
 } from 'react'
 
 import LLMManager from '../core/llm/manager'
-import { CustomLLMModel } from '../types/llm/model'
+import { LLMModel } from '../types/llm/model'
 import {
 	LLMOptions,
 	LLMRequestNonStreaming,
@@ -24,17 +24,17 @@ import { useSettings } from './SettingsContext'
 
 export type LLMContextType = {
 	generateResponse: (
-		model: CustomLLMModel,
+		model: LLMModel,
 		request: LLMRequestNonStreaming,
 		options?: LLMOptions,
 	) => Promise<LLMResponseNonStreaming>
 	streamResponse: (
-		model: CustomLLMModel,
+		model: LLMModel,
 		request: LLMRequestStreaming,
 		options?: LLMOptions,
 	) => Promise<AsyncIterable<LLMResponseStreaming>>
-	chatModel: CustomLLMModel
-	applyModel: CustomLLMModel
+	chatModel: LLMModel
+	applyModel: LLMModel
 }
 
 const LLMContext = createContext<LLMContextType | null>(null)
@@ -43,55 +43,28 @@ export function LLMProvider({ children }: PropsWithChildren) {
 	const [llmManager, setLLMManager] = useState<LLMManager | null>(null)
 	const { settings } = useSettings()
 
-	const chatModel = useMemo((): CustomLLMModel => {
-		const model = settings.activeModels.find(
-			(option) => option.name === settings.chatModelId,
-		)
-		if (!model) {
-			throw new Error('Invalid chat model ID')
-		}
-		return model as CustomLLMModel
+	const chatModel = useMemo((): LLMModel => {
+		return {
+			provider: settings.chatModelProvider,
+			modelId: settings.chatModelId,
+		}
 	}, [settings])
 
-	const applyModel = useMemo((): CustomLLMModel => {
-		const model = settings.activeModels.find(
-			(option) => option.name === settings.applyModelId,
-		)
-		if (!model) {
-			throw new Error('Invalid apply model ID')
-		}
-		if (model.provider === 'ollama') {
-			return {
-				...model,
-				baseUrl: settings.ollamaApplyModel.baseUrl,
-				name: settings.ollamaApplyModel.model,
-			} as CustomLLMModel
-		}
-		return model as CustomLLMModel
+	const applyModel = useMemo((): LLMModel => {
+		return {
+			provider: settings.applyModelProvider,
+			modelId: settings.applyModelId,
+		}
 	}, [settings])
 
 	useEffect(() => {
-		const manager = new LLMManager({
-			deepseek: settings.deepseekApiKey,
-			openai: settings.openAIApiKey,
-			anthropic: settings.anthropicApiKey,
-			gemini: settings.geminiApiKey,
-			groq: settings.groqApiKey,
-			infio: settings.infioApiKey,
-		})
+		const manager = new LLMManager(settings)
 		setLLMManager(manager)
-	}, [
-		settings.deepseekApiKey,
-		settings.openAIApiKey,
-		settings.anthropicApiKey,
-		settings.geminiApiKey,
-		settings.groqApiKey,
-		settings.infioApiKey,
-	])
+	}, [settings])
 
 	const generateResponse = useCallback(
 		async (
-			model: CustomLLMModel,
+			model: LLMModel,
 			request: LLMRequestNonStreaming,
 			options?: LLMOptions,
 		) => {
@@ -105,7 +78,7 @@ export function LLMProvider({ children }: PropsWithChildren) {
 
 	const streamResponse = useCallback(
 		async (
-			model: CustomLLMModel,
+			model: LLMModel,
 			request: LLMRequestStreaming,
 			options?: LLMOptions,
 		) => {
@@ -2,7 +2,7 @@ import * as Handlebars from "handlebars";
 import { Result, err, ok } from "neverthrow";
 
 import { FewShotExample } from "../../settings/versions";
-import { CustomLLMModel } from "../../types/llm/model";
+import { LLMModel } from "../../types/llm/model";
 import { RequestMessage } from '../../types/llm/request';
 import { InfioSettings } from "../../types/settings";
 import LLMManager from '../llm/manager';
@@ -25,9 +25,9 @@ import {
 
 class LLMClient {
 	private llm: LLMManager;
-	private model: CustomLLMModel;
+	private model: LLMModel;
 
-	constructor(llm: LLMManager, model: CustomLLMModel) {
+	constructor(llm: LLMManager, model: LLMModel) {
 		this.llm = llm;
 		this.model = model;
 	}
@@ -100,17 +100,11 @@ class AutoComplete implements AutocompleteService {
 		postProcessors.push(new RemoveOverlap());
 		postProcessors.push(new RemoveWhitespace());
 
-		const llm_manager = new LLMManager({
-			deepseek: settings.deepseekApiKey,
-			openai: settings.openAIApiKey,
-			anthropic: settings.anthropicApiKey,
-			gemini: settings.geminiApiKey,
-			groq: settings.groqApiKey,
-			infio: settings.infioApiKey,
-		})
-		const model = settings.activeModels.find(
-			(option) => option.name === settings.chatModelId,
-		) as CustomLLMModel;
+		const llm_manager = new LLMManager(settings)
+		const model = {
+			provider: settings.applyModelProvider,
+			modelId: settings.applyModelId,
+		}
 		const llm = new LLMClient(llm_manager, model);
 
 		return new AutoComplete(
@@ -6,7 +6,7 @@ import {
 	TextBlockParam,
 } from '@anthropic-ai/sdk/resources/messages'
 
-import { CustomLLMModel } from '../../types/llm/model'
+import { LLMModel } from '../../types/llm/model'
 import {
 	LLMOptions,
 	LLMRequestNonStreaming,
@@ -36,21 +36,14 @@ export class AnthropicProvider implements BaseLLMProvider {
 	}
 
 	async generateResponse(
-		model: CustomLLMModel,
+		model: LLMModel,
 		request: LLMRequestNonStreaming,
 		options?: LLMOptions,
 	): Promise<LLMResponseNonStreaming> {
 		if (!this.client.apiKey) {
-			if (!model.apiKey) {
-				throw new LLMAPIKeyNotSetException(
-					'Anthropic API key is missing. Please set it in settings menu.',
-				)
-			}
-			this.client = new Anthropic({
-				baseURL: model.baseUrl,
-				apiKey: model.apiKey,
-				dangerouslyAllowBrowser: true
-			})
+			throw new LLMAPIKeyNotSetException(
+				'Anthropic API key is missing. Please set it in settings menu.',
+			)
 		}
 
 		const systemMessage = AnthropicProvider.validateSystemMessages(
@@ -89,21 +82,14 @@ export class AnthropicProvider implements BaseLLMProvider {
 	}
 
 	async streamResponse(
-		model: CustomLLMModel,
+		model: LLMModel,
 		request: LLMRequestStreaming,
 		options?: LLMOptions,
 	): Promise<AsyncIterable<LLMResponseStreaming>> {
 		if (!this.client.apiKey) {
-			if (!model.apiKey) {
-				throw new LLMAPIKeyNotSetException(
-					'Anthropic API key is missing. Please set it in settings menu.',
-				)
-			}
-			this.client = new Anthropic({
-				baseURL: model.baseUrl,
-				apiKey: model.apiKey,
-				dangerouslyAllowBrowser: true
-			})
+			throw new LLMAPIKeyNotSetException(
+				'Anthropic API key is missing. Please set it in settings menu.',
+			)
 		}
 
 		const systemMessage = AnthropicProvider.validateSystemMessages(
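The same simplification repeats in the Gemini, Groq, and OpenAI providers below: the per-request fallback that rebuilt the client from model.apiKey/model.baseUrl is gone, so whatever key a provider was constructed with is the only one it will ever use. A minimal sketch of the resulting contract (illustrative names only, not taken from the plugin):

	// Sketch of the post-commit behavior: keys are fixed at construction time.
	class ExampleProvider {
		constructor(private apiKey: string) {}

		private ensureKey(): void {
			if (!this.apiKey) {
				// No per-model fallback anymore; the user must fix their settings.
				throw new Error('API key is missing. Please set it in settings menu.')
			}
		}
	}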
@@ -1,4 +1,4 @@
-import { CustomLLMModel } from '../../types/llm/model'
+import { LLMModel } from '../../types/llm/model'
 import {
 	LLMOptions,
 	LLMRequestNonStreaming,
@@ -11,12 +11,12 @@ import {
 
 export type BaseLLMProvider = {
 	generateResponse(
-		model: CustomLLMModel,
+		model: LLMModel,
 		request: LLMRequestNonStreaming,
 		options?: LLMOptions,
 	): Promise<LLMResponseNonStreaming>
 	streamResponse(
-		model: CustomLLMModel,
+		model: LLMModel,
 		request: LLMRequestStreaming,
 		options?: LLMOptions,
 	): Promise<AsyncIterable<LLMResponseStreaming>>
@@ -7,7 +7,7 @@ import {
 	Part,
 } from '@google/generative-ai'
 
-import { CustomLLMModel } from '../../types/llm/model'
+import { LLMModel } from '../../types/llm/model'
 import {
 	LLMOptions,
 	LLMRequestNonStreaming,
@@ -43,18 +43,14 @@ export class GeminiProvider implements BaseLLMProvider {
 	}
 
 	async generateResponse(
-		model: CustomLLMModel,
+		model: LLMModel,
 		request: LLMRequestNonStreaming,
 		options?: LLMOptions,
 	): Promise<LLMResponseNonStreaming> {
 		if (!this.apiKey) {
-			if (!model.apiKey) {
-				throw new LLMAPIKeyNotSetException(
-					`Gemini API key is missing. Please set it in settings menu.`,
-				)
-			}
-			this.apiKey = model.apiKey
-			this.client = new GoogleGenerativeAI(model.apiKey)
+			throw new LLMAPIKeyNotSetException(
+				`Gemini API key is missing. Please set it in settings menu.`,
+			)
 		}
 
 		const systemMessages = request.messages.filter((m) => m.role === 'system')
@@ -110,18 +106,14 @@ export class GeminiProvider implements BaseLLMProvider {
 	}
 
 	async streamResponse(
-		model: CustomLLMModel,
+		model: LLMModel,
 		request: LLMRequestStreaming,
 		options?: LLMOptions,
 	): Promise<AsyncIterable<LLMResponseStreaming>> {
 		if (!this.apiKey) {
-			if (!model.apiKey) {
-				throw new LLMAPIKeyNotSetException(
-					`Gemini API key is missing. Please set it in settings menu.`,
-				)
-			}
-			this.apiKey = model.apiKey
-			this.client = new GoogleGenerativeAI(model.apiKey)
+			throw new LLMAPIKeyNotSetException(
+				`Gemini API key is missing. Please set it in settings menu.`,
+			)
 		}
 
 		const systemMessages = request.messages.filter((m) => m.role === 'system')
@@ -6,7 +6,7 @@ import {
 	ChatCompletionMessageParam,
 } from 'groq-sdk/resources/chat/completions'
 
-import { CustomLLMModel } from '../../types/llm/model'
+import { LLMModel } from '../../types/llm/model'
 import {
 	LLMOptions,
 	LLMRequestNonStreaming,
@@ -35,20 +35,14 @@ export class GroqProvider implements BaseLLMProvider {
 	}
 
 	async generateResponse(
-		model: CustomLLMModel,
+		model: LLMModel,
 		request: LLMRequestNonStreaming,
 		options?: LLMOptions,
 	): Promise<LLMResponseNonStreaming> {
 		if (!this.client.apiKey) {
-			if (!model.apiKey) {
-				throw new LLMAPIKeyNotSetException(
-					'Groq API key is missing. Please set it in settings menu.',
-				)
-			}
-			this.client = new Groq({
-				apiKey: model.apiKey,
-				dangerouslyAllowBrowser: true,
-			})
+			throw new LLMAPIKeyNotSetException(
+				'Groq API key is missing. Please set it in settings menu.',
+			)
 		}
 
 		try {
@@ -78,20 +72,14 @@ export class GroqProvider implements BaseLLMProvider {
 	}
 
 	async streamResponse(
-		model: CustomLLMModel,
+		model: LLMModel,
 		request: LLMRequestStreaming,
 		options?: LLMOptions,
 	): Promise<AsyncIterable<LLMResponseStreaming>> {
 		if (!this.client.apiKey) {
-			if (!model.apiKey) {
-				throw new LLMAPIKeyNotSetException(
-					'Groq API key is missing. Please set it in settings menu.',
-				)
-			}
-			this.client = new Groq({
-				apiKey: model.apiKey,
-				dangerouslyAllowBrowser: true,
-			})
+			throw new LLMAPIKeyNotSetException(
+				'Groq API key is missing. Please set it in settings menu.',
+			)
 		}
 
 		try {
@@ -4,12 +4,12 @@ import {
 	ChatCompletionChunk,
 } from 'openai/resources/chat/completions'
 
-import { CustomLLMModel } from '../../types/llm/model'
+import { INFIO_BASE_URL } from '../../constants'
+import { LLMModel } from '../../types/llm/model'
 import {
-	LLMOptions,
 	LLMRequestNonStreaming,
 	LLMRequestStreaming,
-	RequestMessage,
+	RequestMessage
 } from '../../types/llm/request'
 import {
 	LLMResponseNonStreaming,
@@ -85,13 +85,13 @@ export class InfioProvider implements BaseLLMProvider {
 		// this.client = new OpenAI({ apiKey, dangerouslyAllowBrowser: true })
 		// this.adapter = new OpenAIMessageAdapter()
 		this.apiKey = apiKey
-		this.baseUrl = 'https://api.infio.com/api/raw_message'
+		this.baseUrl = INFIO_BASE_URL
 	}
 
 	async generateResponse(
-		model: CustomLLMModel,
+		model: LLMModel,
 		request: LLMRequestNonStreaming,
-		options?: LLMOptions,
+		// options?: LLMOptions,
 	): Promise<LLMResponseNonStreaming> {
 		if (!this.apiKey) {
 			throw new LLMAPIKeyNotSetException(
@@ -107,7 +107,7 @@ export class InfioProvider implements BaseLLMProvider {
 			presence_penalty: request.presence_penalty,
 			max_tokens: request.max_tokens,
 		}
-		const options = {
+		const req_options = {
 			method: 'POST',
 			headers: {
 				Authorization: this.apiKey,
@@ -117,7 +117,7 @@ export class InfioProvider implements BaseLLMProvider {
 			body: JSON.stringify(req)
 		};
 
-		const response = await fetch(this.baseUrl, options);
+		const response = await fetch(this.baseUrl, req_options);
 		if (!response.ok) {
 			throw new Error(`HTTP error! status: ${response.status}`);
 		}
@@ -134,9 +134,8 @@ export class InfioProvider implements BaseLLMProvider {
 	}
 
 	async streamResponse(
-		model: CustomLLMModel,
+		model: LLMModel,
 		request: LLMRequestStreaming,
-		options?: LLMOptions,
 	): Promise<AsyncIterable<LLMResponseStreaming>> {
 		if (!this.apiKey) {
 			throw new LLMAPIKeyNotSetException(
@@ -154,7 +153,7 @@ export class InfioProvider implements BaseLLMProvider {
 			presence_penalty: request.presence_penalty,
 			max_tokens: request.max_tokens,
 		}
-		const options = {
+		const req_options = {
 			method: 'POST',
 			headers: {
 				Authorization: this.apiKey,
@@ -164,7 +163,7 @@ export class InfioProvider implements BaseLLMProvider {
 			body: JSON.stringify(req)
 		};
 
-		const response = await fetch(this.baseUrl, options);
+		const response = await fetch(this.baseUrl, req_options);
 		if (!response.ok) {
 			throw new Error(`HTTP error! status: ${response.status}`);
 		}
@@ -1,14 +1,15 @@
-import { DEEPSEEK_BASE_URL } from '../../constants'
-import { CustomLLMModel } from '../../types/llm/model'
+import { ALIBABA_QWEN_BASE_URL, DEEPSEEK_BASE_URL, OPENROUTER_BASE_URL, SILICONFLOW_BASE_URL } from '../../constants'
+import { ApiProvider, LLMModel } from '../../types/llm/model'
 import {
 	LLMOptions,
 	LLMRequestNonStreaming,
 	LLMRequestStreaming,
 } from '../../types/llm/request'
 import {
 	LLMResponseNonStreaming,
 	LLMResponseStreaming,
 } from '../../types/llm/response'
+import { InfioSettings } from '../../types/settings'
 
 import { AnthropicProvider } from './anthropic'
 import { GeminiProvider } from './gemini'
@@ -20,123 +21,147 @@ import { OpenAICompatibleProvider } from './openai-compatible-provider'
 
 
 export type LLMManagerInterface = {
 	generateResponse(
-		model: CustomLLMModel,
+		model: LLMModel,
 		request: LLMRequestNonStreaming,
 		options?: LLMOptions,
 	): Promise<LLMResponseNonStreaming>
 	streamResponse(
-		model: CustomLLMModel,
+		model: LLMModel,
 		request: LLMRequestStreaming,
 		options?: LLMOptions,
 	): Promise<AsyncIterable<LLMResponseStreaming>>
 }
 
 class LLMManager implements LLMManagerInterface {
 	private openaiProvider: OpenAIAuthenticatedProvider
 	private deepseekProvider: OpenAICompatibleProvider
 	private anthropicProvider: AnthropicProvider
 	private googleProvider: GeminiProvider
 	private groqProvider: GroqProvider
 	private infioProvider: InfioProvider
+	private openrouterProvider: OpenAICompatibleProvider
+	private siliconflowProvider: OpenAICompatibleProvider
+	private alibabaQwenProvider: OpenAICompatibleProvider
 	private ollamaProvider: OllamaProvider
 	private isInfioEnabled: boolean
 
-	constructor(apiKeys: {
-		deepseek?: string
-		openai?: string
-		anthropic?: string
-		gemini?: string
-		groq?: string
-		infio?: string
-	}) {
-		this.deepseekProvider = new OpenAICompatibleProvider(apiKeys.deepseek ?? '', DEEPSEEK_BASE_URL)
-		this.openaiProvider = new OpenAIAuthenticatedProvider(apiKeys.openai ?? '')
-		this.anthropicProvider = new AnthropicProvider(apiKeys.anthropic ?? '')
-		this.googleProvider = new GeminiProvider(apiKeys.gemini ?? '')
-		this.groqProvider = new GroqProvider(apiKeys.groq ?? '')
-		this.infioProvider = new InfioProvider(apiKeys.infio ?? '')
-		this.ollamaProvider = new OllamaProvider()
-		this.isInfioEnabled = !!apiKeys.infio
+	constructor(settings: InfioSettings) {
+		this.infioProvider = new InfioProvider(settings.infioProvider.apiKey)
+		this.openrouterProvider = new OpenAICompatibleProvider(settings.openrouterProvider.apiKey, OPENROUTER_BASE_URL)
+		this.siliconflowProvider = new OpenAICompatibleProvider(settings.siliconflowProvider.apiKey, SILICONFLOW_BASE_URL)
+		this.alibabaQwenProvider = new OpenAICompatibleProvider(settings.alibabaQwenProvider.apiKey, ALIBABA_QWEN_BASE_URL)
+		this.deepseekProvider = new OpenAICompatibleProvider(settings.deepseekProvider.apiKey, DEEPSEEK_BASE_URL)
+		this.openaiProvider = new OpenAIAuthenticatedProvider(settings.openaiProvider.apiKey)
+		this.anthropicProvider = new AnthropicProvider(settings.anthropicProvider.apiKey)
+		this.googleProvider = new GeminiProvider(settings.googleProvider.apiKey)
+		this.groqProvider = new GroqProvider(settings.groqProvider.apiKey)
+		this.ollamaProvider = new OllamaProvider(settings.groqProvider.baseUrl)
+		this.isInfioEnabled = !!settings.infioProvider.apiKey
 	}
 
 	async generateResponse(
-		model: CustomLLMModel,
+		model: LLMModel,
 		request: LLMRequestNonStreaming,
 		options?: LLMOptions,
 	): Promise<LLMResponseNonStreaming> {
 		if (this.isInfioEnabled) {
 			return await this.infioProvider.generateResponse(
 				model,
 				request,
-				options,
 			)
 		}
 		// use custom provider
 		switch (model.provider) {
-			case 'deepseek':
-				return await this.deepseekProvider.generateResponse(
-					model,
-					request,
-					options,
-				)
-			case 'openai':
-				return await this.openaiProvider.generateResponse(
-					model,
-					request,
-					options,
-				)
-			case 'anthropic':
-				return await this.anthropicProvider.generateResponse(
-					model,
-					request,
-					options,
-				)
-			case 'google':
-				return await this.googleProvider.generateResponse(
-					model,
-					request,
-					options,
-				)
-			case 'groq':
-				return await this.groqProvider.generateResponse(model, request, options)
-			case 'ollama':
-				return await this.ollamaProvider.generateResponse(
-					model,
-					request,
-					options,
-				)
+			case ApiProvider.OpenRouter:
+				return await this.openrouterProvider.generateResponse(
+					model,
+					request,
+					options,
+				)
+			case ApiProvider.SiliconFlow:
+				return await this.siliconflowProvider.generateResponse(
+					model,
+					request,
+					options,
+				)
+			case ApiProvider.AlibabaQwen:
+				return await this.alibabaQwenProvider.generateResponse(
+					model,
+					request,
+					options,
+				)
+			case ApiProvider.Deepseek:
+				return await this.deepseekProvider.generateResponse(
+					model,
+					request,
+					options,
+				)
+			case ApiProvider.OpenAI:
+				return await this.openaiProvider.generateResponse(
+					model,
+					request,
+					options,
+				)
+			case ApiProvider.Anthropic:
+				return await this.anthropicProvider.generateResponse(
+					model,
+					request,
+					options,
+				)
+			case ApiProvider.Google:
+				return await this.googleProvider.generateResponse(
+					model,
+					request,
+					options,
+				)
+			case ApiProvider.Groq:
+				return await this.groqProvider.generateResponse(model, request, options)
+			case ApiProvider.Ollama:
+				return await this.ollamaProvider.generateResponse(
+					model,
+					request,
+					options,
+				)
+			default:
+				throw new Error(`Unsupported model provider: ${model.provider}`)
 		}
 	}
 
 	async streamResponse(
-		model: CustomLLMModel,
+		model: LLMModel,
 		request: LLMRequestStreaming,
 		options?: LLMOptions,
 	): Promise<AsyncIterable<LLMResponseStreaming>> {
 		if (this.isInfioEnabled) {
-			return await this.infioProvider.streamResponse(model, request, options)
+			return await this.infioProvider.streamResponse(model, request)
 		}
 		// use custom provider
 		switch (model.provider) {
-			case 'deepseek':
-				return await this.deepseekProvider.streamResponse(model, request, options)
-			case 'openai':
-				return await this.openaiProvider.streamResponse(model, request, options)
-			case 'anthropic':
-				return await this.anthropicProvider.streamResponse(
-					model,
-					request,
-					options,
-				)
-			case 'google':
-				return await this.googleProvider.streamResponse(model, request, options)
-			case 'groq':
-				return await this.groqProvider.streamResponse(model, request, options)
-			case 'ollama':
-				return await this.ollamaProvider.streamResponse(model, request, options)
+			case ApiProvider.OpenRouter:
+				return await this.openrouterProvider.streamResponse(model, request, options)
+			case ApiProvider.SiliconFlow:
+				return await this.siliconflowProvider.streamResponse(model, request, options)
+			case ApiProvider.AlibabaQwen:
+				return await this.alibabaQwenProvider.streamResponse(model, request, options)
+			case ApiProvider.Deepseek:
+				return await this.deepseekProvider.streamResponse(model, request, options)
+			case ApiProvider.OpenAI:
+				return await this.openaiProvider.streamResponse(model, request, options)
+			case ApiProvider.Anthropic:
+				return await this.anthropicProvider.streamResponse(
+					model,
+					request,
+					options,
+				)
+			case ApiProvider.Google:
+				return await this.googleProvider.streamResponse(model, request, options)
+			case ApiProvider.Groq:
+				return await this.groqProvider.streamResponse(model, request, options)
+			case ApiProvider.Ollama:
+				return await this.ollamaProvider.streamResponse(model, request, options)
 		}
 	}
 }
 
 export default LLMManager
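Taken together: the manager is now wired entirely from InfioSettings, and dispatch keys off the ApiProvider enum rather than string literals. A rough usage sketch (the model id, message array, and settings object are hypothetical stand-ins for values the plugin supplies):

	const manager = new LLMManager(settings) // every provider key is read from settings

	const model: LLMModel = {
		provider: ApiProvider.Deepseek, // routed by the switch above
		modelId: 'deepseek-chat',
	}

	const response = await manager.generateResponse(model, {
		model: model.modelId, // call sites now pass modelId, not model.name
		messages: [{ role: 'user', content: 'hello' }],
		stream: false,
	})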
@@ -7,7 +7,7 @@
 import OpenAI from 'openai'
 import { FinalRequestOptions } from 'openai/core'
 
-import { CustomLLMModel } from '../../types/llm/model'
+import { LLMModel } from '../../types/llm/model'
 import {
 	LLMOptions,
 	LLMRequestNonStreaming,
@@ -19,7 +19,7 @@ import {
 } from '../../types/llm/response'
 
 import { BaseLLMProvider } from './base'
-import { LLMBaseUrlNotSetException, LLMModelNotSetException } from './exception'
+import { LLMBaseUrlNotSetException } from './exception'
 import { OpenAIMessageAdapter } from './openai-message-adapter'
 
 export class NoStainlessOpenAI extends OpenAI {
@@ -35,7 +35,7 @@ export class NoStainlessOpenAI extends OpenAI {
 		{ retryCount = 0 }: { retryCount?: number } = {},
 	): { req: RequestInit; url: string; timeout: number } {
 		const req = super.buildRequest(options, { retryCount })
-		const headers = req.req.headers as Record<string, string>
+		const headers: Record<string, string> = req.req.headers
 		Object.keys(headers).forEach((k) => {
 			if (k.startsWith('x-stainless')) {
 				// eslint-disable-next-line @typescript-eslint/no-dynamic-delete
@@ -48,30 +48,26 @@ export class NoStainlessOpenAI extends OpenAI {
 
 export class OllamaProvider implements BaseLLMProvider {
 	private adapter: OpenAIMessageAdapter
+	private baseUrl: string
 
-	constructor() {
+	constructor(baseUrl: string) {
 		this.adapter = new OpenAIMessageAdapter()
+		this.baseUrl = baseUrl
 	}
 
 	async generateResponse(
-		model: CustomLLMModel,
+		model: LLMModel,
 		request: LLMRequestNonStreaming,
 		options?: LLMOptions,
 	): Promise<LLMResponseNonStreaming> {
-		if (!model.baseUrl) {
+		if (!this.baseUrl) {
 			throw new LLMBaseUrlNotSetException(
 				'Ollama base URL is missing. Please set it in settings menu.',
 			)
 		}
 
-		if (!model.name) {
-			throw new LLMModelNotSetException(
-				'Ollama model is missing. Please set it in settings menu.',
-			)
-		}
-
 		const client = new NoStainlessOpenAI({
-			baseURL: `${model.baseUrl}/v1`,
+			baseURL: `${this.baseUrl}/v1`,
 			apiKey: '',
 			dangerouslyAllowBrowser: true,
 		})
@@ -79,24 +75,18 @@ export class OllamaProvider implements BaseLLMProvider {
 	}
 
 	async streamResponse(
-		model: CustomLLMModel,
+		model: LLMModel,
 		request: LLMRequestStreaming,
 		options?: LLMOptions,
 	): Promise<AsyncIterable<LLMResponseStreaming>> {
-		if (!model.baseUrl) {
+		if (!this.baseUrl) {
 			throw new LLMBaseUrlNotSetException(
 				'Ollama base URL is missing. Please set it in settings menu.',
 			)
 		}
 
-		if (!model.name) {
-			throw new LLMModelNotSetException(
-				'Ollama model is missing. Please set it in settings menu.',
-			)
-		}
-
 		const client = new NoStainlessOpenAI({
-			baseURL: `${model.baseUrl}/v1`,
+			baseURL: `${this.baseUrl}/v1`,
 			apiKey: '',
 			dangerouslyAllowBrowser: true,
 		})
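With the base URL moved into the provider, each OllamaProvider instance is bound to one endpoint at construction (the manager above passes it a baseUrl from settings). A usage sketch with a hypothetical local endpoint and model:

	const ollama = new OllamaProvider('http://localhost:11434') // hypothetical URL
	const res = await ollama.generateResponse(
		{ provider: 'ollama', modelId: 'llama3' }, // hypothetical LLMModel value
		{ model: 'llama3', messages: [{ role: 'user', content: 'hi' }], stream: false },
	)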
@@ -1,6 +1,6 @@
 import OpenAI from 'openai'
 
-import { CustomLLMModel } from '../../types/llm/model'
+import { LLMModel } from '../../types/llm/model'
 import {
 	LLMOptions,
 	LLMRequestNonStreaming,
@@ -33,7 +33,7 @@ export class OpenAICompatibleProvider implements BaseLLMProvider {
 	}
 
 	async generateResponse(
-		model: CustomLLMModel,
+		model: LLMModel,
 		request: LLMRequestNonStreaming,
 		options?: LLMOptions,
 	): Promise<LLMResponseNonStreaming> {
@@ -47,7 +47,7 @@ export class OpenAICompatibleProvider implements BaseLLMProvider {
 	}
 
 	async streamResponse(
-		model: CustomLLMModel,
+		model: LLMModel,
 		request: LLMRequestStreaming,
 		options?: LLMOptions,
 	): Promise<AsyncIterable<LLMResponseStreaming>> {
@@ -1,91 +1,77 @@
 import OpenAI from 'openai'
 
-import { CustomLLMModel } from '../../types/llm/model'
+import { LLMModel } from '../../types/llm/model'
 import {
 	LLMOptions,
 	LLMRequestNonStreaming,
 	LLMRequestStreaming,
 } from '../../types/llm/request'
 import {
 	LLMResponseNonStreaming,
 	LLMResponseStreaming,
 } from '../../types/llm/response'
 
 import { BaseLLMProvider } from './base'
 import {
 	LLMAPIKeyInvalidException,
 	LLMAPIKeyNotSetException,
 } from './exception'
 import { OpenAIMessageAdapter } from './openai-message-adapter'
 
 export class OpenAIAuthenticatedProvider implements BaseLLMProvider {
 	private adapter: OpenAIMessageAdapter
 	private client: OpenAI
 
 	constructor(apiKey: string) {
 		this.client = new OpenAI({
 			apiKey,
 			dangerouslyAllowBrowser: true,
 		})
 		this.adapter = new OpenAIMessageAdapter()
 	}
 
 	async generateResponse(
-		model: CustomLLMModel,
+		model: LLMModel,
 		request: LLMRequestNonStreaming,
 		options?: LLMOptions,
 	): Promise<LLMResponseNonStreaming> {
 		if (!this.client.apiKey) {
-			if (!model.baseUrl) {
-				throw new LLMAPIKeyNotSetException(
-					'OpenAI API key is missing. Please set it in settings menu.',
-				)
-			}
-			this.client = new OpenAI({
-				apiKey: model.apiKey,
-				baseURL: model.baseUrl,
-				dangerouslyAllowBrowser: true,
-			})
+			throw new LLMAPIKeyNotSetException(
+				'OpenAI API key is missing. Please set it in settings menu.',
+			)
 		}
 		try {
 			return this.adapter.generateResponse(this.client, request, options)
 		} catch (error) {
 			if (error instanceof OpenAI.AuthenticationError) {
 				throw new LLMAPIKeyInvalidException(
 					'OpenAI API key is invalid. Please update it in settings menu.',
 				)
 			}
 			throw error
 		}
 	}
 
 	async streamResponse(
-		model: CustomLLMModel,
+		model: LLMModel,
 		request: LLMRequestStreaming,
 		options?: LLMOptions,
 	): Promise<AsyncIterable<LLMResponseStreaming>> {
 		if (!this.client.apiKey) {
-			if (!model.baseUrl) {
-				throw new LLMAPIKeyNotSetException(
-					'OpenAI API key is missing. Please set it in settings menu.',
-				)
-			}
-			this.client = new OpenAI({
-				apiKey: model.apiKey,
-				baseURL: model.baseUrl,
-				dangerouslyAllowBrowser: true,
-			})
+			throw new LLMAPIKeyNotSetException(
+				'OpenAI API key is missing. Please set it in settings menu.',
+			)
 		}
 
 		try {
 			return this.adapter.streamResponse(this.client, request, options)
 		} catch (error) {
 			if (error instanceof OpenAI.AuthenticationError) {
 				throw new LLMAPIKeyInvalidException(
 					'OpenAI API key is invalid. Please update it in settings menu.',
 				)
 			}
 			throw error
 		}
 	}
 }
@ -1,7 +1,11 @@
|
|||||||
import { GoogleGenerativeAI } from '@google/generative-ai'
|
import { GoogleGenerativeAI } from '@google/generative-ai'
|
||||||
import { OpenAI } from 'openai'
|
import { OpenAI } from 'openai'
|
||||||
|
|
||||||
|
import { ALIBABA_QWEN_BASE_URL, OPENAI_BASE_URL, SILICONFLOW_BASE_URL } from "../../constants"
|
||||||
import { EmbeddingModel } from '../../types/embedding'
|
import { EmbeddingModel } from '../../types/embedding'
|
||||||
|
import { ApiProvider } from '../../types/llm/model'
|
||||||
|
import { InfioSettings } from '../../types/settings'
|
||||||
|
import { GetEmbeddingModelInfo } from '../../utils/api'
|
||||||
import {
|
import {
|
||||||
LLMAPIKeyNotSetException,
|
LLMAPIKeyNotSetException,
|
||||||
LLMBaseUrlNotSetException,
|
LLMBaseUrlNotSetException,
|
||||||
@ -10,22 +14,20 @@ import {
|
|||||||
import { NoStainlessOpenAI } from '../llm/ollama'
|
import { NoStainlessOpenAI } from '../llm/ollama'
|
||||||
|
|
||||||
export const getEmbeddingModel = (
|
export const getEmbeddingModel = (
|
||||||
embeddingModelId: string,
|
settings: InfioSettings,
|
||||||
apiKeys: {
|
|
||||||
openAIApiKey: string
|
|
||||||
geminiApiKey: string
|
|
||||||
},
|
|
||||||
ollamaBaseUrl: string,
|
|
||||||
): EmbeddingModel => {
|
): EmbeddingModel => {
|
||||||
switch (embeddingModelId) {
|
switch (settings.embeddingModelProvider) {
|
||||||
case 'text-embedding-3-small': {
|
case ApiProvider.OpenAI: {
|
||||||
|
const baseURL = settings.openaiProvider.useCustomUrl ? settings.openaiProvider.baseUrl : OPENAI_BASE_URL
|
||||||
const openai = new OpenAI({
|
const openai = new OpenAI({
|
||||||
apiKey: apiKeys.openAIApiKey,
|
apiKey: settings.openaiProvider.apiKey,
|
||||||
|
baseURL: baseURL,
|
||||||
dangerouslyAllowBrowser: true,
|
dangerouslyAllowBrowser: true,
|
||||||
})
|
})
|
||||||
|
const modelInfo = GetEmbeddingModelInfo(settings.embeddingModelProvider, settings.embeddingModelId)
|
||||||
return {
|
return {
|
||||||
id: 'text-embedding-3-small',
|
id: settings.embeddingModelId,
|
||||||
dimension: 1536,
|
dimension: modelInfo.dimensions,
|
||||||
getEmbedding: async (text: string) => {
|
getEmbedding: async (text: string) => {
|
||||||
try {
|
try {
|
||||||
if (!openai.apiKey) {
|
if (!openai.apiKey) {
|
||||||
@@ -34,7 +36,7 @@ export const getEmbeddingModel = (
 						)
 					}
 					const embedding = await openai.embeddings.create({
-						model: 'text-embedding-3-small',
+						model: settings.embeddingModelId,
 						input: text,
 					})
 					return embedding.data[0].embedding
@@ -52,12 +54,87 @@ export const getEmbeddingModel = (
 				},
 			}
 		}
-		case 'text-embedding-004': {
-			const client = new GoogleGenerativeAI(apiKeys.geminiApiKey)
-			const model = client.getGenerativeModel({ model: 'text-embedding-004' })
+		case ApiProvider.SiliconFlow: {
+			const baseURL = settings.siliconflowProvider.useCustomUrl ? settings.siliconflowProvider.baseUrl : SILICONFLOW_BASE_URL
+			const openai = new OpenAI({
+				apiKey: settings.siliconflowProvider.apiKey,
+				baseURL: baseURL,
+				dangerouslyAllowBrowser: true,
+			})
+			const modelInfo = GetEmbeddingModelInfo(settings.embeddingModelProvider, settings.embeddingModelId)
 			return {
-				id: 'text-embedding-004',
-				dimension: 768,
+				id: settings.embeddingModelId,
+				dimension: modelInfo.dimensions,
+				getEmbedding: async (text: string) => {
+					try {
+						if (!openai.apiKey) {
+							throw new LLMAPIKeyNotSetException(
+								'SiliconFlow API key is missing. Please set it in settings menu.',
+							)
+						}
+						const embedding = await openai.embeddings.create({
+							model: settings.embeddingModelId,
+							input: text,
+						})
+						return embedding.data[0].embedding
+					} catch (error) {
+						if (
+							error.status === 429 &&
+							error.message.toLowerCase().includes('rate limit')
+						) {
+							throw new LLMRateLimitExceededException(
+								'SiliconFlow API rate limit exceeded. Please try again later.',
+							)
+						}
+						throw error
+					}
+				},
+			}
+		}
+		case ApiProvider.AlibabaQwen: {
+			const baseURL = settings.alibabaQwenProvider.useCustomUrl ? settings.alibabaQwenProvider.baseUrl : ALIBABA_QWEN_BASE_URL
+			const openai = new OpenAI({
+				apiKey: settings.alibabaQwenProvider.apiKey,
+				baseURL: baseURL,
+				dangerouslyAllowBrowser: true,
+			})
+			const modelInfo = GetEmbeddingModelInfo(settings.embeddingModelProvider, settings.embeddingModelId)
+			return {
+				id: settings.embeddingModelId,
+				dimension: modelInfo.dimensions,
+				getEmbedding: async (text: string) => {
+					try {
+						if (!openai.apiKey) {
+							throw new LLMAPIKeyNotSetException(
+								'Alibaba Qwen API key is missing. Please set it in settings menu.',
+							)
+						}
+						const embedding = await openai.embeddings.create({
+							model: settings.embeddingModelId,
+							input: text,
+						})
+						return embedding.data[0].embedding
+					} catch (error) {
+						if (
+							error.status === 429 &&
+							error.message.toLowerCase().includes('rate limit')
+						) {
+							throw new LLMRateLimitExceededException(
+								'Alibaba Qwen API rate limit exceeded. Please try again later.',
+							)
+						}
+						throw error
+					}
+				},
+			}
+		}
+		case ApiProvider.Google: {
+			const client = new GoogleGenerativeAI(settings.googleProvider.apiKey)
+			const model = client.getGenerativeModel({ model: settings.embeddingModelId })
+			const modelInfo = GetEmbeddingModelInfo(settings.embeddingModelProvider, settings.embeddingModelId)
+			return {
+				id: settings.embeddingModelId,
+				dimension: modelInfo.dimensions,
 				getEmbedding: async (text: string) => {
 					try {
 						const response = await model.embedContent(text)
@@ -76,69 +153,24 @@ export const getEmbeddingModel = (
 				},
 			}
 		}
-		case 'nomic-embed-text': {
+		case ApiProvider.Ollama: {
 			const openai = new NoStainlessOpenAI({
-				apiKey: '',
+				apiKey: settings.ollamaProvider.apiKey,
 				dangerouslyAllowBrowser: true,
-				baseURL: `${ollamaBaseUrl}/v1`,
+				baseURL: `${settings.ollamaProvider.baseUrl}/v1`,
 			})
+			const modelInfo = GetEmbeddingModelInfo(settings.embeddingModelProvider, settings.embeddingModelId)
 			return {
-				id: 'nomic-embed-text',
-				dimension: 768,
+				id: settings.embeddingModelId,
+				dimension: modelInfo.dimensions,
 				getEmbedding: async (text: string) => {
-					if (!ollamaBaseUrl) {
+					if (!settings.ollamaProvider.baseUrl) {
 						throw new LLMBaseUrlNotSetException(
 							'Ollama Address is missing. Please set it in settings menu.',
 						)
 					}
 					const embedding = await openai.embeddings.create({
-						model: 'nomic-embed-text',
-						input: text,
-					})
-					return embedding.data[0].embedding
-				},
-			}
-		}
-		case 'mxbai-embed-large': {
-			const openai = new NoStainlessOpenAI({
-				apiKey: '',
-				dangerouslyAllowBrowser: true,
-				baseURL: `${ollamaBaseUrl}/v1`,
-			})
-			return {
-				id: 'mxbai-embed-large',
-				dimension: 1024,
-				getEmbedding: async (text: string) => {
-					if (!ollamaBaseUrl) {
-						throw new LLMBaseUrlNotSetException(
-							'Ollama Address is missing. Please set it in settings menu.',
-						)
-					}
-					const embedding = await openai.embeddings.create({
-						model: 'mxbai-embed-large',
-						input: text,
-					})
-					return embedding.data[0].embedding
-				},
-			}
-		}
-		case 'bge-m3': {
-			const openai = new NoStainlessOpenAI({
-				apiKey: '',
-				dangerouslyAllowBrowser: true,
-				baseURL: `${ollamaBaseUrl}/v1`,
-			})
-			return {
-				id: 'bge-m3',
-				dimension: 1024,
-				getEmbedding: async (text: string) => {
-					if (!ollamaBaseUrl) {
-						throw new LLMBaseUrlNotSetException(
-							'Ollama Address is missing. Please set it in settings menu.',
-						)
-					}
-					const embedding = await openai.embeddings.create({
-						model: 'bge-m3',
+						model: settings.embeddingModelId,
 						input: text,
 					})
 					return embedding.data[0].embedding
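Note: the factory now takes the whole `InfioSettings` object and derives provider, credentials, base URL, and vector dimension itself. A hedged usage sketch (the import path and note text are assumptions):

	// Sketch: one call resolves the embedder for whatever provider is configured.
	import { getEmbeddingModel } from './embedder' // path assumed
	import { InfioSettings } from '../../types/settings'

	async function embedNote(settings: InfioSettings, text: string): Promise<number[]> {
		const embeddingModel = getEmbeddingModel(settings)
		// id and dimension come from settings + GetEmbeddingModelInfo, not hardcoded tables
		console.log(embeddingModel.id, embeddingModel.dimension)
		return embeddingModel.getEmbedding(text)
	}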
@@ -23,26 +23,12 @@ export class RAGEngine {
 		this.app = app
 		this.settings = settings
 		this.vectorManager = dbManager.getVectorManager()
-		this.embeddingModel = getEmbeddingModel(
-			settings.embeddingModelId,
-			{
-				openAIApiKey: settings.openAIApiKey,
-				geminiApiKey: settings.geminiApiKey,
-			},
-			settings.ollamaEmbeddingModel.baseUrl,
-		)
+		this.embeddingModel = getEmbeddingModel(settings)
 	}

 	setSettings(settings: InfioSettings) {
 		this.settings = settings
-		this.embeddingModel = getEmbeddingModel(
-			settings.embeddingModelId,
-			{
-				openAIApiKey: settings.openAIApiKey,
-				geminiApiKey: settings.geminiApiKey,
-			},
-			settings.ollamaEmbeddingModel.baseUrl,
-		)
+		this.embeddingModel = getEmbeddingModel(settings)
 	}

 	// TODO: Implement automatic vault re-indexing when settings are changed.
@@ -1,7 +1,7 @@
 import { SerializedLexicalNode } from 'lexical'

 import { SUPPORT_EMBEDDING_SIMENTION } from '../constants'
-import { EmbeddingModelId } from '../types/embedding'
+// import { EmbeddingModelId } from '../types/embedding'

 // PostgreSQL column types
 type ColumnDefinition = {
@@ -30,6 +30,7 @@ import { getMentionableBlockData } from './utils/obsidian'
 // Remember to rename these classes and interfaces!
 export default class InfioPlugin extends Plugin {
 	settings: InfioSettings
+	settingTab: InfioSettingTab
 	settingsListeners: ((newSettings: InfioSettings) => void)[] = []
 	initChatProps?: ChatProps
 	dbManager: DBManager | null = null
@@ -41,6 +42,10 @@ export default class InfioPlugin extends Plugin {
 	async onload() {
 		await this.loadSettings()

+		// Add settings tab
+		this.settingTab = new InfioSettingTab(this.app, this)
+		this.addSettingTab(this.settingTab)
+
 		// This creates an icon in the left ribbon.
 		this.addRibbonIcon('wand-sparkles', 'Open infio copilot', () =>
 			this.openChatView(),
@@ -49,14 +54,12 @@ export default class InfioPlugin extends Plugin {
 		this.registerView(CHAT_VIEW_TYPE, (leaf) => new ChatView(leaf, this))
 		this.registerView(APPLY_VIEW_TYPE, (leaf) => new ApplyView(leaf))

-		// This adds a settings tab so the user can configure various aspects of the plugin
-		this.addSettingTab(new InfioSettingTab(this.app, this))
-
 		// Register markdown processor for ai blocks
 		this.inlineEdit = new InlineEdit(this, this.settings);
 		this.registerMarkdownCodeBlockProcessor("infioedit", (source, el, ctx) => {
 			this.inlineEdit?.Processor(source, el, ctx);
 		});

 		// Update inlineEdit when settings change
 		this.addSettingsListener((newSettings) => {
 			this.inlineEdit = new InlineEdit(this, newSettings);
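Note: keeping the tab in `this.settingTab` (instead of constructing it inline) is what lets other code re-render the settings pane after a programmatic settings write. A sketch of the pattern this enables (names are from this commit; the call site is illustrative):

	// Sketch: after changing settings from React code, rebuild the native tab UI.
	async function applySettings(plugin: InfioPlugin, newSettings: InfioSettings) {
		await plugin.setSettings(newSettings)
		plugin.settingTab.display() // re-runs display(), so dropdowns reflect the new values
	}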
@@ -3,7 +3,8 @@ import React from "react";
 import InfioPlugin from "../main";
 import { InfioSettings } from "../types/settings";

-import ModelsSettings from "./ModelsSettings";
+// import ModelsSettings from "./ModelsSettings";
+import ProviderSettings from "./ProviderSettings";

 type CustomSettingsProps = {
 	plugin: InfioPlugin;
@@ -14,16 +15,18 @@ const CustomSettings: React.FC<CustomSettingsProps> = ({ plugin }) => {

 	const handleSettingsUpdate = async (newSettings: InfioSettings) => {
 		await plugin.setSettings(newSettings);
+		// Force refresh the settings page to update dropdowns
+		plugin.settingTab.display();
 	};

 	return (
 		<div>
-			<h1 style={{ display: "flex", alignItems: "center", justifyContent: "space-between" }}>
+			<h1 className="infio-llm-setting-title">
 				<div>
 					Infio Settings <small>v{settings.version}</small>
 				</div>
 			</h1>
-			<ModelsSettings settings={settings} setSettings={handleSettingsUpdate} />
+			<ProviderSettings settings={settings} setSettings={handleSettingsUpdate} />
 		</div>
 	);
 };
@@ -1,4 +1,4 @@
-import React from "react";
+import React, { useEffect, useState } from "react";

 export type DropdownComponentProps = {
 	name: string;
@@ -21,7 +21,7 @@ export const DropdownComponent: React.FC<DropdownComponentProps> = ({
 	<select
 		value={value}
 		onChange={(e) => onChange(e.target.value)}
-		className="infio-llm-setting-item-control"
+		className="infio-llm-setting-item-control, infio-llm-setting-model-id"
 	>
 		{options.map((option) => (
 			<option key={option} value={option}>
@@ -33,7 +33,7 @@ export const DropdownComponent: React.FC<DropdownComponentProps> = ({
 );

 export type TextComponentProps = {
-	name: string;
+	name?: string;
 	description?: string;
 	placeholder: string;
 	value: string;
@@ -48,23 +48,49 @@ export const TextComponent: React.FC<TextComponentProps> = ({
 	value,
 	type = "text",
 	onChange,
-}) => (
-	<div className="infio-llm-setting-item">
-		<div className="infio-llm-setting-item-name">{name}</div>
-		{description && <div className="infio-llm-setting-item-description">{description}</div>}
-		<input
-			type={type}
-			className="infio-llm-setting-item-control"
-			placeholder={placeholder}
-			value={value}
-			onChange={(e) => onChange(e.target.value)}
-		/>
-	</div>
-);
+}) => {
+	const [localValue, setLocalValue] = useState(value);
+
+	// Update local value when prop value changes (e.g., provider change)
+	useEffect(() => {
+		setLocalValue(value);
+	}, [value]);
+
+	const handleChange = (e: React.ChangeEvent<HTMLInputElement>) => {
+		setLocalValue(e.target.value);
+	};
+
+	const handleBlur = () => {
+		if (localValue !== value) {
+			onChange(localValue);
+		}
+	};
+
+	const handleKeyDown = (e: React.KeyboardEvent<HTMLInputElement>) => {
+		if (e.key === 'Enter') {
+			e.currentTarget.blur();
+		}
+	};
+
+	return (
+		<div className="infio-llm-setting-item">
+			<div className="infio-llm-setting-item-name">{name}</div>
+			{description && <div className="infio-llm-setting-item-description">{description}</div>}
+			<input
+				type={type}
+				className="infio-llm-setting-item-control"
+				placeholder={placeholder}
+				value={localValue}
+				onChange={handleChange}
+				onBlur={handleBlur}
+				onKeyDown={handleKeyDown}
+			/>
+		</div>
+	);
+};

 export type ToggleComponentProps = {
-	name?: string;
-	description?: string;
+	name: string;
 	value: boolean;
 	onChange: (value: boolean) => void;
 	disabled?: boolean;
@@ -72,14 +98,11 @@ export type ToggleComponentProps = {

 export const ToggleComponent: React.FC<ToggleComponentProps> = ({
 	name,
-	description,
 	value,
 	onChange,
 	disabled = false,
 }) => (
 	<div className="infio-llm-setting-item">
-		{name && <div className="infio-llm-setting-item-name">{name}</div>}
-		{description && <div className="infio-llm-setting-item-description">{description}</div>}
 		<label className={`switch ${disabled ? "disabled" : ""}`}>
 			<input
 				type="checkbox"
@@ -87,7 +110,7 @@ export const ToggleComponent: React.FC<ToggleComponentProps> = ({
 				onChange={(e) => onChange(e.target.checked)}
 				disabled={disabled}
 			/>
-			<span className="slider round"></span>
+			<span className="infio-llm-setting-checkbox-name">{name}</span>
 		</label>
 	</div>
 );
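Note: `TextComponent` now buffers keystrokes in local state and only commits through `onChange` on blur or Enter, so each keystroke no longer triggers a settings write and a full settings-page refresh. Usage sketch (a JSX fragment; field values illustrative):

	// Sketch: the parent receives one onChange per completed edit, not per keystroke.
	<TextComponent
		name="OpenAI API Key:"
		placeholder="Enter your API key"
		type="password"
		value={settings.openaiProvider.apiKey || ''}
		onChange={(v) => updateProviderApiKey(v)}
	/>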
314	src/settings/ProviderModelsPicker.tsx	Normal file
@@ -0,0 +1,314 @@
+import * as Popover from "@radix-ui/react-popover";
+import Fuse, { FuseResult } from "fuse.js";
+import React, { useEffect, useMemo, useRef, useState } from "react";
+
+import { ApiProvider } from "../types/llm/model";
+// import { PROVIDERS } from '../constants';
+import { GetAllProviders, GetEmbeddingProviderModelIds, GetEmbeddingProviders, GetProviderModelIds } from "../utils/api";
+
+type TextSegment = {
+	text: string;
+	isHighlighted: boolean;
+};
+
+type SearchableItem = {
+	id: string;
+	html: string | TextSegment[];
+};
+
+type HighlightedItem = {
+	id: string;
+	html: TextSegment[];
+};
+
+// Type guard for Record<string, unknown>
+function isRecord(value: unknown): value is Record<string, unknown> {
+	return typeof value === 'object' && value !== null;
+}
+
+// https://gist.github.com/evenfrost/1ba123656ded32fb7a0cd4651efd4db0
+export const highlight = (fuseSearchResult: FuseResult<SearchableItem>[]): HighlightedItem[] => {
+	const set = (obj: Record<string, unknown>, path: string, value: TextSegment[]): void => {
+		const pathValue = path.split(".")
+		let i: number
+		let current = obj
+
+		for (i = 0; i < pathValue.length - 1; i++) {
+			const nextValue = current[pathValue[i]]
+			if (isRecord(nextValue)) {
+				current = nextValue
+			} else {
+				throw new Error(`Invalid path: ${path}`)
+			}
+		}
+
+		current[pathValue[i]] = value
+	}
+
+	// Function to merge overlapping regions
+	const mergeRegions = (regions: [number, number][]): [number, number][] => {
+		if (regions.length === 0) return regions
+
+		// Sort regions by start index
+		regions.sort((a, b) => a[0] - b[0])
+
+		const merged: [number, number][] = [regions[0]]
+
+		for (let i = 1; i < regions.length; i++) {
+			const last = merged[merged.length - 1]
+			const current = regions[i]
+
+			if (current[0] <= last[1] + 1) {
+				// Overlapping or adjacent regions
+				last[1] = Math.max(last[1], current[1])
+			} else {
+				merged.push(current)
+			}
+		}
+
+		return merged
+	}
+
+	const generateHighlightedSegments = (inputText: string, regions: [number, number][] = []): TextSegment[] => {
+		if (regions.length === 0) {
+			return [{ text: inputText, isHighlighted: false }];
+		}
+
+		// Sort and merge overlapping regions
+		const mergedRegions = mergeRegions(regions);
+		const segments: TextSegment[] = [];
+		let nextUnhighlightedRegionStartingIndex = 0;
+
+		mergedRegions.forEach((region) => {
+			const start = region[0];
+			const end = region[1];
+			const lastRegionNextIndex = end + 1;
+
+			// Add unhighlighted segment before the highlight
+			if (nextUnhighlightedRegionStartingIndex < start) {
+				segments.push({
+					text: inputText.substring(nextUnhighlightedRegionStartingIndex, start),
+					isHighlighted: false,
+				});
+			}
+
+			// Add highlighted segment
+			segments.push({
+				text: inputText.substring(start, lastRegionNextIndex),
+				isHighlighted: true,
+			});
+
+			nextUnhighlightedRegionStartingIndex = lastRegionNextIndex;
+		});
+
+		// Add remaining unhighlighted text
+		if (nextUnhighlightedRegionStartingIndex < inputText.length) {
+			segments.push({
+				text: inputText.substring(nextUnhighlightedRegionStartingIndex),
+				isHighlighted: false,
+			});
+		}
+
+		return segments;
+	}
+
+	return fuseSearchResult
+		.filter(({ matches }) => matches && matches.length)
+		.map(({ item, matches }): HighlightedItem => {
+			const highlightedItem: HighlightedItem = {
+				id: item.id,
+				html: typeof item.html === 'string' ? [{ text: item.html, isHighlighted: false }] : [...item.html]
+			}
+
+			matches?.forEach((match) => {
+				if (match.key && typeof match.value === "string" && match.indices) {
+					const mergedIndices = mergeRegions([...match.indices])
+					set(highlightedItem, match.key, generateHighlightedSegments(match.value, mergedIndices))
+				}
+			})
+
+			return highlightedItem
+		})
+}
+
+const HighlightedText: React.FC<{ segments: TextSegment[] }> = ({ segments }) => {
+	return (
+		<>
+			{segments.map((segment, index) => (
+				segment.isHighlighted ? (
+					<span key={index} className="infio-llm-setting-model-item-highlight">{segment.text}</span>
+				) : (
+					<span key={index}>{segment.text}</span>
+				)
+			))}
+		</>
+	);
+};
+
+export type ComboBoxComponentProps = {
+	name: string;
+	provider: ApiProvider;
+	modelId: string;
+	isEmbedding?: boolean,
+	updateModel: (provider: ApiProvider, modelId: string) => void;
+};
+
+export const ComboBoxComponent: React.FC<ComboBoxComponentProps> = ({
+	name,
+	provider,
+	modelId,
+	isEmbedding = false,
+	updateModel,
+}) => {
+	// Provider selection state
+	const [modelProvider, setModelProvider] = useState(provider);
+
+	// Search input state
+	const [searchTerm, setSearchTerm] = useState("");
+	const [isOpen, setIsOpen] = useState(false);
+	const [selectedIndex, setSelectedIndex] = useState(0);
+
+	const providers = isEmbedding ? GetEmbeddingProviders() : GetAllProviders()
+
+	const modelIds = useMemo(() => {
+		return isEmbedding ? GetEmbeddingProviderModelIds(modelProvider) : GetProviderModelIds(modelProvider)
+	}, [modelProvider])
+
+	const searchableItems = useMemo(() => {
+		return modelIds.map((id) => ({
+			id,
+			html: id,
+		}))
+	}, [modelIds])
+
+	// Initialize Fuse for fuzzy search; the threshold is a simple default that can be tuned as needed
+	const fuse = useMemo(() => {
+		return new Fuse(searchableItems, {
+			keys: ["html"],
+			threshold: 0.6,
+			shouldSort: true,
+			isCaseSensitive: false,
+			ignoreLocation: false,
+			includeMatches: true,
+			minMatchCharLength: 1,
+		})
+	}, [searchableItems])
+
+	// Derive the filtered option list from the search term
+	const filteredOptions = useMemo(() => {
+		const results: HighlightedItem[] = searchTerm
+			? highlight(fuse.search(searchTerm))
+			: searchableItems.map(item => ({
+				...item,
+				html: [{ text: item.html, isHighlighted: false }]
+			}))
+		return results
+	}, [searchableItems, searchTerm, fuse])
+
+	const listRef = useRef<HTMLDivElement>(null);
+	const itemRefs = useRef<Array<HTMLDivElement | null>>([]);
+
+	// Scroll the selected item into view whenever it changes
+	useEffect(() => {
+		if (itemRefs.current[selectedIndex]) {
+			itemRefs.current[selectedIndex]?.scrollIntoView({
+				block: "nearest",
+				behavior: "smooth"
+			});
+		}
+	}, [selectedIndex]);
+
+	return (
+		<div className="infio-llm-setting-item">
+			<div className="infio-llm-setting-item-name">{name}</div>
+			<Popover.Root modal={false} open={isOpen} onOpenChange={setIsOpen}>
+				<Popover.Trigger asChild>
+					<div className="infio-llm-setting-item-control">
+						<span className="infio-llm-setting-model-id">{modelId}</span>
+					</div>
+				</Popover.Trigger>
+				<Popover.Content
+					side="bottom"
+					align="start"
+					sideOffset={4}
+					className="infio-llm-setting-combobox-dropdown"
+				>
+					<div ref={listRef}>
+						<div className="infio-llm-setting-search-container">
+							<select
+								className="infio-llm-setting-provider-switch"
+								value={modelProvider}
+								onChange={(e) => setModelProvider(e.target.value)}
+							>
+								{providers.map((provider) => (
+									<option
+										key={provider}
+										value={provider}
+										className={`infio-llm-setting-provider-option ${provider === modelProvider ? 'is-active' : ''}`}
+									>
+										{provider}
+									</option>
+								))}
+							</select>
+							<input
+								type="text"
+								className="infio-llm-setting-item-search"
+								placeholder="search model..."
+								value={searchTerm}
+								onChange={(e) => {
+									setSearchTerm(e.target.value);
+									setSelectedIndex(0);
+								}}
+								onKeyDown={(e) => {
+									switch (e.key) {
+										case "ArrowDown":
+											e.preventDefault();
+											setSelectedIndex((prev) =>
+												Math.min(prev + 1, filteredOptions.length - 1)
+											);
+											break;
+										case "ArrowUp":
+											e.preventDefault();
+											setSelectedIndex((prev) => Math.max(prev - 1, 0));
+											break;
+										case "Enter": {
+											e.preventDefault();
+											const selectedOption = filteredOptions[selectedIndex];
+											if (selectedOption) {
+												updateModel(modelProvider, selectedOption.id);
+												setSearchTerm("");
+												setIsOpen(false);
+											}
+											break;
+										}
+										case "Escape":
+											e.preventDefault();
+											setIsOpen(false);
+											setSearchTerm("");
+											break;
+									}
+								}}
+							/>
+						</div>
+						{filteredOptions.map((option, index) => (
+							<Popover.Close key={option.id} asChild>
+								<div
+									ref={(el) => (itemRefs.current[index] = el)}
+									onMouseEnter={() => setSelectedIndex(index)}
+									onClick={() => {
+										updateModel(modelProvider, option.id);
+										setSearchTerm("");
+										setIsOpen(false);
+									}}
+									className={`infio-llm-setting-combobox-option ${index === selectedIndex ? 'is-selected' : ''}`}
+								>
+									<HighlightedText segments={option.html} />
+								</div>
+							</Popover.Close>
+						))}
+					</div>
+				</Popover.Content>
+			</Popover.Root>
+		</div>
+	);
+};
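Note: `highlight` follows the linked gist's approach: take Fuse's per-match index ranges, merge overlapping or adjacent ranges, and slice the string into flagged segments for rendering. A standalone sketch of the data flow (model ids illustrative):

	// Sketch: from fuzzy matches to renderable segments.
	import Fuse from 'fuse.js'

	const items = [
		{ id: 'deepseek-chat', html: 'deepseek-chat' },
		{ id: 'deepseek-reasoner', html: 'deepseek-reasoner' },
	]
	const fuse = new Fuse(items, { keys: ['html'], includeMatches: true })

	// Each result's html becomes [{ text, isHighlighted }, ...]; highlighted
	// segments are the merged match regions, rendered as accent <span>s.
	const segments = highlight(fuse.search('reason'))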
186	src/settings/ProviderSettings.tsx	Normal file
@@ -0,0 +1,186 @@
+import React, { useMemo, useState } from 'react';
+
+// import { PROVIDERS } from '../constants';
+import { ApiProvider } from '../types/llm/model';
+import { InfioSettings } from '../types/settings';
+import { GetAllProviders } from '../utils/api';
+// import { siliconFlowDefaultModelId } from '../utils/api';
+
+import { DropdownComponent, TextComponent, ToggleComponent } from './FormComponents';
+import { ComboBoxComponent } from './ProviderModelsPicker';
+
+type ProviderSettingKey =
+	| 'infioProvider'
+	| 'openrouterProvider'
+	| 'openaiProvider'
+	| 'siliconflowProvider'
+	| 'alibabaQwenProvider'
+	| 'anthropicProvider'
+	| 'deepseekProvider'
+	| 'googleProvider'
+	| 'groqProvider'
+	| 'ollamaProvider'
+	| 'openaicompatibleProvider';
+
+interface ProviderSettingsProps {
+	settings: InfioSettings;
+	setSettings: (settings: InfioSettings) => Promise<void>;
+}
+
+const keyMap: Record<ApiProvider, ProviderSettingKey> = {
+	'Infio': 'infioProvider',
+	'OpenRouter': 'openrouterProvider',
+	'OpenAI': 'openaiProvider',
+	'SiliconFlow': 'siliconflowProvider',
+	'AlibabaQwen': 'alibabaQwenProvider',
+	'Anthropic': 'anthropicProvider',
+	'Deepseek': 'deepseekProvider',
+	'Google': 'googleProvider',
+	'Groq': 'groqProvider',
+	'Ollama': 'ollamaProvider',
+	'OpenAICompatible': 'openaicompatibleProvider'
+};
+
+const getProviderSettingKey = (provider: ApiProvider): ProviderSettingKey => {
+	return keyMap[provider];
+};
+
+const PROVIDERS = GetAllProviders();
+
+const ProviderSettings: React.FC<ProviderSettingsProps> = ({ settings, setSettings }) => {
+	const [currProvider, setCurrProvider] = useState(settings.defaultProvider);
+
+	const providerSetting = useMemo(() => {
+		const providerKey = getProviderSettingKey(currProvider);
+		return settings[providerKey] || {};
+	}, [currProvider, settings]);
+
+	const updateProvider = (provider: ApiProvider) => {
+		setCurrProvider(provider);
+		setSettings({
+			...settings,
+			defaultProvider: provider
+		});
+	};
+
+	const updateProviderApiKey = (value: string) => {
+		const providerKey = getProviderSettingKey(currProvider);
+		const providerSettings = settings[providerKey];
+
+		setSettings({
+			...settings,
+			[providerKey]: {
+				...providerSettings,
+				apiKey: value
+			}
+		});
+	};
+
+	const updateProviderUseCustomUrl = (value: boolean) => {
+		const providerKey = getProviderSettingKey(currProvider);
+		const providerSettings = settings[providerKey];
+
+		setSettings({
+			...settings,
+			[providerKey]: {
+				...providerSettings,
+				useCustomUrl: value
+			}
+		});
+	};
+
+	const updateProviderBaseUrl = (value: string) => {
+		const providerKey = getProviderSettingKey(currProvider);
+		const providerSettings = settings[providerKey];
+
+		setSettings({
+			...settings,
+			[providerKey]: {
+				...providerSettings,
+				baseUrl: value
+			}
+		});
+	};
+
+	const updateChatModelId = (provider: ApiProvider, modelId: string) => {
+		setSettings({
+			...settings,
+			chatModelProvider: provider,
+			chatModelId: modelId
+		});
+	};
+
+	const updateApplyModelId = (provider: ApiProvider, modelId: string) => {
+		setSettings({
+			...settings,
+			applyModelProvider: provider,
+			applyModelId: modelId
+		});
+	};
+
+	const updateEmbeddingModelId = (provider: ApiProvider, modelId: string) => {
+		setSettings({
+			...settings,
+			embeddingModelProvider: provider,
+			embeddingModelId: modelId
+		});
+	};
+
+	return (
+		<div className="infio-provider">
+			<DropdownComponent
+				name="API Provider:"
+				value={currProvider}
+				options={PROVIDERS}
+				onChange={updateProvider}
+			/>
+			<div className="iinfio-llm-setting-divider"></div>
+			<TextComponent
+				name={currProvider + " API Key:"}
+				placeholder="Enter your API key"
+				value={providerSetting.apiKey || ''}
+				onChange={updateProviderApiKey}
+				type="password"
+			/>
+			<div className="iinfio-llm-setting-divider"></div>
+			<ToggleComponent
+				name="Use custom base URL"
+				value={providerSetting.useCustomUrl || false}
+				onChange={updateProviderUseCustomUrl}
+			/>
+			{providerSetting.useCustomUrl && (
+				<TextComponent
+					placeholder="Enter your custom API endpoint URL"
+					value={providerSetting.baseUrl || ''}
+					onChange={updateProviderBaseUrl}
+				/>
+			)}
+
+			<div className="iinfio-llm-setting-divider"></div>
+			<div className="iinfio-llm-setting-divider"></div>
+			<ComboBoxComponent
+				name="Chat Model:"
+				provider={settings.chatModelProvider || currProvider}
+				modelId={settings.chatModelId}
+				updateModel={updateChatModelId}
+			/>
+			<div className="iinfio-llm-setting-divider"></div>
+			<ComboBoxComponent
+				name="Apply Model:"
+				provider={settings.applyModelProvider || currProvider}
+				modelId={settings.applyModelId}
+				updateModel={updateApplyModelId}
+			/>
+			<div className="iinfio-llm-setting-divider"></div>
+			<ComboBoxComponent
+				name="Embedding Model:"
+				provider={settings.embeddingModelProvider || ApiProvider.Google}
+				modelId={settings.embeddingModelId}
+				isEmbedding={true}
+				updateModel={updateEmbeddingModelId}
+			/>
+		</div>
+	);
+};
+
+export default ProviderSettings;
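Note: all the provider updaters share one immutable shape: map the active provider to its settings key via `keyMap`, then spread-merge a single field. A hypothetical generalization (this helper name is not in the commit):

	// Hypothetical helper equivalent to updateProviderApiKey / updateProviderBaseUrl / updateProviderUseCustomUrl.
	const updateProviderField = (field: 'apiKey' | 'baseUrl' | 'useCustomUrl', value: string | boolean) => {
		const providerKey = getProviderSettingKey(currProvider);
		setSettings({
			...settings,
			[providerKey]: {
				...settings[providerKey],
				[field]: value,
			},
		});
	};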
@@ -31,8 +31,6 @@ export class InfioSettingTab extends PluginSettingTab {
 		const { containerEl } = this
 		containerEl.empty()
 		this.renderModelsSection(containerEl)
-		this.renderAPIKeysSection(containerEl)
-		this.renderDefaultModelSection(containerEl)
 		this.renderRAGSection(containerEl)
 		this.renderAutoCompleteSection(containerEl)
 	}
@@ -108,7 +106,7 @@
 		)

 		new Setting(containerEl)
-			.setName('Gemini API key')
+			.setName('Google API key')
 			.setClass("infio-chat-setting-item-container-append")
 			.addText((text) =>
 				text
@@ -725,6 +723,8 @@
 			<AutoCompleteSettings
 				onSettingsChanged={async (settings) => {
 					this.plugin.setSettings(settings);
+					// Force refresh the settings page to update dropdowns
+					this.plugin.settingTab.display();
 				}}
 				settings={this.plugin.settings}
 			/>
@@ -2,7 +2,7 @@ import { SerializedEditorState } from 'lexical'

 import { SelectVector } from '../database/schema'

-import { CustomLLMModel } from './llm/model'
+import { LLMModel } from './llm/model'
 import { ContentPart } from './llm/request'
 import { ResponseUsage } from './llm/response'
 import { Mentionable, SerializedMentionable } from './mentionable'
@@ -17,15 +17,17 @@ export type ChatUserMessage = {
 		similarity: number
 	})[]
 }

 export type ChatAssistantMessage = {
 	role: 'assistant'
 	content: string
 	id: string
 	metadata?: {
 		usage?: ResponseUsage
-		model?: CustomLLMModel
+		model?: LLMModel
 	}
 }

 export type ChatMessage = ChatUserMessage | ChatAssistantMessage

 export type SerializedChatUserMessage = {
@@ -38,15 +40,17 @@ export type SerializedChatUserMessage = {
 		similarity: number
 	})[]
 }

 export type SerializedChatAssistantMessage = {
 	role: 'assistant'
 	content: string
 	id: string
 	metadata?: {
 		usage?: ResponseUsage
-		model?: CustomLLMModel
+		model?: LLMModel
 	}
 }

 export type SerializedChatMessage =
 	| SerializedChatUserMessage
 	| SerializedChatAssistantMessage
@@ -1,4 +1,6 @@
-import { CustomLLMModel } from './llm/model'
+import { LLMModel } from './llm/model'

+import { EmbeddingModelInfo } from '../utils/api'
+
 export type EmbeddingModelId =
 	| 'text-embedding-3-small'
@@ -10,12 +12,12 @@ export type EmbeddingModelId =
 export type EmbeddingModelOption = {
 	id: EmbeddingModelId
 	name: string
-	model: CustomLLMModel
+	model: LLMModel
 	dimension: number
 }

 export type EmbeddingModel = {
-	id: EmbeddingModelId
+	id: string
 	dimension: number
 	getEmbedding: (text: string) => Promise<number[]>
 }
@@ -1,3 +1,22 @@
+export enum ApiProvider {
+	Infio = "Infio",
+	OpenRouter = "OpenRouter",
+	SiliconFlow = "SiliconFlow",
+	AlibabaQwen = "AlibabaQwen",
+	Anthropic = "Anthropic",
+	Deepseek = "Deepseek",
+	OpenAI = "OpenAI",
+	Google = "Google",
+	Groq = "Groq",
+	Ollama = "Ollama",
+	OpenAICompatible = "OpenAICompatible",
+}
+
+export type LLMModel = {
+	provider: ApiProvider;
+	modelId: string;
+}
+
 // Model Providers
 export enum ModelProviders {
 	OPENAI = "openai",
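Note: `LLMModel` replaces the old `CustomLLMModel`, which carried connection details on every model object; a model is now just an address into a provider's catalog, and keys and base URLs are looked up from the matching provider settings. Sketch (model id illustrative):

	import { ApiProvider, LLMModel } from './llm/model'

	// No apiKey or baseUrl here any more; those live in settings.deepseekProvider etc.
	const chatModel: LLMModel = {
		provider: ApiProvider.Deepseek,
		modelId: 'deepseek-chat',
	}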
@@ -1,6 +1,5 @@
 import { z } from 'zod';
-

 import { DEFAULT_MODELS } from '../constants';
 import {
 	MAX_DELAY,
@@ -11,10 +10,143 @@ import {
 	modelOptionsSchema
 } from '../settings/versions/shared';
 import { DEFAULT_AUTOCOMPLETE_SETTINGS } from "../settings/versions/v1/v1";
+import { ApiProvider } from '../types/llm/model';
 import { isRegexValid, isValidIgnorePattern } from '../utils/auto-complete';

 export const SETTINGS_SCHEMA_VERSION = 0.1

+const InfioProviderSchema = z.object({
+	name: z.literal('Infio'),
+	apiKey: z.string().catch(''),
+	baseUrl: z.string().catch(''),
+	useCustomUrl: z.boolean().catch(false)
+}).catch({
+	name: 'Infio',
+	apiKey: '',
+	baseUrl: '',
+	useCustomUrl: false
+})
+
+const OpenRouterProviderSchema = z.object({
+	name: z.literal('OpenRouter'),
+	apiKey: z.string().catch(''),
+	baseUrl: z.string().catch(''),
+	useCustomUrl: z.boolean().catch(false)
+}).catch({
+	name: 'OpenRouter',
+	apiKey: '',
+	baseUrl: '',
+	useCustomUrl: false
+})
+
+const SiliconFlowProviderSchema = z.object({
+	name: z.literal('SiliconFlow'),
+	apiKey: z.string().catch(''),
+	baseUrl: z.string().catch(''),
+	useCustomUrl: z.boolean().catch(false)
+}).catch({
+	name: 'SiliconFlow',
+	apiKey: '',
+	baseUrl: '',
+	useCustomUrl: false
+})
+
+const AlibabaQwenProviderSchema = z.object({
+	name: z.literal('AlibabaQwen'),
+	apiKey: z.string().catch(''),
+	baseUrl: z.string().catch(''),
+	useCustomUrl: z.boolean().catch(false)
+}).catch({
+	name: 'AlibabaQwen',
+	apiKey: '',
+	baseUrl: '',
+	useCustomUrl: false
+})
+
+const AnthropicProviderSchema = z.object({
+	name: z.literal('Anthropic'),
+	apiKey: z.string().catch(''),
+	baseUrl: z.string().optional(),
+	useCustomUrl: z.boolean().catch(false)
+}).catch({
+	name: 'Anthropic',
+	apiKey: '',
+	baseUrl: '',
+	useCustomUrl: false
+})
+
+const DeepSeekProviderSchema = z.object({
+	name: z.literal('DeepSeek'),
+	apiKey: z.string().catch(''),
+	baseUrl: z.string().catch(''),
+	useCustomUrl: z.boolean().catch(false)
+}).catch({
+	name: 'DeepSeek',
+	apiKey: '',
+	baseUrl: '',
+	useCustomUrl: false
+})
+
+const GoogleProviderSchema = z.object({
+	name: z.literal('Google'),
+	apiKey: z.string().catch(''),
+	baseUrl: z.string().catch(''),
+	useCustomUrl: z.boolean().catch(false)
+}).catch({
+	name: 'Google',
+	apiKey: '',
+	baseUrl: '',
+	useCustomUrl: false
+})
+
+const OpenAIProviderSchema = z.object({
+	name: z.literal('OpenAI'),
+	apiKey: z.string().catch(''),
+	baseUrl: z.string().optional(),
+	useCustomUrl: z.boolean().catch(false)
+}).catch({
+	name: 'OpenAI',
+	apiKey: '',
+	baseUrl: '',
+	useCustomUrl: false
+})
+
+const OpenAICompatibleProviderSchema = z.object({
+	name: z.literal('OpenAICompatible'),
+	apiKey: z.string().catch(''),
+	baseUrl: z.string().optional(),
+	useCustomUrl: z.boolean().catch(true)
+}).catch({
+	name: 'OpenAICompatible',
+	apiKey: '',
+	baseUrl: '',
+	useCustomUrl: true
+})
+
+const OllamaProviderSchema = z.object({
+	name: z.literal('Ollama'),
+	apiKey: z.string().catch(''),
+	baseUrl: z.string().catch(''),
+	useCustomUrl: z.boolean().catch(false)
+}).catch({
+	name: 'Ollama',
+	apiKey: '',
+	baseUrl: '',
+	useCustomUrl: false
+})
+
+const GroqProviderSchema = z.object({
+	name: z.literal('Groq'),
+	apiKey: z.string().catch(''),
+	baseUrl: z.string().catch(''),
+	useCustomUrl: z.boolean().catch(false)
+}).catch({
+	name: 'Groq',
+	apiKey: '',
+	baseUrl: '',
+	useCustomUrl: false
+})
+
 const ollamaModelSchema = z.object({
 	baseUrl: z.string().catch(''),
 	model: z.string().catch(''),
@@ -61,7 +193,34 @@ export const InfioSettingsSchema = z.object({
 	// Version
 	version: z.literal(SETTINGS_SCHEMA_VERSION).catch(SETTINGS_SCHEMA_VERSION),

-	// activeModels
+	// Provider
+	defaultProvider: z.nativeEnum(ApiProvider).catch(ApiProvider.OpenRouter),
+	infioProvider: InfioProviderSchema,
+	openrouterProvider: OpenRouterProviderSchema,
+	siliconflowProvider: SiliconFlowProviderSchema,
+	alibabaQwenProvider: AlibabaQwenProviderSchema,
+	anthropicProvider: AnthropicProviderSchema,
+	deepseekProvider: DeepSeekProviderSchema,
+	openaiProvider: OpenAIProviderSchema,
+	googleProvider: GoogleProviderSchema,
+	ollamaProvider: OllamaProviderSchema,
+	groqProvider: GroqProviderSchema,
+	openaicompatibleProvider: OpenAICompatibleProviderSchema,
+
+	// Chat Model
+	chatModelProvider: z.nativeEnum(ApiProvider).catch(ApiProvider.OpenRouter),
+	chatModelId: z.string().catch(''),
+
+	// Apply Model
+	applyModelProvider: z.nativeEnum(ApiProvider).catch(ApiProvider.OpenRouter),
+	applyModelId: z.string().catch(''),
+
+	// Embedding Model
+	embeddingModelProvider: z.nativeEnum(ApiProvider).catch(ApiProvider.Google),
+	embeddingModelId: z.string().catch(''),
+
+	/// [compatible]
+	// activeModels [compatible]
 	activeModels: z.array(
 		z.object({
 			name: z.string(),
@@ -74,17 +233,17 @@ export const InfioSettingsSchema = z.object({
 			dimension: z.number().optional(),
 		})
 	).catch(DEFAULT_MODELS),
-	// API Keys
+	// API Keys [compatible]
 	infioApiKey: z.string().catch(''),
 	openAIApiKey: z.string().catch(''),
 	anthropicApiKey: z.string().catch(''),
 	geminiApiKey: z.string().catch(''),
 	groqApiKey: z.string().catch(''),
 	deepseekApiKey: z.string().catch(''),
-
-	// DEFAULT Chat Model
-	chatModelId: z.string().catch('deepseek-chat'),
+	ollamaEmbeddingModel: ollamaModelSchema.catch({
+		baseUrl: '',
+		model: '',
+	}),
 	ollamaChatModel: ollamaModelSchema.catch({
 		baseUrl: '',
 		model: '',
@@ -94,9 +253,6 @@ export const InfioSettingsSchema = z.object({
 		apiKey: '',
 		model: '',
 	}),
-
-	// DEFAULT Apply Model
-	applyModelId: z.string().catch('deepseek-chat'),
 	ollamaApplyModel: ollamaModelSchema.catch({
 		baseUrl: '',
 		model: '',
@@ -107,15 +263,6 @@ export const InfioSettingsSchema = z.object({
 		model: '',
 	}),

-	// DEFAULT Embedding Model
-	embeddingModelId: z.string().catch(
-		'text-embedding-004',
-	),
-	ollamaEmbeddingModel: ollamaModelSchema.catch({
-		baseUrl: '',
-		model: '',
-	}),
-
 	// System Prompt
 	systemPrompt: z.string().catch(''),

@@ -132,10 +279,13 @@ export const InfioSettingsSchema = z.object({
 	// autocomplete options
 	autocompleteEnabled: z.boolean(),
 	advancedMode: z.boolean(),

+	// [compatible]
 	apiProvider: z.enum(['azure', 'openai', "ollama"]),
 	azureOAIApiSettings: z.string().catch(''),
 	openAIApiSettings: z.string().catch(''),
 	ollamaApiSettings: z.string().catch(''),

 	triggers: z.array(triggerSchema),
 	delay: z.number().int().min(MIN_DELAY, { message: "Delay must be between 0ms and 2000ms" }).max(MAX_DELAY, { message: "Delay must be between 0ms and 2000ms" }),
 	modelOptions: modelOptionsSchema,
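Note: the pervasive `.catch` calls make the schema self-healing on load: an invalid field parses to its field default, and a wholly invalid provider object parses to the object-level fallback, so old or corrupted settings data never aborts loading. A small standalone illustration:

	import { z } from 'zod'

	const ProviderLike = z.object({
		apiKey: z.string().catch(''),
		useCustomUrl: z.boolean().catch(false),
	}).catch({ apiKey: '', useCustomUrl: false })

	ProviderLike.parse({ apiKey: 'sk-x', useCustomUrl: 'yes' })
	// => { apiKey: 'sk-x', useCustomUrl: false }  (bad field -> field default)
	ProviderLike.parse(42)
	// => { apiKey: '', useCustomUrl: false }      (bad object -> object fallback)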
1202	src/utils/api.ts	Normal file
File diff suppressed because it is too large
@@ -1,58 +1,24 @@
-import {
-	ANTHROPIC_PRICES,
-	GEMINI_PRICES,
-	GROQ_PRICES,
-	OPENAI_PRICES,
-} from '../constants'
-import { CustomLLMModel } from '../types/llm/model'
+import { LLMModel } from '../types/llm/model'
 import { ResponseUsage } from '../types/llm/response'

+import { GetProviderModels } from './api'
+
 // Returns the cost in dollars. Returns null if the model is not supported.
 export const calculateLLMCost = ({
 	model,
 	usage,
 }: {
-	model: CustomLLMModel
+	model: LLMModel
 	usage: ResponseUsage
 }): number | null => {
-	switch (model.provider) {
-		case 'openai': {
-			const modelPricing = OPENAI_PRICES[model.name]
-			if (!modelPricing) return null
-			return (
-				(usage.prompt_tokens * modelPricing.input +
-					usage.completion_tokens * modelPricing.output) /
-				1_000_000
-			)
-		}
-		case 'anthropic': {
-			const modelPricing = ANTHROPIC_PRICES[model.name]
-			if (!modelPricing) return null
-			return (
-				(usage.prompt_tokens * modelPricing.input +
-					usage.completion_tokens * modelPricing.output) /
-				1_000_000
-			)
-		}
-		case 'gemini': {
-			const modelPricing = GEMINI_PRICES[model.name]
-			if (!modelPricing) return null
-			return (
-				(usage.prompt_tokens * modelPricing.input +
-					usage.completion_tokens * modelPricing.output) /
-				1_000_000
-			)
-		}
-		case 'groq': {
-			const modelPricing = GROQ_PRICES[model.name]
-			if (!modelPricing) return null
-			return (
-				(usage.prompt_tokens * modelPricing.input +
-					usage.completion_tokens * modelPricing.output) /
-				1_000_000
-			)
-		}
-		default:
-			return null
-	}
+	const providerModels = GetProviderModels(model.provider)
+	if (!providerModels) {
+		return null
+	}
+	const modelInfo = providerModels[model.modelId]
+	if (!modelInfo) {
+		return null
+	}
+	const cost = modelInfo.inputPrice * usage.prompt_tokens + modelInfo.outputPrice * usage.completion_tokens
+	return cost
 }
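Note the unit change implied here: the old code stored prices per million tokens and divided by 1_000_000, while the new formula multiplies raw token counts directly, so the `inputPrice`/`outputPrice` entries in the provider model tables are presumably already per-token. Worked example with illustrative numbers:

	// Illustrative pricing: $0.15 in / $0.60 out per million tokens, stored per-token.
	const modelInfo = { inputPrice: 0.15 / 1_000_000, outputPrice: 0.60 / 1_000_000 }
	const usage = { prompt_tokens: 1200, completion_tokens: 400 }

	const cost = modelInfo.inputPrice * usage.prompt_tokens
		+ modelInfo.outputPrice * usage.completion_tokens
	// = 0.00018 + 0.00024 = $0.00042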
168
styles.css
168
styles.css
@ -1092,7 +1092,7 @@ input[type='text'].infio-chat-list-dropdown-item-title-input {
|
|||||||
justify-content: center;
|
justify-content: center;
|
||||||
align-items: center;
|
align-items: center;
|
||||||
height: 100%;
|
height: 100%;
|
||||||
margin: 0;
|
margin-top: 5px !important; /* 使用 !important 强制生效 */
|
||||||
}
|
}
|
||||||
|
|
||||||
.infio-llm-model-settings-table .switch {
|
.infio-llm-model-settings-table .switch {
|
||||||
@ -1107,6 +1107,10 @@ input[type='text'].infio-chat-list-dropdown-item-title-input {
|
|||||||
margin-top: 20px;
|
margin-top: 20px;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.infio-provider {
|
||||||
|
margin-top: 20px;
|
||||||
|
}
|
||||||
|
|
||||||
.infio-llm-chat-setting-title {
|
.infio-llm-chat-setting-title {
|
||||||
border-top: 1px solid var(--background-modifier-border);
|
border-top: 1px solid var(--background-modifier-border);
|
||||||
}
|
}
|
||||||
@@ -1119,9 +1123,171 @@ input[type='text'].infio-chat-list-dropdown-item-title-input {
 	margin-bottom: 3px;
 }
 
+.infio-llm-setting-checkbox-name {
+	font-weight: bold;
+	color: var(--inline-title-color);
+}
+
+.infio-llm-setting-select-trigger {
+	display: inline-flex;
+	align-items: center;
+	justify-content: space-between;
+	padding: var(--size-2-1) var(--size-4-1);
+	font-size: var(--font-ui-smaller);
+	color: var(--text-normal);
+	background-color: var(--background-primary);
+	border: 1px solid var(--background-modifier-border);
+	border-radius: var(--radius-s);
+	cursor: pointer;
+	width: 100%;
+	margin-bottom: var(--size-4-1);
+}
+
+.infio-llm-setting-select-content {
+	overflow: hidden;
+	background-color: var(--background-primary);
+	border: 1px solid var(--background-modifier-border);
+	border-radius: var(--radius-s);
+	box-shadow: var(--shadow-s);
+}
+
+.infio-llm-setting-select-item {
+	font-size: var(--font-ui-smaller);
+	color: var(--text-normal);
+	padding: var(--size-2-1) var(--size-4-2);
+	display: flex;
+	align-items: center;
+	justify-content: space-between;
+	cursor: pointer;
+	outline: none;
+}
+
+.infio-llm-setting-select-item:hover {
+	background-color: var(--background-modifier-hover);
+}
+
+.infio-llm-setting-select-item[data-highlighted] {
+	background-color: var(--background-modifier-hover);
+	color: var(--text-normal);
+}
+
+.infio-llm-setting-select-indicator {
+	color: var(--text-accent);
+	padding-left: var(--size-4-1);
+}
+
+.iinfio-llm-setting-divider {
+	margin-top: 14px;
+}
+
+.infio-llm-setting-slider-round {
+	font-weight: bold;
+	color: var(--inline-title-color);
+}
+
 .infio-llm-setting-item-control {
 	width: 50%; /* Adjust the width as needed */
 	max-width: 100%; /* Ensures it doesn't exceed the parent width */
+	background-color: var(--background-primary);
+	color: var(--text-normal);
+	border: 1px solid var(--background-modifier-border);
+	border-radius: var(--radius-s);
+	padding: var(--size-2-1);
+}
+
+.infio-llm-setting-model-id {
+	color: var(--text-accent);
+}
+
+/* Add hover and focus states for better interactivity */
+.infio-llm-setting-item-control:hover {
+	border-color: var(--background-modifier-border-hover);
+}
+
+.infio-llm-setting-item-control:focus {
+	border-color: var(--background-modifier-border-focus);
+	outline: none;
+}
+
+.infio-llm-setting-combobox-dropdown {
+	margin-top: 4px;
+	max-height: 200px;
+	overflow-y: auto;
+	background-color: var(--background-primary);
+	color: var(--text-normal);
+	border: 1px solid var(--background-modifier-border);
+	border-radius: 0;
+	z-index: 1000;
+	padding: 2px 0;
+	box-shadow: var(--shadow-s);
+	width: var(--radix-popover-trigger-width);
+	min-width: var(--radix-popover-trigger-width);
+}
+
+/* Container styles so the select and the input sit on the same line */
+.infio-llm-setting-search-container {
+	display: flex;
+	gap: 2px;
+	align-items: center;
+}
+
+.infio-llm-setting-provider-switch {
+	width: 26%;
+	border-radius: 0;
+	margin: 0;
+	margin-left: 1px;
+	padding: 0;
+	background-color: var(--background-secondary);
+	/* outline: none; */
+	text-align: center;
+	text-align-last: center;
+	color: var(--text-accent);
+}
+
+.infio-llm-setting-provider-switch:focus {
+	/* border: none; */
+	outline: none;
+	box-shadow: none;
+}
+
+.infio-llm-setting-item-search {
+	width: 74%;
+	border: none;
+	margin: 0;
+	padding: 0;
+	/* background-color: var(--background-secondary); */
+	outline: none;
+	border-radius: 0 !important;
+	-webkit-border-radius: 0 !important;
+	-moz-border-radius: 0 !important;
+	-ms-border-radius: 0 !important;
+}
+
+.infio-llm-setting-item-search:focus {
+	border: none;
+	outline: none;
+	box-shadow: none;
+}
+
+.infio-llm-setting-combobox-option {
+	padding: 8px 12px;
+	cursor: pointer;
+	background-color: transparent;
+}
+
+.infio-llm-setting-combobox-option:hover {
+	background-color: var(--background-modifier-hover);
+}
+
+/*
+ * Highlight styles
+ */
+.infio-llm-setting-model-item-highlight {
+	background-color: var(--text-highlight-bg);
+	color: var(--text-normal);
+	border-radius: var(--radius-s);
+	padding: 0 2px;
 }
 
 .infio-llm-setting-item-control::placeholder {
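The new .infio-llm-setting-select-* classes pair naturally with the @radix-ui/react-select dependency this commit pulls in. A minimal sketch of a settings dropdown consuming them; this component is illustrative only (names and props are hypothetical), not code from the commit:

import * as Select from '@radix-ui/react-select'
import { Check, ChevronDown } from 'lucide-react'

// Renders a provider picker styled by the classes added in styles.css above.
export function ProviderSelect({
	providers,
	value,
	onChange,
}: {
	providers: string[]
	value: string
	onChange: (value: string) => void
}) {
	return (
		<Select.Root value={value} onValueChange={onChange}>
			<Select.Trigger className="infio-llm-setting-select-trigger">
				<Select.Value placeholder="Select provider" />
				<ChevronDown size={12} />
			</Select.Trigger>
			<Select.Portal>
				<Select.Content className="infio-llm-setting-select-content">
					<Select.Viewport>
						{providers.map((provider) => (
							<Select.Item
								key={provider}
								value={provider}
								className="infio-llm-setting-select-item"
							>
								<Select.ItemText>{provider}</Select.ItemText>
								<Select.ItemIndicator className="infio-llm-setting-select-indicator">
									<Check size={12} />
								</Select.ItemIndicator>
							</Select.Item>
						))}
					</Select.Viewport>
				</Select.Content>
			</Select.Portal>
		</Select.Root>
	)
}

The [data-highlighted] rule added above is what styles Radix's keyboard-highlighted item, so no extra component state is needed for hover or selection styling.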