Update to version 0.7.4, fix CORS errors for the Moonshot provider, add BM25 search support, and document related changes in the CHANGELOG.

This commit is contained in:
duanfuxiang 2025-07-15 22:41:21 +08:00
parent c0cd2ccf4d
commit d99ea8f2f6
6 changed files with 27 additions and 9 deletions

View File

@@ -1,4 +1,8 @@
 releases:
+- version: "0.7.4"
+  fixes:
+  - "fix moonshot provider cors error"
+  - "add bm25 search support"
 - version: "0.7.3"
   features:
   - "add idb support"

View File

@@ -1,7 +1,7 @@
 {
 "id": "infio-copilot",
 "name": "Infio Copilot",
-"version": "0.7.3",
+"version": "0.7.4",
 "minAppVersion": "0.15.0",
 "description": "A Cursor-inspired AI assistant for notes that offers smart autocomplete and interactive chat with your selected notes",
 "author": "Felix.D",

View File

@@ -1,6 +1,6 @@
 {
 "name": "obsidian-infio-copilot",
-"version": "0.7.3",
+"version": "0.7.4",
 "description": "A Cursor-inspired AI assistant that offers smart autocomplete and interactive chat with your selected notes",
 "main": "main.js",
 "scripts": {

View File

@@ -19,7 +19,7 @@ import { OpenAIMessageAdapter } from './openai-message-adapter'
 export class OpenAICompatibleProvider implements BaseLLMProvider {
 private adapter: OpenAIMessageAdapter
-private client: OpenAI
+private client: OpenAI | NoStainlessOpenAI
 private apiKey: string
 private baseURL: string
@@ -54,8 +54,8 @@ export class OpenAICompatibleProvider implements BaseLLMProvider {
 }
 // Get provider-specific extra parameters
-private getExtraParams(isStreaming: boolean): Record<string, any> {
-const extraParams: Record<string, any> = {}
+private getExtraParams(isStreaming: boolean): Record<string, unknown> {
+const extraParams: Record<string, unknown> = {}
 // The Alibaba Cloud Qwen API requires enable_thinking: false on non-streaming calls
 if (this.isAlibabaQwen() && !isStreaming) {
@@ -77,7 +77,7 @@ export class OpenAICompatibleProvider implements BaseLLMProvider {
 }
 const extraParams = this.getExtraParams(false) // non-streaming call
-return this.adapter.generateResponse(this.client, request, options, extraParams)
+return this.adapter.generateResponse(this.client as OpenAI, request, options, extraParams)
 }
 async streamResponse(
@@ -92,6 +92,6 @@ export class OpenAICompatibleProvider implements BaseLLMProvider {
 }
 const extraParams = this.getExtraParams(true) // streaming call
-return this.adapter.streamResponse(this.client, request, options, extraParams)
+return this.adapter.streamResponse(this.client as OpenAI, request, options, extraParams)
 }
 }
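
The CORS fix rests on the `NoStainlessOpenAI` client that now widens the `client` field above; the class itself is not part of this diff. For context: the official `openai` SDK attaches `x-stainless-*` telemetry headers to every request, and endpoints that do not whitelist those headers in `Access-Control-Allow-Headers` (Moonshot reportedly among them) fail the browser's CORS preflight, so the call never leaves an Obsidian plugin. Below is a minimal sketch of one way such a wrapper can be written, using the SDK's custom `fetch` option; it is an assumption, not the plugin's actual implementation.

```typescript
import OpenAI from 'openai'

// Sketch only: the real NoStainlessOpenAI referenced in the diff is not shown
// there; this is one plausible implementation, not the plugin's code.
// It strips the SDK's x-stainless-* telemetry headers before the request is
// sent, which avoids the CORS preflight rejection on strict endpoints.
export class NoStainlessOpenAI extends OpenAI {
  constructor(options: ConstructorParameters<typeof OpenAI>[0] = {}) {
    super({
      ...options,
      fetch: async (url, init) => {
        const headers = new Headers(init?.headers)
        for (const key of [...headers.keys()]) {
          if (key.toLowerCase().startsWith('x-stainless-')) {
            headers.delete(key)
          }
        }
        return fetch(url, { ...init, headers })
      },
    })
  }
}
```

Because this sketch subclasses `OpenAI`, the `as OpenAI` casts in the hunks above would be redundant for it; the casts suggest the real class is only structurally compatible with the SDK client. Callers running in a browser-like environment also need to pass `dangerouslyAllowBrowser: true` in the client options.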

View File

@@ -22,7 +22,7 @@ export class OpenAIMessageAdapter {
 client: OpenAI,
 request: LLMRequestNonStreaming,
 options?: LLMOptions,
-extraParams?: Record<string, any>,
+extraParams?: Record<string, unknown>,
 ): Promise<LLMResponseNonStreaming> {
 const response = await client.chat.completions.create(
 {
@@ -50,7 +50,7 @@ export class OpenAIMessageAdapter {
 client: OpenAI,
 request: LLMRequestStreaming,
 options?: LLMOptions,
-extraParams?: Record<string, any>,
+extraParams?: Record<string, unknown>,
 ): Promise<AsyncIterable<LLMResponseStreaming>> {
 const stream = await client.chat.completions.create(
 {
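
Both adapter hunks are truncated before the request body is built. Based on the signatures shown here and on `getExtraParams` in the provider, the extra parameter bag is presumably spread into the `chat.completions.create` payload, so flags like `enable_thinking: false` reach the wire as additional body fields. A sketch under that assumption (the helper name and field handling are illustrative, not the adapter's real code):

```typescript
import OpenAI from 'openai'

// Illustrative only: assumed shape of the truncated create() calls above.
async function createChatCompletion(
  client: OpenAI,
  params: OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming,
  extraParams?: Record<string, unknown>,
  signal?: AbortSignal,
) {
  return client.chat.completions.create(
    // Provider-specific flags (e.g. { enable_thinking: false } for non-streaming
    // Alibaba Qwen calls) ride along as extra body fields.
    { ...params, ...extraParams } as OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming,
    { signal },
  )
}
```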

View File

@@ -216,6 +216,13 @@ describe('parseSmartCopilotSettings', () => {
 useCustomUrl: false,
 models: [],
 },
+moonshotProvider: {
+name: 'Moonshot',
+apiKey: '',
+baseUrl: '',
+useCustomUrl: false,
+models: [],
+},
 siliconflowProvider: {
 name: 'SiliconFlow',
 apiKey: '',
@@ -458,6 +465,13 @@ describe('settings migration', () => {
 useCustomUrl: false,
 models: [],
 },
+moonshotProvider: {
+name: 'Moonshot',
+apiKey: '',
+baseUrl: '',
+useCustomUrl: false,
+models: [],
+},
 siliconflowProvider: {
 name: 'SiliconFlow',
 apiKey: '',
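
The same default `moonshotProvider` block is added to the expected settings in both the parse-defaults test and the migration test, so the new provider entry should appear for fresh installs as well as for vaults upgrading older settings. Its shape matches the other providers; as a sketch, with hypothetical type and constant names:

```typescript
// Shape inferred from the fixtures above; the interface and constant names are
// hypothetical, not the plugin's actual identifiers.
interface ProviderSettings {
  name: string
  apiKey: string
  baseUrl: string
  useCustomUrl: boolean
  models: string[]
}

const DEFAULT_MOONSHOT_PROVIDER: ProviderSettings = {
  name: 'Moonshot',
  apiKey: '',
  baseUrl: '',
  useCustomUrl: false,
  models: [],
}
```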