update: use stream mode all the time

duanfuxiang 2025-04-30 19:23:20 +08:00
parent 61b42a8a07
commit db34038acc
4 changed files with 54 additions and 16 deletions
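
The diffs below replace the blocking generateResponse calls with streamResponse and rebuild the full reply from streamed deltas. A rough sketch of the two response shapes involved, assuming an OpenAI-compatible chat completions API as implied by the field names in the diffs (not part of the commit):

// Assumed shape resolved by generateResponse with stream: false — one complete message.
type FullResponse = { choices: { message: { content: string | null } }[] }

// Assumed shape yielded per chunk by streamResponse with stream: true — partial deltas
// that the caller concatenates; delta.content may be absent on role-only or stop chunks.
type StreamChunk = { choices: { delta?: { content?: string } }[] }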

View File

@@ -1,4 +1,4 @@
-import { TFile, View, WorkspaceLeaf } from 'obsidian'
+import { View, WorkspaceLeaf } from 'obsidian'
 import { Root, createRoot } from 'react-dom/client'
 import ApplyViewRoot from './components/apply-view/ApplyViewRoot'
@@ -27,7 +27,7 @@ export class ApplyView extends View {
 }
 getDisplayText() {
-return `Applying: ${this.state?.file?.name ?? ''}`
+return `Applying: ${this.state?.file ?? ''}`
 }
 async setState(state: ApplyViewState) {

View File

@@ -128,7 +128,7 @@ export default function ApplyViewRoot({
 </div>
 <div className="view-header-title-container mod-at-start">
 <div className="view-header-title">
-Applying: {state?.file?.name ?? ''}
+Applying: {state?.file ?? ''}
 </div>
 <div className="view-actions">
 <button
@@ -157,8 +157,8 @@ export default function ApplyViewRoot({
 <div className="cm-scroller">
 <div className="cm-sizer">
 <div className="infio-inline-title">
-{state?.file?.name
-? state.file.name.replace(/\.[^/.]+$/, '')
+{state?.file
+? state.file.replace(/\.[^/.]+$/, '')
 : ''}
 </div>
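
The inline title is now derived from state.file, now a plain string rather than a TFile, using the same extension-stripping regex. A standalone illustration of that regex, not part of the commit:

// Drops the trailing ".ext", mirroring the replace(/\.[^/.]+$/, '') call above.
const stripExtension = (path: string): string => path.replace(/\.[^/.]+$/, '')

stripExtension('notes/daily.md')   // 'notes/daily'
stripExtension('archive.tar.gz')   // 'archive.tar' (only the last extension is removed)
stripExtension('dir.v1/readme')    // 'dir.v1/readme' (the [^/.] class ignores dots in directory names)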

View File

@@ -217,21 +217,34 @@ export const InlineEdit: React.FC<InlineEditProps> = ({
 endLine: defaultEndLine,
 });
-const response = await llmManager.generateResponse(chatModel, {
-model: chatModel.modelId,
-messages: requestMessages,
-stream: false,
-});
+const stream = await llmManager.streamResponse(
+chatModel,
+{
+messages: requestMessages,
+model: chatModel.modelId,
+max_tokens: settings.modelOptions.max_tokens,
+temperature: settings.modelOptions.temperature,
+// top_p: settings.modelOptions.top_p,
+// frequency_penalty: settings.modelOptions.frequency_penalty,
+// presence_penalty: settings.modelOptions.presence_penalty,
+stream: true,
+}
+)
-if (!response.choices[0].message.content) {
+let response_content = ""
+for await (const chunk of stream) {
+const content = chunk.choices[0]?.delta?.content ?? ''
+response_content += content
+}
+if (!response_content) {
 setIsSubmitting(false);
 throw new Error("Empty response from LLM");
 }
 const parsedBlock = parseSmartComposeBlock(
-response.choices[0].message.content
+response_content
 );
-const finalContent = parsedBlock?.content || response.choices[0].message.content;
+const finalContent = parsedBlock?.content || response_content;
 if (!activeFile || !(activeFile.path && typeof activeFile.path === 'string')) {
 setIsSubmitting(false);
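
The new code requests the completion with stream: true and then concatenates every chunk's delta back into a single string before parsing. A self-contained sketch of that accumulation pattern, assuming OpenAI-style chunks; the type and helper names here are illustrative, not from the repo:

// Minimal stand-in for a streamed chat-completion chunk.
type ChatChunk = { choices: { delta?: { content?: string } }[] }

// Concatenate delta.content across all chunks, treating missing deltas as ''.
async function collectStreamText(stream: AsyncIterable<ChatChunk>): Promise<string> {
  let text = ''
  for await (const chunk of stream) {
    text += chunk.choices[0]?.delta?.content ?? ''
  }
  return text
}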

View File

@@ -33,12 +33,37 @@ class LLMClient {
 }
 async queryChatModel(messages: RequestMessage[]): Promise<Result<string, Error>> {
-const data = await this.llm.generateResponse(this.model, {
+const stream = await this.llm.streamResponse(
+this.model,
+{
+messages: messages,
+model: this.model.modelId,
+stream: true,
+}
+)
+let response_content = ""
+for await (const chunk of stream) {
+const content = chunk.choices[0]?.delta?.content ?? ''
+response_content += content
+}
+return ok(response_content);
+}
+async queryChatModelStream(messages: RequestMessage[]): Promise<AsyncIterable<string>> {
+const stream = await this.llm.streamResponse(this.model, {
 model: this.model.modelId,
 messages: messages,
-stream: false,
+stream: true,
 })
-return ok(data.choices[0].message.content);
+// eslint-disable-next-line no-inner-declarations
+async function* streamResponse(): AsyncIterable<string> {
+for await (const chunk of stream) {
+yield chunk.choices[0].delta.content;
+}
+}
+return streamResponse()
 }
 }
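
A hypothetical caller of the new queryChatModelStream, assuming an already constructed LLMClient and the repo's RequestMessage type (sketch only, not part of the commit):

// Drain the AsyncIterable<string> into a full reply, e.g. while updating a UI as
// pieces arrive. The inner generator above yields chunk.choices[0].delta.content
// verbatim, which can be undefined on some provider chunks, so coalesce defensively.
async function streamToString(client: LLMClient, messages: RequestMessage[]): Promise<string> {
  const stream = await client.queryChatModelStream(messages)
  let full = ''
  for await (const piece of stream) {
    full += piece ?? ''
  }
  return full
}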