handle streaming responses better in Chat UI
commit 8ff3f4f269
parent 208267edb8
@@ -366,12 +366,19 @@ export default class LlmChatPanel extends BasicWidget {
                 showThinking
             };
 
-            // First try to get a direct response
-            const handled = await this.handleDirectResponse(messageParams);
-            if (handled) return;
-
-            // If no direct response, set up streaming
-            await this.setupStreamingResponse(messageParams);
+            // First try to use streaming (preferred method)
+            try {
+                await this.setupStreamingResponse(messageParams);
+            } catch (streamingError) {
+                console.warn("Streaming request failed, falling back to direct response:", streamingError);
+
+                // If streaming fails, fall back to direct response
+                const handled = await this.handleDirectResponse(messageParams);
+                if (!handled) {
+                    // If neither method works, show an error
+                    throw new Error("Failed to get response from server");
+                }
+            }
         } catch (error) {
             this.handleError(error as Error);
         }
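Taken together, the hunk above inverts the previous order of operations: streaming is now attempted first and the plain POST request becomes the fallback. A condensed view of the resulting control flow (method names taken from the diff; the enclosing send-message handler is assumed, since the diff does not show its signature):

    // Sketch of the send path after this change (illustrative, not the literal source)
    try {
        try {
            await this.setupStreamingResponse(messageParams);                // 1. preferred: SSE streaming
        } catch (streamingError) {
            const handled = await this.handleDirectResponse(messageParams);  // 2. fallback: direct POST
            if (!handled) {
                throw new Error("Failed to get response from server");       // 3. both paths failed
            }
        }
    } catch (error) {
        this.handleError(error as Error);                                    // single error sink for the panel
    }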
@@ -402,23 +409,28 @@ export default class LlmChatPanel extends BasicWidget {
      * @returns true if response was handled, false if streaming should be used
      */
     private async handleDirectResponse(messageParams: any): Promise<boolean> {
-        // Send the message via POST request
-        const postResponse = await server.post<any>(`llm/sessions/${this.sessionId}/messages`, messageParams);
+        try {
+            // Send the message via POST request
+            const postResponse = await server.post<any>(`llm/sessions/${this.sessionId}/messages`, messageParams);
 
-        // If the POST request returned content directly, display it
-        if (postResponse && postResponse.content) {
-            this.processAssistantResponse(postResponse.content);
+            // If the POST request returned content directly, display it
+            if (postResponse && postResponse.content) {
+                this.processAssistantResponse(postResponse.content);
 
-            // If there are sources, show them
-            if (postResponse.sources && postResponse.sources.length > 0) {
-                this.showSources(postResponse.sources);
-            }
+                // If there are sources, show them
+                if (postResponse.sources && postResponse.sources.length > 0) {
+                    this.showSources(postResponse.sources);
+                }
 
-            this.hideLoadingIndicator();
-            return true;
-        }
+                this.hideLoadingIndicator();
+                return true;
+            }
 
-        return false;
+            return false;
+        } catch (error) {
+            console.error("Error with direct response:", error);
+            return false;
+        }
     }
 
     /**
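handleDirectResponse() now catches its own errors and reports failure through its boolean return value instead of throwing, so the caller decides whether to surface an error. The response shape it relies on can be read off the fields it touches; a rough sketch (field names from the diff, optionality assumed, the real server type may be richer):

    // Inferred shape of the non-streaming POST response (assumption, not the actual server type)
    interface DirectChatResponse {
        content?: string;      // rendered via this.processAssistantResponse(...)
        sources?: unknown[];   // passed to this.showSources(...) when non-empty
    }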
@@ -444,36 +456,85 @@ export default class LlmChatPanel extends BasicWidget {
     /**
      * Set up streaming response from the server
      */
-    private async setupStreamingResponse(messageParams: any) {
+    private async setupStreamingResponse(messageParams: any): Promise<void> {
         const useAdvancedContext = messageParams.useAdvancedContext;
         const showThinking = messageParams.showThinking;
 
         // Set up streaming via EventSource
         const streamUrl = `./api/llm/sessions/${this.sessionId}/messages?format=stream&useAdvancedContext=${useAdvancedContext}&showThinking=${showThinking}`;
-        const source = new EventSource(streamUrl);
 
-        let assistantResponse = '';
-        let receivedAnyContent = false;
-        let timeoutId: number | null = null;
-
-        // Set up timeout for streaming response
-        timeoutId = this.setupStreamingTimeout(source);
-
-        // Handle streaming response
-        source.onmessage = (event) => this.handleStreamingMessage(
-            event,
-            source,
-            timeoutId,
-            assistantResponse,
-            receivedAnyContent
-        );
-
-        // Handle streaming errors
-        source.onerror = () => this.handleStreamingError(
-            source,
-            timeoutId,
-            receivedAnyContent
-        );
+        return new Promise((resolve, reject) => {
+            const source = new EventSource(streamUrl);
+            let assistantResponse = '';
+            let receivedAnyContent = false;
+            let timeoutId: number | null = null;
+
+            // Set up timeout for streaming response
+            timeoutId = this.setupStreamingTimeout(source);
+
+            // Handle streaming response
+            source.onmessage = (event) => {
+                try {
+                    if (event.data === '[DONE]') {
+                        // Stream completed successfully
+                        this.handleStreamingComplete(source, timeoutId, receivedAnyContent, assistantResponse);
+                        resolve();
+                        return;
+                    }
+
+                    const data = JSON.parse(event.data);
+                    console.log("Received streaming data:", data);
+
+                    // Handle both content and error cases
+                    if (data.content) {
+                        receivedAnyContent = true;
+                        assistantResponse += data.content;
+
+                        // Update the UI with the accumulated response
+                        this.updateStreamingUI(assistantResponse);
+                    } else if (data.error) {
+                        // Handle error message
+                        this.hideLoadingIndicator();
+                        this.addMessageToChat('assistant', `Error: ${data.error}`);
+
+                        if (timeoutId !== null) {
+                            window.clearTimeout(timeoutId);
+                        }
+
+                        source.close();
+                        reject(new Error(data.error));
+                        return;
+                    }
+
+                    // Scroll to the bottom
+                    this.chatContainer.scrollTop = this.chatContainer.scrollHeight;
+                } catch (e) {
+                    console.error('Error parsing SSE message:', e, 'Raw data:', event.data);
+                    reject(e);
+                }
+            };
+
+            // Handle streaming errors
+            source.onerror = (err) => {
+                console.error("EventSource error:", err);
+                source.close();
+                this.hideLoadingIndicator();
+
+                // Clear the timeout if there was an error
+                if (timeoutId !== null) {
+                    window.clearTimeout(timeoutId);
+                }
+
+                // Only reject if we haven't received any content yet
+                if (!receivedAnyContent) {
+                    reject(new Error('Error connecting to the LLM streaming service'));
+                } else {
+                    // If we've already received some content, consider it a successful but incomplete response
+                    this.handleStreamingComplete(source, timeoutId, receivedAnyContent, assistantResponse);
+                    resolve();
+                }
+            };
+        });
     }
 
     /**
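The rewritten setupStreamingResponse() wraps the EventSource in a Promise so the caller can await the whole stream and trigger the fallback on rejection. Its onmessage handler accepts three kinds of event.data payloads; illustrative examples follow (the payload contents are invented, only the field names and the '[DONE]' sentinel come from the code above):

    // Values of event.data the handler above is written to accept (illustrative only)
    const exampleEvents = [
        '{"content": "Here is the first chunk"}',   // appended to assistantResponse, UI updated
        '{"content": " and a later chunk"}',
        '{"error": "model unavailable"}',           // shown in chat, timeout cleared, Promise rejected
        '[DONE]'                                    // sentinel: stream finished, Promise resolves
    ];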
@@ -492,51 +553,6 @@ export default class LlmChatPanel extends BasicWidget {
         }, 10000);
     }
 
-    /**
-     * Handle messages from the streaming response
-     */
-    private handleStreamingMessage(
-        event: MessageEvent,
-        source: EventSource,
-        timeoutId: number | null,
-        assistantResponse: string,
-        receivedAnyContent: boolean
-    ) {
-        if (event.data === '[DONE]') {
-            this.handleStreamingComplete(source, timeoutId, receivedAnyContent, assistantResponse);
-            return;
-        }
-
-        try {
-            const data = JSON.parse(event.data);
-            console.log("Received streaming data:", data); // Debug log
-
-            // Handle both content and error cases
-            if (data.content) {
-                receivedAnyContent = true;
-                assistantResponse += data.content;
-
-                // Update the UI with the accumulated response
-                this.updateStreamingUI(assistantResponse);
-            } else if (data.error) {
-                // Handle error message
-                this.hideLoadingIndicator();
-                this.addMessageToChat('assistant', `Error: ${data.error}`);
-                receivedAnyContent = true;
-                source.close();
-
-                if (timeoutId !== null) {
-                    window.clearTimeout(timeoutId);
-                }
-            }
-
-            // Scroll to the bottom
-            this.chatContainer.scrollTop = this.chatContainer.scrollHeight;
-        } catch (e) {
-            console.error('Error parsing SSE message:', e, 'Raw data:', event.data);
-        }
-    }
-
     /**
      * Update the UI with streaming content as it arrives
      */
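The removed handleStreamingMessage() received assistantResponse and receivedAnyContent as plain parameters, so reassigning them inside the helper could not update the caller's variables; inlining the handler into the Promise in the previous hunk lets it mutate that state through the closure instead. A minimal illustration of the difference (hypothetical names, not from the codebase):

    let received = false;
    const markViaParameter = (flag: boolean) => { flag = true; };  // only the local copy changes
    const markViaClosure = () => { received = true; };             // updates the outer variable
    markViaParameter(received);   // received is still false
    markViaClosure();             // received is now true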
@@ -607,8 +623,9 @@ export default class LlmChatPanel extends BasicWidget {
 
         // Only show error message if we haven't received any content yet
         if (!receivedAnyContent) {
-            const connectionError = 'Error connecting to the LLM service. Please try again.';
-            this.processAssistantResponse(connectionError);
+            // Instead of automatically showing the error message in the chat,
+            // throw an error so the parent function can handle the fallback
+            throw new Error('Error connecting to the LLM streaming service');
         }
     }
 
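With this last change the streaming path no longer prints a connection error into the chat itself. Assuming this method is invoked from the onmessage handler added above, the thrown error is converted into a Promise rejection by that handler's catch (reject(e)), setupStreamingResponse() rejects, and the catch (streamingError) branch from the first hunk falls back to the direct POST request, which matches the intent stated in the new comment.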