Improve tool execution streaming: send structured tool-execution messages instead of inline text markers

This commit is contained in:
perf3ct 2025-04-13 20:12:17 +00:00
parent c9bb0fb219
commit f252f53e82
No known key found for this signature in database
GPG Key ID: 569C4EEC436F5232
6 changed files with 205 additions and 157 deletions

View File

@ -70,7 +70,7 @@ export async function setupStreamingResponse(
return;
}
console.log(`[${responseId}] LLM Stream message received via CustomEvent: session=${sessionId}, content=${!!message.content}, contentLength=${message.content?.length || 0}, thinking=${!!message.thinking}, toolExecution=${!!message.toolExecution}, done=${!!message.done}`);
console.log(`[${responseId}] LLM Stream message received via CustomEvent: session=${sessionId}, content=${!!message.content}, contentLength=${message.content?.length || 0}, thinking=${!!message.thinking}, toolExecution=${!!message.toolExecution}, done=${!!message.done}, type=${message.type || 'llm-stream'}`);
// Mark first message received
if (!receivedAnyMessage) {
@ -84,12 +84,49 @@ export async function setupStreamingResponse(
}
}
// Handle specific message types
if (message.type === 'tool_execution_start') {
onThinkingUpdate('Executing tools...');
// Also trigger tool execution UI with a specific format
onToolExecution({
action: 'start',
tool: 'tools',
result: 'Executing tools...'
});
return; // Skip accumulating content from this message
}
if (message.type === 'tool_result' && message.toolExecution) {
onToolExecution(message.toolExecution);
return; // Skip accumulating content from this message
}
if (message.type === 'tool_execution_error' && message.toolExecution) {
onToolExecution({
...message.toolExecution,
action: 'error',
error: message.toolExecution.error || 'Unknown error during tool execution'
});
return; // Skip accumulating content from this message
}
if (message.type === 'tool_completion_processing') {
onThinkingUpdate('Generating response with tool results...');
// Also trigger tool execution UI with a specific format
onToolExecution({
action: 'generating',
tool: 'tools',
result: 'Generating response with tool results...'
});
return; // Skip accumulating content from this message
}
// Handle content updates
if (message.content) {
receivedAnyContent = true;
console.log(`[${responseId}] Received content chunk of length ${message.content.length}, preview: "${message.content.substring(0, 50)}${message.content.length > 50 ? '...' : ''}"`);
// Add to our accumulated response
assistantResponse += message.content;
@ -111,10 +148,13 @@ export async function setupStreamingResponse(
}, 30000);
}
// Handle tool execution updates
// Handle tool execution updates (legacy format and standard format with llm-stream type)
if (message.toolExecution) {
console.log(`[${responseId}] Received tool execution update: action=${message.toolExecution.action || 'unknown'}`);
onToolExecution(message.toolExecution);
// Only process if we haven't already handled this message via specific message types
if (message.type === 'llm-stream' || !message.type) {
console.log(`[${responseId}] Received tool execution update: action=${message.toolExecution.action || 'unknown'}`);
onToolExecution(message.toolExecution);
}
}
// Handle thinking state updates

View File

@ -7,10 +7,10 @@ import appContext from "../../components/app_context.js";
import server from "../../services/server.js";
import libraryLoader from "../../services/library_loader.js";
import { TPL, addMessageToChat, showSources, hideSources, showLoadingIndicator, hideLoadingIndicator, renderToolStepsHtml } from "./ui.js";
import { TPL, addMessageToChat, showSources, hideSources, showLoadingIndicator, hideLoadingIndicator } from "./ui.js";
import { formatMarkdown } from "./utils.js";
import { createChatSession, checkSessionExists, setupStreamingResponse, getDirectResponse } from "./communication.js";
import { extractToolExecutionSteps, extractFinalResponse, extractInChatToolSteps } from "./message_processor.js";
import { extractInChatToolSteps } from "./message_processor.js";
import { validateEmbeddingProviders } from "./validation.js";
import type { MessageData, ToolExecutionStep, ChatData } from "./types.js";
import { applySyntaxHighlight } from "../../services/syntax_highlight.js";
@ -245,7 +245,7 @@ export default class LlmChatPanel extends BasicWidget {
</button>
</div>
<div class="tool-execution-chat-steps">
${renderToolStepsHtml(steps)}
${this.renderToolStepsHtml(steps)}
</div>
</div>
`;
@ -261,6 +261,52 @@ export default class LlmChatPanel extends BasicWidget {
}
}
/**
* Render HTML for tool execution steps
*/
private renderToolStepsHtml(steps: ToolExecutionStep[]): string {
if (!steps || steps.length === 0) return '';
return steps.map(step => {
let icon = 'bx-info-circle';
let className = 'info';
let content = '';
if (step.type === 'executing') {
icon = 'bx-code-block';
className = 'executing';
content = `<div>${step.content || 'Executing tools...'}</div>`;
} else if (step.type === 'result') {
icon = 'bx-terminal';
className = 'result';
content = `
<div>Tool: <strong>${step.name || 'unknown'}</strong></div>
<div class="mt-1 ps-3">${step.content || ''}</div>
`;
} else if (step.type === 'error') {
icon = 'bx-error-circle';
className = 'error';
content = `
<div>Tool: <strong>${step.name || 'unknown'}</strong></div>
<div class="mt-1 ps-3 text-danger">${step.content || 'Error occurred'}</div>
`;
} else if (step.type === 'generating') {
icon = 'bx-message-dots';
className = 'generating';
content = `<div>${step.content || 'Generating response...'}</div>`;
}
return `
<div class="tool-step ${className} p-2 mb-2 rounded">
<div class="d-flex align-items-center">
<i class="bx ${icon} me-2"></i>
${content}
</div>
</div>
`;
}).join('');
}
async refresh() {
if (!this.isVisible()) {
return;
@ -493,10 +539,10 @@ export default class LlmChatPanel extends BasicWidget {
}
/**
* Update the UI with streaming content as it arrives
* Update the UI with streaming content
*/
private updateStreamingUI(assistantResponse: string) {
const logId = `ui-update-${Date.now()}`;
const logId = `LlmChatPanel-${Date.now()}`;
console.log(`[${logId}] Updating UI with response text: ${assistantResponse.length} chars`);
if (!this.noteContextChatMessages) {
@ -504,70 +550,19 @@ export default class LlmChatPanel extends BasicWidget {
return;
}
// Extract the tool execution steps and final response
const toolSteps = extractToolExecutionSteps(assistantResponse);
const finalResponseText = extractFinalResponse(assistantResponse);
// With our new structured message approach, we don't need to extract tool steps from
// the assistantResponse anymore, as tool execution is handled separately via dedicated messages
// Find existing assistant message or create one if needed
let assistantElement = this.noteContextChatMessages.querySelector('.assistant-message:last-child .message-content');
// First, check if we need to add the tool execution steps to the chat flow
if (toolSteps.length > 0) {
// Look for an existing tool execution element in the chat flow
let toolExecutionElement = this.noteContextChatMessages.querySelector('.chat-tool-execution');
if (!toolExecutionElement) {
// Create a new tool execution element in the chat flow
// Place it right before the assistant message if it exists, or at the end of chat
toolExecutionElement = document.createElement('div');
toolExecutionElement.className = 'chat-tool-execution mb-3';
// If there's an assistant message, insert before it
const assistantMessage = this.noteContextChatMessages.querySelector('.assistant-message:last-child');
if (assistantMessage) {
this.noteContextChatMessages.insertBefore(toolExecutionElement, assistantMessage);
} else {
// Otherwise append to the end
this.noteContextChatMessages.appendChild(toolExecutionElement);
}
}
// Update the tool execution content
toolExecutionElement.innerHTML = `
<div class="tool-execution-container p-2 rounded mb-2">
<div class="tool-execution-header d-flex align-items-center justify-content-between mb-2">
<div>
<i class="bx bx-code-block text-primary me-2"></i>
<span class="fw-bold">Tool Execution</span>
</div>
<button type="button" class="btn btn-sm btn-link p-0 text-muted tool-execution-chat-clear" title="Clear tool execution history">
<i class="bx bx-x"></i>
</button>
</div>
<div class="tool-execution-chat-steps">
${renderToolStepsHtml(toolSteps)}
</div>
</div>
`;
// Add event listener for the clear button
const clearButton = toolExecutionElement.querySelector('.tool-execution-chat-clear');
if (clearButton) {
clearButton.addEventListener('click', (e) => {
e.preventDefault();
e.stopPropagation();
toolExecutionElement?.remove();
});
}
}
// Now update or create the assistant message with the final response
if (finalResponseText) {
// Now update or create the assistant message with the response
if (assistantResponse) {
if (assistantElement) {
console.log(`[${logId}] Found existing assistant message element, updating with final response`);
console.log(`[${logId}] Found existing assistant message element, updating with response`);
try {
// Format the final response with markdown
const formattedResponse = formatMarkdown(finalResponseText);
// Format the response with markdown
const formattedResponse = formatMarkdown(assistantResponse);
// Update the content
assistantElement.innerHTML = formattedResponse || '';
@ -575,12 +570,12 @@ export default class LlmChatPanel extends BasicWidget {
// Apply syntax highlighting to any code blocks in the updated content
applySyntaxHighlight($(assistantElement as HTMLElement));
console.log(`[${logId}] Successfully updated existing element with final response`);
console.log(`[${logId}] Successfully updated existing element with response`);
} catch (err) {
console.error(`[${logId}] Error updating existing element:`, err);
// Fallback to text content if HTML update fails
try {
assistantElement.textContent = finalResponseText;
assistantElement.textContent = assistantResponse;
console.log(`[${logId}] Fallback to text content successful`);
} catch (fallbackErr) {
console.error(`[${logId}] Even fallback update failed:`, fallbackErr);
@ -589,7 +584,7 @@ export default class LlmChatPanel extends BasicWidget {
} else {
console.log(`[${logId}] No existing assistant message element found, creating new one`);
// Create a new message in the chat
this.addMessageToChat('assistant', finalResponseText);
this.addMessageToChat('assistant', assistantResponse);
console.log(`[${logId}] Successfully added new assistant message`);
}
}
@ -683,7 +678,9 @@ export default class LlmChatPanel extends BasicWidget {
if (!stepsContainer) return;
// Process based on action type
if (toolExecutionData.action === 'start') {
const action = toolExecutionData.action || '';
if (action === 'start' || action === 'executing') {
// Tool execution started
const step = document.createElement('div');
step.className = 'tool-step executing p-2 mb-2 rounded';
@ -692,21 +689,47 @@ export default class LlmChatPanel extends BasicWidget {
<i class="bx bx-code-block me-2"></i>
<span>Executing tool: <strong>${toolExecutionData.tool || 'unknown'}</strong></span>
</div>
${toolExecutionData.args ? `
<div class="tool-args mt-1 ps-3">
<code>Args: ${JSON.stringify(toolExecutionData.args || {}, null, 2)}</code>
</div>
</div>` : ''}
`;
stepsContainer.appendChild(step);
}
else if (toolExecutionData.action === 'result') {
else if (action === 'result' || action === 'complete') {
// Tool execution completed with results
const step = document.createElement('div');
step.className = 'tool-step result p-2 mb-2 rounded';
let resultDisplay = '';
// Format the result based on type
if (typeof toolExecutionData.result === 'object') {
// Special handling for search_notes tool which has a specific structure
if (toolExecutionData.tool === 'search_notes' &&
typeof toolExecutionData.result === 'object' &&
toolExecutionData.result.results) {
const results = toolExecutionData.result.results;
if (results.length === 0) {
resultDisplay = `<div class="text-muted">No notes found matching the search criteria.</div>`;
} else {
resultDisplay = `
<div class="search-results">
<div class="mb-2">Found ${results.length} notes:</div>
<ul class="list-unstyled ps-1">
${results.map((note: any) => `
<li class="mb-1">
<a href="#" class="note-link" data-note-id="${note.noteId}">${note.title}</a>
${note.similarity < 1 ? `<span class="text-muted small ms-1">(similarity: ${(note.similarity * 100).toFixed(0)}%)</span>` : ''}
</li>
`).join('')}
</ul>
</div>
`;
}
}
// Format the result based on type for other tools
else if (typeof toolExecutionData.result === 'object') {
// For objects, format as pretty JSON
resultDisplay = `<pre class="mb-0"><code>${JSON.stringify(toolExecutionData.result, null, 2)}</code></pre>`;
} else {
@ -724,8 +747,23 @@ export default class LlmChatPanel extends BasicWidget {
</div>
`;
stepsContainer.appendChild(step);
// Add event listeners for note links if this is a search_notes result
if (toolExecutionData.tool === 'search_notes') {
const noteLinks = step.querySelectorAll('.note-link');
noteLinks.forEach(link => {
link.addEventListener('click', (e) => {
e.preventDefault();
const noteId = (e.currentTarget as HTMLElement).getAttribute('data-note-id');
if (noteId) {
// Open the note in a new tab but don't switch to it
appContext.tabManager.openTabWithNoteWithHoisting(noteId, { activate: false });
}
});
});
}
}
else if (toolExecutionData.action === 'error') {
else if (action === 'error') {
// Tool execution failed
const step = document.createElement('div');
step.className = 'tool-step error p-2 mb-2 rounded';
@ -740,6 +778,18 @@ export default class LlmChatPanel extends BasicWidget {
`;
stepsContainer.appendChild(step);
}
else if (action === 'generating') {
// Generating final response with tool results
const step = document.createElement('div');
step.className = 'tool-step generating p-2 mb-2 rounded';
step.innerHTML = `
<div class="d-flex align-items-center">
<i class="bx bx-message-dots me-2"></i>
<span>Generating response with tool results...</span>
</div>
`;
stepsContainer.appendChild(step);
}
// Make sure the loading indicator is shown during tool execution
this.loadingIndicator.style.display = 'flex';

View File

@ -3,66 +3,6 @@
*/
import type { ToolExecutionStep } from "./types.js";
/**
 * Extract tool execution steps from a streamed response string.
 *
 * Recognizes three inline markers in order:
 *  - `[Executing tools...]`  -> one 'executing' step
 *  - `[Tool: <name>] <body>` -> one 'result' step per occurrence, or
 *    'error' when the body contains `Error:` (body runs to the next
 *    `[` marker or end of text)
 *  - `[Generating response with tool results...]` -> one 'generating' step
 *
 * Returns an empty array for empty input.
 */
export function extractToolExecutionSteps(content: string): ToolExecutionStep[] {
    if (!content) return [];

    const steps: ToolExecutionStep[] = [];

    // The "executing" marker, when present, always leads the list
    if (content.includes('[Executing tools...]')) {
        steps.push({
            type: 'executing',
            content: 'Executing tools...'
        });
    }

    // One step per "[Tool: name]" block
    for (const match of content.matchAll(/\[Tool: ([^\]]+)\]([\s\S]*?)(?=\[|$)/g)) {
        const body = match[2].trim();
        steps.push({
            type: body.includes('Error:') ? 'error' : 'result',
            name: match[1],
            content: body
        });
    }

    if (content.includes('[Generating response with tool results...]')) {
        steps.push({
            type: 'generating',
            content: 'Generating response with tool results...'
        });
    }

    return steps;
}
/**
 * Strip all tool-execution markers from a streamed response, leaving
 * only the final assistant text.
 *
 * Removes the `[Executing tools...]` and
 * `[Generating response with tool results...]` markers (with trailing
 * newlines), and each `[Tool: name]` block together with its body up to
 * the next `[` marker or end of text. Returns '' for empty input.
 */
export function extractFinalResponse(content: string): string {
    if (!content) return '';

    // Markers are stripped in the same order the stream emits them
    const markerPatterns: RegExp[] = [
        /\[Executing tools\.\.\.\]\n*/g,
        /\[Tool: [^\]]+\][\s\S]*?(?=\[|$)/g,
        /\[Generating response with tool results\.\.\.\]\n*/g
    ];

    let stripped = content;
    for (const pattern of markerPatterns) {
        stripped = stripped.replace(pattern, '');
    }

    return stripped.trim();
}
/**
* Extract tool execution steps from the DOM that are within the chat flow
*/

View File

@ -345,9 +345,10 @@ export class ChatPipeline {
// If streaming was enabled, send an update to the user
if (isStreaming && streamCallback) {
streamingPaused = true;
// IMPORTANT: Don't send done:true here, as it causes the client to stop processing messages
// Instead, send a marker message that indicates tools will be executed
streamCallback('\n\n[Executing tools...]\n\n', false);
// Send a dedicated message with a specific type for tool execution
streamCallback('', false, {
type: 'tool_execution_start'
});
}
while (toolCallIterations < maxToolCallIterations) {
@ -406,6 +407,7 @@ export class ChatPipeline {
// Send the structured tool result directly so the client has the raw data
streamCallback('', false, {
type: 'tool_result',
toolExecution: {
action: 'result',
tool: toolName,
@ -414,15 +416,21 @@ export class ChatPipeline {
}
});
// Still send the formatted text for backwards compatibility
// This will be phased out once the client is updated
const formattedToolResult = `[Tool: ${toolName || 'unknown'}]\n${msg.content}\n\n`;
streamCallback(formattedToolResult, false);
// No longer need to send formatted text version
// The client should use the structured data instead
} catch (err) {
log.error(`Error sending structured tool result: ${err}`);
// Fall back to the old format if there's an error
const formattedToolResult = `[Tool: ${toolName || 'unknown'}]\n${msg.content}\n\n`;
streamCallback(formattedToolResult, false);
// Use structured format here too instead of falling back to text format
streamCallback('', false, {
type: 'tool_result',
toolExecution: {
action: 'result',
tool: toolName || 'unknown',
toolCallId: msg.tool_call_id,
result: msg.content,
error: String(err)
}
});
}
}
});
@ -437,7 +445,9 @@ export class ChatPipeline {
// If streaming, show progress to the user
if (isStreaming && streamCallback) {
streamCallback('[Generating response with tool results...]\n\n', false);
streamCallback('', false, {
type: 'tool_completion_processing'
});
}
// Extract tool execution status information for Ollama feedback
@ -513,7 +523,13 @@ export class ChatPipeline {
// If streaming, show error to the user
if (isStreaming && streamCallback) {
streamCallback(`[Tool execution error: ${error.message || 'unknown error'}]\n\n`, false);
streamCallback('', false, {
type: 'tool_execution_error',
toolExecution: {
action: 'error',
error: error.message || 'unknown error'
}
});
}
// For Ollama, create tool execution status with the error

View File

@ -6,14 +6,15 @@ import type { Message, ChatCompletionOptions, ChatResponse, StreamChunk } from "
* Interface for WebSocket LLM streaming messages
*/
interface LLMStreamMessage {
type: 'llm-stream';
type: 'llm-stream' | 'tool_execution_start' | 'tool_result' | 'tool_execution_error' | 'tool_completion_processing';
sessionId: string;
content?: string;
thinking?: string;
toolExecution?: {
action?: string;
tool?: string;
result?: string;
toolCallId?: string;
result?: string | Record<string, any>;
error?: string;
args?: Record<string, unknown>;
};
@ -1165,7 +1166,7 @@ class RestChatService {
// Enhanced logging for each chunk
log.info(`Received stream chunk from ${service.getName()} with ${chunk.text.length} chars of text, done=${!!chunk.done}`);
// Send each individual chunk via WebSocket as it arrives
wsService.sendMessageToAllClients({
type: 'llm-stream',
@ -1214,7 +1215,7 @@ class RestChatService {
content: messageContent, // Send the accumulated content
done: true
} as LLMStreamMessage);
log.info(`Sent explicit final completion message with accumulated content`);
} else {
log.info(`Final done flag was already sent with content chunk, no need for extra message`);

View File

@ -56,7 +56,7 @@ interface Message {
originEntityId?: string | null;
lastModifiedMs?: number;
filePath?: string;
// LLM streaming specific fields
sessionId?: string;
content?: string;
@ -64,7 +64,8 @@ interface Message {
toolExecution?: {
action?: string;
tool?: string;
result?: string;
toolCallId?: string;
result?: string | Record<string, any>;
error?: string;
args?: Record<string, unknown>;
};
@ -144,7 +145,7 @@ function sendMessageToAllClients(message: Message) {
clientCount++;
}
});
// Log WebSocket client count for debugging
if (message.type === "llm-stream") {
log.info(`[WS-SERVER] Sent LLM stream message to ${clientCount} clients`);