diff --git a/src/public/app/widgets/llm_chat/communication.ts b/src/public/app/widgets/llm_chat/communication.ts
index 19f3f7a9c..37a7012ec 100644
--- a/src/public/app/widgets/llm_chat/communication.ts
+++ b/src/public/app/widgets/llm_chat/communication.ts
@@ -28,9 +28,9 @@ export async function createChatSession(): Promise {
  */
 export async function checkSessionExists(sessionId: string): Promise {
     try {
-        const sessionCheck = await server.get(`llm/sessions/${sessionId}`);
+        const sessionCheck = await server.getWithSilentNotFound(`llm/sessions/${sessionId}`);
         return !!(sessionCheck && sessionCheck.id);
-    } catch (error) {
+    } catch (error: any) {
         console.log(`Error checking session ${sessionId}:`, error);
         return false;
     }
diff --git a/src/public/app/widgets/llm_chat/llm_chat_panel.ts b/src/public/app/widgets/llm_chat/llm_chat_panel.ts
index cc7e55dc4..0f5b18919 100644
--- a/src/public/app/widgets/llm_chat/llm_chat_panel.ts
+++ b/src/public/app/widgets/llm_chat/llm_chat_panel.ts
@@ -170,13 +170,13 @@ export default class LlmChatPanel extends BasicWidget {
            error?: string;
            timestamp: string;
        }> = [];
-
+
        // First include any tool executions already in metadata (from streaming events)
        if (this.metadata?.toolExecutions && Array.isArray(this.metadata.toolExecutions)) {
            toolExecutions = [...this.metadata.toolExecutions];
            console.log(`Including ${toolExecutions.length} tool executions from metadata`);
        }
-
+
        // Also extract any visible tool steps from the UI
        const extractedExecutions = toolSteps.map(step => {
            // Parse tool execution information
@@ -219,7 +219,7 @@ export default class LlmChatPanel extends BasicWidget {
                timestamp: new Date().toISOString()
            };
        });
-
+
        // Merge the tool executions, keeping only unique IDs
        const existingIds = new Set(toolExecutions.map((t: {id: string}) => t.id));
        for (const exec of extractedExecutions) {
@@ -287,7 +287,7 @@ export default class LlmChatPanel extends BasicWidget {
            if (savedData.sources && Array.isArray(savedData.sources)) {
                this.sources = savedData.sources;
                console.log(`Loaded ${this.sources.length} sources from saved data`);
-
+
                // Show sources in the UI if they exist
                if (this.sources.length > 0) {
                    this.showSources(this.sources);
                }
@@ -300,19 +300,19 @@ export default class LlmChatPanel extends BasicWidget {
                    ...this.metadata,
                    ...savedData.metadata
                };
-
+
                // Ensure tool executions are loaded
                if (savedData.metadata.toolExecutions && Array.isArray(savedData.metadata.toolExecutions)) {
                    console.log(`Loaded ${savedData.metadata.toolExecutions.length} tool executions from saved data`);
-
+
                    if (!this.metadata.toolExecutions) {
                        this.metadata.toolExecutions = [];
                    }
-
+
                    // Make sure we don't lose any tool executions
                    this.metadata.toolExecutions = savedData.metadata.toolExecutions;
                }
-
+
                console.log(`Loaded metadata from saved data:`, this.metadata);
            }
@@ -325,10 +325,10 @@ export default class LlmChatPanel extends BasicWidget {
            if (sessionExists) {
                console.log(`Restored session ${savedData.sessionId}`);
                this.sessionId = savedData.sessionId;
-
+
                // If we successfully restored a session, also fetch the latest session data
                try {
-                    const sessionData = await server.get<{
+                    const sessionData = await server.getWithSilentNotFound<{
                        metadata?: {
                            model?: string;
                            provider?: string;
@@ -356,6 +356,7 @@ export default class LlmChatPanel extends BasicWidget {
                            content?: string;
                        }>;
                    }>(`llm/sessions/${savedData.sessionId}`);
+
                    if (sessionData && sessionData.metadata) {
                        // Update our metadata with the latest from the server
                        this.metadata = {
@@ -363,14 +364,23 @@ export default class LlmChatPanel extends BasicWidget {
                            ...sessionData.metadata
                        };
                        console.log(`Updated metadata from server for session ${savedData.sessionId}`);
-
+
                        // If server has sources, update those too
                        if (sessionData.sources && sessionData.sources.length > 0) {
                            this.sources = sessionData.sources;
                        }
+                    } else {
+                        // Session data is missing or incomplete, create a new session
+                        console.log(`Invalid or incomplete session data for ${savedData.sessionId}, creating a new session`);
+                        this.sessionId = null;
+                        await this.createChatSession();
                    }
-                } catch (fetchError) {
+                } catch (fetchError: any) {
+                    // Handle fetch errors (this should now only happen for network issues, not 404s)
                    console.warn(`Could not fetch latest session data: ${fetchError}`);
+                    console.log(`Creating a new session after fetch error`);
+                    this.sessionId = null;
+                    await this.createChatSession();
                }
            } else {
                console.log(`Saved session ${savedData.sessionId} not found, will create new one`);
@@ -658,14 +668,14 @@ export default class LlmChatPanel extends BasicWidget {
                    ...postResponse.metadata
                };
            }
-
+
            // Store sources from the response
            if (postResponse.sources && postResponse.sources.length > 0) {
                console.log(`Received ${postResponse.sources.length} sources from response`);
                this.sources = postResponse.sources;
                this.showSources(postResponse.sources);
            }
-
+
            // Process the assistant response
            this.processAssistantResponse(postResponse.content, postResponse);
@@ -693,7 +703,7 @@ export default class LlmChatPanel extends BasicWidget {
            content,
            timestamp: new Date()
        });
-
+
        // If we received tool execution information, add it to metadata
        if (fullResponse?.metadata?.toolExecutions) {
            console.log(`Storing ${fullResponse.metadata.toolExecutions.length} tool executions from response`);
@@ -701,10 +711,10 @@ export default class LlmChatPanel extends BasicWidget {
            if (!this.metadata.toolExecutions) {
                this.metadata.toolExecutions = [];
            }
-
+
            // Add new tool executions
            this.metadata.toolExecutions = [
-                ...this.metadata.toolExecutions, 
+                ...this.metadata.toolExecutions,
                ...fullResponse.metadata.toolExecutions
            ];
        }
@@ -739,7 +749,7 @@ export default class LlmChatPanel extends BasicWidget {
            // Content update handler
            (content: string, isDone: boolean = false) => {
                this.updateStreamingUI(content, isDone);
-
+
                // Update session data with additional metadata when streaming is complete
                if (isDone) {
                    // Update our metadata with info from the server
@@ -773,7 +783,7 @@ export default class LlmChatPanel extends BasicWidget {
                    }>(`llm/sessions/${this.sessionId}`)
                    .then((sessionData) => {
                        console.log("Got updated session data:", sessionData);
-
+
                        // Store metadata
                        if (sessionData.metadata) {
                            this.metadata = {
@@ -781,20 +791,20 @@ export default class LlmChatPanel extends BasicWidget {
                                ...sessionData.metadata
                            };
                        }
-
+
                        // Store sources
                        if (sessionData.sources && sessionData.sources.length > 0) {
                            this.sources = sessionData.sources;
                            this.showSources(sessionData.sources);
                        }
-
+
                        // Make sure we include the cached tool executions
                        if (toolExecutionsCache.length > 0) {
                            console.log(`Including ${toolExecutionsCache.length} cached tool executions in metadata`);
                            if (!this.metadata.toolExecutions) {
                                this.metadata.toolExecutions = [];
                            }
-
+
                            // Add any tool executions from our cache that aren't already in metadata
                            const existingIds = new Set((this.metadata.toolExecutions || []).map((t: {id: string}) => t.id));
                            for (const toolExec of toolExecutionsCache) {
@@ -804,7 +814,7 @@ export default class LlmChatPanel extends BasicWidget {
                                }
                            }
                        }
-
+
                        // Save the updated data to the note
                        this.saveCurrentData()
                            .catch(err => console.error("Failed to save data after streaming completed:", err));
@@ -819,7 +829,7 @@ export default class LlmChatPanel extends BasicWidget {
            // Tool execution handler
            (toolData: any) => {
                this.showToolExecutionInfo(toolData);
-
+
                // Cache tools we see during streaming to include them in the final saved data
                if (toolData && toolData.action === 'result' && toolData.tool) {
                    // Create a tool execution record
@@ -831,20 +841,20 @@ export default class LlmChatPanel extends BasicWidget {
                        error: toolData.error,
                        timestamp: new Date().toISOString()
                    };
-
+
                    // Add to both our local cache for immediate saving and to metadata for later saving
                    toolExecutionsCache.push(toolExec);
-
+
                    // Initialize toolExecutions array if it doesn't exist
                    if (!this.metadata.toolExecutions) {
                        this.metadata.toolExecutions = [];
                    }
-
+
                    // Add tool execution to our metadata
                    this.metadata.toolExecutions.push(toolExec);
-
+
                    console.log(`Cached tool execution for ${toolData.tool} to be saved later`);
-
+
                    // Save immediately after receiving a tool execution
                    // This ensures we don't lose tool execution data if streaming fails
                    this.saveCurrentData().catch(err => {
@@ -912,7 +922,7 @@ export default class LlmChatPanel extends BasicWidget {
            this.addMessageToChat('assistant', assistantResponse);
            console.log(`[${logId}] Successfully added new assistant message`);
        }
-
+
        // Update messages array only if this is the first update or the final update
        if (!this.messages.some(m => m.role === 'assistant') || isDone) {
            // Add or update the assistant message in our local array
@@ -928,7 +938,7 @@ export default class LlmChatPanel extends BasicWidget {
                timestamp: new Date()
            });
        }
-
+
        // If this is the final update, save the data
        if (isDone) {
            console.log(`[${logId}] Streaming finished, saving data to note`);
diff --git a/src/services/llm/rest_chat_service.ts b/src/services/llm/rest_chat_service.ts
index 4f669118f..d55a3780e 100644
--- a/src/services/llm/rest_chat_service.ts
+++ b/src/services/llm/rest_chat_service.ts
@@ -493,7 +493,7 @@ class RestChatService {
        // Create a stream callback wrapper
        // This will ensure we properly handle all streaming messages
        let messageContent = '';
-
+
        // Used to track tool call responses for metadata storage
        const toolResponseMap = new Map();
        let streamFinished = false;
@@ -628,13 +628,13 @@ class RestChatService {

        // Extract sources if they're available
        const sources = (response as any).sources || [];
-
+
        // Store sources in the session metadata if they're present
        if (sources.length > 0) {
            session.metadata.sources = sources;
            log.info(`Stored ${sources.length} sources in session metadata`);
        }
-
+
        // Return the response with complete metadata
        return {
            content: response.text || '',
@@ -1160,12 +1160,12 @@ class RestChatService {
                content: messageContent,
                timestamp: new Date()
            };
-
+
            // If there were tool calls, store them with the message
            if (response.tool_calls && response.tool_calls.length > 0) {
                assistantMessage.tool_calls = response.tool_calls;
            }
-
+
            session.messages.push(assistantMessage);

            return;
@@ -1187,12 +1187,12 @@ class RestChatService {
        // Handle standard streaming through the stream() method
        if (response.stream) {
            log.info(`Provider ${service.getName()} supports streaming via stream() method`);
-
+
            // Store information about the model and provider in session metadata
            session.metadata.model = response.model || session.metadata.model;
            session.metadata.provider = response.provider || session.metadata.provider;
            session.metadata.lastUpdated = new Date().toISOString();
-
+
            // If response has tool_calls, capture those for later storage in metadata
            if (response.tool_calls && response.tool_calls.length > 0) {
                log.info(`Storing ${response.tool_calls.length} initial tool calls in session metadata`);
@@ -1245,16 +1245,16 @@ class RestChatService {
                    // Signal completion when done
                    if (chunk.done) {
                        log.info(`Stream completed from ${service.getName()}, total content: ${messageContent.length} chars`);
-
+
                        // Store tool executions from the conversation into metadata
                        if (session.metadata.pendingToolCalls) {
                            const toolExecutions = session.metadata.toolExecutions || [];
-
+
                            // We don't have a toolResponseMap available at this scope
                            // Just record the pending tool calls with minimal information
                            for (const toolCall of session.metadata.pendingToolCalls) {
                                if (!toolCall.id) continue;
-
+
                                // Parse arguments
                                let args = toolCall.function.arguments;
                                if (typeof args === 'string') {
@@ -1264,7 +1264,7 @@ class RestChatService {
                                        // Keep as string if not valid JSON
                                    }
                                }
-
+
                                // Add to tool executions with minimal info
                                toolExecutions.push({
                                    id: toolCall.id,
@@ -1274,7 +1274,7 @@ class RestChatService {
                                    timestamp: new Date().toISOString()
                                });
                            }
-
+
                            // Update session metadata
                            session.metadata.toolExecutions = toolExecutions;
                            delete session.metadata.pendingToolCalls;
@@ -1478,35 +1478,35 @@ class RestChatService {
     */
    private recordToolExecution(sessionId: string, tool: any, result: string, error?: string): void {
        if (!sessionId) return;
-
+
        const session = sessions.get(sessionId);
        if (!session) return;
-
+
        try {
            const toolExecutions = session.metadata.toolExecutions || [];
-
+
            // Format tool execution record
            const execution = {
                id: tool.id || `tool-${Date.now()}-${Math.random().toString(36).substring(2, 7)}`,
                name: tool.function?.name || 'unknown',
-                arguments: typeof tool.function?.arguments === 'string' 
+                arguments: typeof tool.function?.arguments === 'string'
                    ? (() => { try { return JSON.parse(tool.function.arguments); } catch { return tool.function.arguments; } })()
                    : tool.function?.arguments || {},
                result: result,
                error: error,
                timestamp: new Date().toISOString()
            };
-
+
            // Add to tool executions
            toolExecutions.push(execution);
            session.metadata.toolExecutions = toolExecutions;
-
+
            log.info(`Recorded tool execution for ${execution.name} in session ${sessionId}`);
        } catch (err) {
            log.error(`Failed to record tool execution: ${err}`);
        }
    }
-
+
    buildContextFromNotes(sources: NoteSource[], query: string): string {
        if (!sources || sources.length === 0) {
            return query || '';
        }
@@ -1604,7 +1604,15 @@ class RestChatService {
        // Check if session exists
        const session = sessions.get(sessionId);
        if (!session) {
-            throw new Error(`Session with ID ${sessionId} not found`);
+            // Instead of throwing an error, return a structured 404 response
+            // that the frontend can handle gracefully
+            res.status(404).json({
+                error: true,
+                message: `Session with ID ${sessionId} not found`,
+                code: 'session_not_found',
+                sessionId
+            });
+            return null; // Return null to prevent further processing
        }

        // Return session with metadata and additional fields
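
Reviewer note (not part of the patch): the sketch below is a minimal TypeScript illustration of the restore-or-recreate flow that the panel changes above implement. It assumes checkSessionExists and createChatSession are the helpers exported from communication.ts shown in this diff, that createChatSession resolves to the id of the newly created session (its return type is not visible here), and that server.getWithSilentNotFound resolves without a user-visible error when the sessions endpoint returns the structured session_not_found 404 added in rest_chat_service.ts.

    // Hypothetical helper, not present in the codebase: reuse a saved session id
    // if the server still knows it, otherwise fall back to a fresh session.
    import { checkSessionExists, createChatSession } from "./communication";

    async function resolveSessionId(savedSessionId: string | null): Promise<string | null> {
        if (savedSessionId) {
            // checkSessionExists now goes through getWithSilentNotFound, so a deleted
            // session comes back as `false` here instead of surfacing a 404 error.
            if (await checkSessionExists(savedSessionId)) {
                return savedSessionId;
            }
            console.log(`Saved session ${savedSessionId} not found, creating a new one`);
        }

        // Mirrors the fallback in llm_chat_panel.ts after a missing or incomplete
        // session response: drop the stale id and start a new chat session.
        return await createChatSession();
    }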