mirror of https://github.com/TriliumNext/Notes.git, synced 2025-07-30 03:32:26 +08:00
Do a better job of not having the frontend lockup if the session doesn't exist
This commit is contained in:
parent 9a68155edc
commit e65c5ddd46
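
The client-side hunks below replace server.get with a server.getWithSilentNotFound helper so that a missing session no longer surfaces as a thrown 404 that locks up the panel. The helper itself is not part of this diff; a rough, hypothetical sketch of the idea (a GET wrapper that resolves to null on 404 instead of rejecting) might look like:

    // Hypothetical sketch only -- the real helper lives in the client's server service
    // and is not shown in this commit. The point is that a 404 resolves to null
    // instead of rejecting, so callers don't need a try/catch around every existence check.
    async function getWithSilentNotFound<T>(url: string): Promise<T | null> {
        const response = await fetch(url);
        if (response.status === 404) {
            return null;
        }
        if (!response.ok) {
            throw new Error(`Request to ${url} failed with status ${response.status}`);
        }
        return await response.json() as T;
    }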
@@ -28,9 +28,9 @@ export async function createChatSession(): Promise<string | null> {
  */
 export async function checkSessionExists(sessionId: string): Promise<boolean> {
     try {
-        const sessionCheck = await server.get<any>(`llm/sessions/${sessionId}`);
+        const sessionCheck = await server.getWithSilentNotFound<any>(`llm/sessions/${sessionId}`);
         return !!(sessionCheck && sessionCheck.id);
-    } catch (error) {
+    } catch (error: any) {
         console.log(`Error checking session ${sessionId}:`, error);
         return false;
     }
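
With checkSessionExists resolving to false instead of throwing when the session is gone, the panel can probe a saved session id and fall back to creating a fresh one. A hedged sketch of that restore-or-recreate flow, built only from the two helpers in this module (restoreOrCreateSession is an illustrative name, not a function added by this commit):

    // Sketch: keep the saved session if the server still knows it, otherwise start over.
    async function restoreOrCreateSession(savedSessionId: string | null): Promise<string | null> {
        if (savedSessionId && await checkSessionExists(savedSessionId)) {
            return savedSessionId;
        }
        // Session is missing or was never created: recreate instead of letting the UI hang.
        return await createChatSession();
    }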
@@ -170,13 +170,13 @@ export default class LlmChatPanel extends BasicWidget {
             error?: string;
             timestamp: string;
         }> = [];
 
         // First include any tool executions already in metadata (from streaming events)
         if (this.metadata?.toolExecutions && Array.isArray(this.metadata.toolExecutions)) {
             toolExecutions = [...this.metadata.toolExecutions];
             console.log(`Including ${toolExecutions.length} tool executions from metadata`);
         }
 
         // Also extract any visible tool steps from the UI
         const extractedExecutions = toolSteps.map(step => {
             // Parse tool execution information
@@ -219,7 +219,7 @@ export default class LlmChatPanel extends BasicWidget {
                 timestamp: new Date().toISOString()
             };
         });
 
         // Merge the tool executions, keeping only unique IDs
         const existingIds = new Set(toolExecutions.map((t: {id: string}) => t.id));
         for (const exec of extractedExecutions) {
@@ -287,7 +287,7 @@ export default class LlmChatPanel extends BasicWidget {
             if (savedData.sources && Array.isArray(savedData.sources)) {
                 this.sources = savedData.sources;
                 console.log(`Loaded ${this.sources.length} sources from saved data`);
 
                 // Show sources in the UI if they exist
                 if (this.sources.length > 0) {
                     this.showSources(this.sources);
@@ -300,19 +300,19 @@ export default class LlmChatPanel extends BasicWidget {
                     ...this.metadata,
                     ...savedData.metadata
                 };
 
                 // Ensure tool executions are loaded
                 if (savedData.metadata.toolExecutions && Array.isArray(savedData.metadata.toolExecutions)) {
                     console.log(`Loaded ${savedData.metadata.toolExecutions.length} tool executions from saved data`);
 
                     if (!this.metadata.toolExecutions) {
                         this.metadata.toolExecutions = [];
                     }
 
                     // Make sure we don't lose any tool executions
                     this.metadata.toolExecutions = savedData.metadata.toolExecutions;
                 }
 
                 console.log(`Loaded metadata from saved data:`, this.metadata);
             }
 
@@ -325,10 +325,10 @@ export default class LlmChatPanel extends BasicWidget {
             if (sessionExists) {
                 console.log(`Restored session ${savedData.sessionId}`);
                 this.sessionId = savedData.sessionId;
 
                 // If we successfully restored a session, also fetch the latest session data
                 try {
-                    const sessionData = await server.get<{
+                    const sessionData = await server.getWithSilentNotFound<{
                         metadata?: {
                             model?: string;
                             provider?: string;
@@ -356,6 +356,7 @@ export default class LlmChatPanel extends BasicWidget {
                             content?: string;
                         }>;
                     }>(`llm/sessions/${savedData.sessionId}`);
 
                     if (sessionData && sessionData.metadata) {
                         // Update our metadata with the latest from the server
                         this.metadata = {
@@ -363,14 +364,23 @@ export default class LlmChatPanel extends BasicWidget {
                             ...sessionData.metadata
                         };
                         console.log(`Updated metadata from server for session ${savedData.sessionId}`);
 
                         // If server has sources, update those too
                         if (sessionData.sources && sessionData.sources.length > 0) {
                             this.sources = sessionData.sources;
                         }
+                    } else {
+                        // Session data is missing or incomplete, create a new session
+                        console.log(`Invalid or incomplete session data for ${savedData.sessionId}, creating a new session`);
+                        this.sessionId = null;
+                        await this.createChatSession();
                     }
-                } catch (fetchError) {
+                } catch (fetchError: any) {
+                    // Handle fetch errors (this should now only happen for network issues, not 404s)
                     console.warn(`Could not fetch latest session data: ${fetchError}`);
+                    console.log(`Creating a new session after fetch error`);
+                    this.sessionId = null;
+                    await this.createChatSession();
                 }
             } else {
                 console.log(`Saved session ${savedData.sessionId} not found, will create new one`);
@@ -658,14 +668,14 @@ export default class LlmChatPanel extends BasicWidget {
                 ...postResponse.metadata
             };
         }
 
         // Store sources from the response
         if (postResponse.sources && postResponse.sources.length > 0) {
             console.log(`Received ${postResponse.sources.length} sources from response`);
             this.sources = postResponse.sources;
             this.showSources(postResponse.sources);
         }
 
         // Process the assistant response
         this.processAssistantResponse(postResponse.content, postResponse);
 
@@ -693,7 +703,7 @@ export default class LlmChatPanel extends BasicWidget {
             content,
             timestamp: new Date()
         });
 
         // If we received tool execution information, add it to metadata
         if (fullResponse?.metadata?.toolExecutions) {
             console.log(`Storing ${fullResponse.metadata.toolExecutions.length} tool executions from response`);
@@ -701,10 +711,10 @@ export default class LlmChatPanel extends BasicWidget {
             if (!this.metadata.toolExecutions) {
                 this.metadata.toolExecutions = [];
             }
 
             // Add new tool executions
             this.metadata.toolExecutions = [
                 ...this.metadata.toolExecutions,
                 ...fullResponse.metadata.toolExecutions
             ];
         }
@@ -739,7 +749,7 @@ export default class LlmChatPanel extends BasicWidget {
             // Content update handler
             (content: string, isDone: boolean = false) => {
                 this.updateStreamingUI(content, isDone);
 
                 // Update session data with additional metadata when streaming is complete
                 if (isDone) {
                     // Update our metadata with info from the server
@@ -773,7 +783,7 @@ export default class LlmChatPanel extends BasicWidget {
                     }>(`llm/sessions/${this.sessionId}`)
                         .then((sessionData) => {
                             console.log("Got updated session data:", sessionData);
 
                             // Store metadata
                             if (sessionData.metadata) {
                                 this.metadata = {
@@ -781,20 +791,20 @@ export default class LlmChatPanel extends BasicWidget {
                                     ...sessionData.metadata
                                 };
                             }
 
                             // Store sources
                             if (sessionData.sources && sessionData.sources.length > 0) {
                                 this.sources = sessionData.sources;
                                 this.showSources(sessionData.sources);
                             }
 
                             // Make sure we include the cached tool executions
                             if (toolExecutionsCache.length > 0) {
                                 console.log(`Including ${toolExecutionsCache.length} cached tool executions in metadata`);
                                 if (!this.metadata.toolExecutions) {
                                     this.metadata.toolExecutions = [];
                                 }
 
                                 // Add any tool executions from our cache that aren't already in metadata
                                 const existingIds = new Set((this.metadata.toolExecutions || []).map((t: {id: string}) => t.id));
                                 for (const toolExec of toolExecutionsCache) {
@@ -804,7 +814,7 @@ export default class LlmChatPanel extends BasicWidget {
                                 }
                             }
                         }
 
                         // Save the updated data to the note
                         this.saveCurrentData()
                             .catch(err => console.error("Failed to save data after streaming completed:", err));
@@ -819,7 +829,7 @@ export default class LlmChatPanel extends BasicWidget {
             // Tool execution handler
             (toolData: any) => {
                 this.showToolExecutionInfo(toolData);
 
                 // Cache tools we see during streaming to include them in the final saved data
                 if (toolData && toolData.action === 'result' && toolData.tool) {
                     // Create a tool execution record
@@ -831,20 +841,20 @@ export default class LlmChatPanel extends BasicWidget {
                         error: toolData.error,
                         timestamp: new Date().toISOString()
                     };
 
                     // Add to both our local cache for immediate saving and to metadata for later saving
                     toolExecutionsCache.push(toolExec);
 
                     // Initialize toolExecutions array if it doesn't exist
                     if (!this.metadata.toolExecutions) {
                         this.metadata.toolExecutions = [];
                     }
 
                     // Add tool execution to our metadata
                     this.metadata.toolExecutions.push(toolExec);
 
                     console.log(`Cached tool execution for ${toolData.tool} to be saved later`);
 
                     // Save immediately after receiving a tool execution
                     // This ensures we don't lose tool execution data if streaming fails
                     this.saveCurrentData().catch(err => {
@@ -912,7 +922,7 @@ export default class LlmChatPanel extends BasicWidget {
                 this.addMessageToChat('assistant', assistantResponse);
                 console.log(`[${logId}] Successfully added new assistant message`);
             }
 
             // Update messages array only if this is the first update or the final update
             if (!this.messages.some(m => m.role === 'assistant') || isDone) {
                 // Add or update the assistant message in our local array
@@ -928,7 +938,7 @@ export default class LlmChatPanel extends BasicWidget {
                     timestamp: new Date()
                 });
             }
 
             // If this is the final update, save the data
             if (isDone) {
                 console.log(`[${logId}] Streaming finished, saving data to note`);
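
Several of the panel hunks above merge tool executions from the streaming cache into metadata while "keeping only unique IDs". As a standalone illustration of that merge step (the ToolExecution shape is simplified from the fields visible in the diff, and mergeToolExecutions is an illustrative helper, not code from this commit):

    interface ToolExecution {
        id: string;
        name: string;
        result?: string;
        error?: string;
        timestamp: string;
    }

    // Keep everything already recorded, then append only executions with unseen ids.
    function mergeToolExecutions(existing: ToolExecution[], incoming: ToolExecution[]): ToolExecution[] {
        const existingIds = new Set(existing.map(t => t.id));
        return [...existing, ...incoming.filter(t => !existingIds.has(t.id))];
    }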
@@ -493,7 +493,7 @@ class RestChatService {
        // Create a stream callback wrapper
        // This will ensure we properly handle all streaming messages
        let messageContent = '';
 
        // Used to track tool call responses for metadata storage
        const toolResponseMap = new Map<string, string>();
        let streamFinished = false;
@@ -628,13 +628,13 @@ class RestChatService {
 
            // Extract sources if they're available
            const sources = (response as any).sources || [];
 
            // Store sources in the session metadata if they're present
            if (sources.length > 0) {
                session.metadata.sources = sources;
                log.info(`Stored ${sources.length} sources in session metadata`);
            }
 
            // Return the response with complete metadata
            return {
                content: response.text || '',
@@ -1160,12 +1160,12 @@ class RestChatService {
                content: messageContent,
                timestamp: new Date()
            };
 
            // If there were tool calls, store them with the message
            if (response.tool_calls && response.tool_calls.length > 0) {
                assistantMessage.tool_calls = response.tool_calls;
            }
 
            session.messages.push(assistantMessage);
 
            return;
@@ -1187,12 +1187,12 @@ class RestChatService {
        // Handle standard streaming through the stream() method
        if (response.stream) {
            log.info(`Provider ${service.getName()} supports streaming via stream() method`);
 
            // Store information about the model and provider in session metadata
            session.metadata.model = response.model || session.metadata.model;
            session.metadata.provider = response.provider || session.metadata.provider;
            session.metadata.lastUpdated = new Date().toISOString();
 
            // If response has tool_calls, capture those for later storage in metadata
            if (response.tool_calls && response.tool_calls.length > 0) {
                log.info(`Storing ${response.tool_calls.length} initial tool calls in session metadata`);
@@ -1245,16 +1245,16 @@ class RestChatService {
                // Signal completion when done
                if (chunk.done) {
                    log.info(`Stream completed from ${service.getName()}, total content: ${messageContent.length} chars`);
 
                    // Store tool executions from the conversation into metadata
                    if (session.metadata.pendingToolCalls) {
                        const toolExecutions = session.metadata.toolExecutions || [];
 
                        // We don't have a toolResponseMap available at this scope
                        // Just record the pending tool calls with minimal information
                        for (const toolCall of session.metadata.pendingToolCalls) {
                            if (!toolCall.id) continue;
 
                            // Parse arguments
                            let args = toolCall.function.arguments;
                            if (typeof args === 'string') {
@@ -1264,7 +1264,7 @@ class RestChatService {
                                    // Keep as string if not valid JSON
                                }
                            }
 
                            // Add to tool executions with minimal info
                            toolExecutions.push({
                                id: toolCall.id,
@@ -1274,7 +1274,7 @@ class RestChatService {
                                timestamp: new Date().toISOString()
                            });
                        }
 
                        // Update session metadata
                        session.metadata.toolExecutions = toolExecutions;
                        delete session.metadata.pendingToolCalls;
@@ -1478,35 +1478,35 @@ class RestChatService {
      */
    private recordToolExecution(sessionId: string, tool: any, result: string, error?: string): void {
        if (!sessionId) return;
 
        const session = sessions.get(sessionId);
        if (!session) return;
 
        try {
            const toolExecutions = session.metadata.toolExecutions || [];
 
            // Format tool execution record
            const execution = {
                id: tool.id || `tool-${Date.now()}-${Math.random().toString(36).substring(2, 7)}`,
                name: tool.function?.name || 'unknown',
                arguments: typeof tool.function?.arguments === 'string'
                    ? (() => { try { return JSON.parse(tool.function.arguments); } catch { return tool.function.arguments; } })()
                    : tool.function?.arguments || {},
                result: result,
                error: error,
                timestamp: new Date().toISOString()
            };
 
            // Add to tool executions
            toolExecutions.push(execution);
            session.metadata.toolExecutions = toolExecutions;
 
            log.info(`Recorded tool execution for ${execution.name} in session ${sessionId}`);
        } catch (err) {
            log.error(`Failed to record tool execution: ${err}`);
        }
    }
 
    buildContextFromNotes(sources: NoteSource[], query: string): string {
        if (!sources || sources.length === 0) {
            return query || '';
@@ -1604,7 +1604,15 @@ class RestChatService {
        // Check if session exists
        const session = sessions.get(sessionId);
        if (!session) {
-            throw new Error(`Session with ID ${sessionId} not found`);
+            // Instead of throwing an error, return a structured 404 response
+            // that the frontend can handle gracefully
+            res.status(404).json({
+                error: true,
+                message: `Session with ID ${sessionId} not found`,
+                code: 'session_not_found',
+                sessionId
+            });
+            return null; // Return null to prevent further processing
        }
 
        // Return session with metadata and additional fields
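
On the client, the structured payload above gives callers something concrete to branch on instead of a thrown error. A hedged sketch of that check (loadSessionOrRecreate and fetchSession are illustrative names; only the error, code, and sessionId fields come from the response defined in this commit):

    // Sketch: branch on the structured "session not found" body rather than a thrown 404.
    async function fetchSession(sessionId: string): Promise<any> {
        const response = await fetch(`llm/sessions/${sessionId}`);
        return response.json();
    }

    async function loadSessionOrRecreate(sessionId: string): Promise<string | null> {
        const data = await fetchSession(sessionId);
        if (data && data.error && data.code === 'session_not_found') {
            // The server says this session is gone: create a new one instead of freezing the UI.
            return await createChatSession();
        }
        return sessionId;
    }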