diff --git a/apps/server/src/services/llm/pipeline/stages/tool_calling_stage.ts b/apps/server/src/services/llm/pipeline/stages/tool_calling_stage.ts
index fe5a6df5d..e71edbdfb 100644
--- a/apps/server/src/services/llm/pipeline/stages/tool_calling_stage.ts
+++ b/apps/server/src/services/llm/pipeline/stages/tool_calling_stage.ts
@@ -69,9 +69,19 @@ export class ToolCallingStage extends BasePipelineStage
-        const availableTools = registryTools.map(tool => tool as unknown as ToolInterface);
+
+        // Convert ToolHandler[] to ToolInterface[] with proper type safety
+        const availableTools: ToolInterface[] = registryTools.map(tool => {
+            // Create a proper ToolInterface from the ToolHandler
+            const toolInterface: ToolInterface = {
+                // Pass through the execute method
+                execute: (args: Record<string, unknown>) => tool.execute(args),
+                // Include other properties from the tool definition
+                ...tool.definition
+            };
+            return toolInterface;
+        });
 
         log.info(`Available tools in registry: ${availableTools.length}`);
 
         // Log available tools for debugging
diff --git a/apps/server/src/services/llm/providers/ollama_service.ts b/apps/server/src/services/llm/providers/ollama_service.ts
index bd120a57e..750118027 100644
--- a/apps/server/src/services/llm/providers/ollama_service.ts
+++ b/apps/server/src/services/llm/providers/ollama_service.ts
@@ -366,7 +366,6 @@ export class OllamaService extends BaseAIService {
                 },
                 async (callback) => {
                     let completeText = '';
-                    let responseToolCalls: ToolCall[] = [];
                     let chunkCount = 0;
 
                     // Create a response object that will be updated during streaming
@@ -410,9 +409,7 @@ export class OllamaService extends BaseAIService {
                         const toolCalls = StreamProcessor.extractToolCalls(chunk);
                         // Update response tool calls if any are found
                         if (toolCalls.length > 0) {
-                            // Update tool calls in the overall response
-                            responseToolCalls = toolCalls;
-                            // Also update the response object's tool_calls for final return
+                            // Update the response object's tool_calls for final return
                             response.tool_calls = toolCalls;
                         }