From 193c577acfb893a8d2cfa0b78611348af5180f89 Mon Sep 17 00:00:00 2001
From: perf3ct
Date: Tue, 15 Apr 2025 22:52:13 +0000
Subject: [PATCH] wow, this fixed openai too

---
 src/services/llm/pipeline/chat_pipeline.ts | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/src/services/llm/pipeline/chat_pipeline.ts b/src/services/llm/pipeline/chat_pipeline.ts
index c1b7e9511..7638ebf20 100644
--- a/src/services/llm/pipeline/chat_pipeline.ts
+++ b/src/services/llm/pipeline/chat_pipeline.ts
@@ -692,22 +692,22 @@ export class ChatPipeline {
                 log.info(`Sent final response with done=true signal and text content`);
             } else {
                 // For Anthropic, sometimes text is empty but response is in stream
-                if (currentResponse.provider === 'Anthropic' && currentResponse.stream) {
-                    log.info(`Detected empty response text for Anthropic provider with stream, sending stream content directly`);
-                    // For Anthropic with stream mode, we need to stream the final response
+                if ((currentResponse.provider === 'Anthropic' || currentResponse.provider === 'OpenAI') && currentResponse.stream) {
+                    log.info(`Detected empty response text for ${currentResponse.provider} provider with stream, sending stream content directly`);
+                    // For Anthropic/OpenAI with stream mode, we need to stream the final response
                     if (currentResponse.stream) {
                         await currentResponse.stream(async (chunk: StreamChunk) => {
                             // Process the chunk
                             const processedChunk = await this.processStreamChunk(chunk, input.options);
-                            
+
                             // Forward to callback
                             streamCallback(
-                                processedChunk.text, 
+                                processedChunk.text,
                                 processedChunk.done || chunk.done || false,
                                 chunk
                             );
                         });
-                        log.info(`Completed streaming final Anthropic response after tool execution`);
+                        log.info(`Completed streaming final ${currentResponse.provider} response after tool execution`);
                     }
                 } else {
                     // Empty response with done=true as fallback