Mirror of https://github.com/TriliumNext/Notes.git

commit c04e3b2c89 (parent 6750467edc)
Author: perf3ct

    okay openai tool calling response is close to working
@@ -42,6 +42,12 @@ export interface StreamChunk {
      * This can include thinking state, tool execution info, etc.
      */
     raw?: any;
+
+    /**
+     * Tool calls from the LLM (if any)
+     * These may be accumulated over multiple chunks during streaming
+     */
+    tool_calls?: ToolCall[] | any[];
 }
 
 /**
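The interface change above gives streaming consumers a field for partially accumulated tool calls. As a rough illustration of why accumulation is needed (a standalone sketch, not code from this commit; the type names and the tool name are made up), OpenAI streams each tool call as a series of deltas keyed by index: the function name usually arrives once, while the JSON arguments arrive as string fragments that have to be concatenated:

interface ToolCallDelta {
    index: number;
    id?: string;
    type?: string;
    function?: { name?: string; arguments?: string };
}

interface AccumulatedToolCall {
    id: string;
    type: string;
    function: { name: string; arguments: string };
}

// Merge one chunk's deltas into the running array, keyed by delta.index.
function accumulate(acc: AccumulatedToolCall[], deltas: ToolCallDelta[]): void {
    for (const d of deltas) {
        // The first fragment for an index creates the entry...
        acc[d.index] ??= {
            id: d.id ?? `call_${d.index}`,
            type: d.type ?? 'function',
            function: { name: '', arguments: '' }
        };
        // ...later fragments set the name and append argument text piece by piece.
        if (d.function?.name) acc[d.index].function.name = d.function.name;
        if (d.function?.arguments) acc[d.index].function.arguments += d.function.arguments;
    }
}

// Three chunks of the same call arriving over a stream:
const calls: AccumulatedToolCall[] = [];
accumulate(calls, [{ index: 0, id: 'call_abc', type: 'function', function: { name: 'search_notes' } }]);
accumulate(calls, [{ index: 0, function: { arguments: '{"query":' } }]);
accumulate(calls, [{ index: 0, function: { arguments: '"tool calling"}' } }]);

console.log(calls.filter(Boolean)[0].function.arguments); // {"query":"tool calling"}

The commit below applies the same idea inside the OpenAI provider's stream handler, using the delta index as the array slot and publishing the filtered array on the response and on each callback chunk.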
@@ -73,11 +73,16 @@ export class OpenAIService extends BaseAIService {
                 // Get stream from OpenAI SDK
                 const stream = await client.chat.completions.create(params);
 
+                // Create a closure to hold accumulated tool calls
+                let accumulatedToolCalls: any[] = [];
+
                 // Return a response with the stream handler
-                return {
+                const response: ChatResponse = {
                     text: '', // Initial empty text, will be populated during streaming
                     model: params.model,
                     provider: this.getName(),
+                    // Add tool_calls property that will be populated during streaming
+                    tool_calls: [],
                     stream: async (callback) => {
                         let completeText = '';
 
@@ -88,15 +93,53 @@ export class OpenAIService extends BaseAIService {
                                     const content = chunk.choices[0]?.delta?.content || '';
                                     const isDone = !!chunk.choices[0]?.finish_reason;
 
+                                    // Check for tool calls in the delta
+                                    const deltaToolCalls = chunk.choices[0]?.delta?.tool_calls;
+
+                                    if (deltaToolCalls) {
+                                        // Process and accumulate tool calls from this chunk
+                                        for (const deltaToolCall of deltaToolCalls) {
+                                            const toolCallId = deltaToolCall.index;
+
+                                            // Initialize or update the accumulated tool call
+                                            if (!accumulatedToolCalls[toolCallId]) {
+                                                accumulatedToolCalls[toolCallId] = {
+                                                    id: deltaToolCall.id || `call_${toolCallId}`,
+                                                    type: deltaToolCall.type || 'function',
+                                                    function: {
+                                                        name: '',
+                                                        arguments: ''
+                                                    }
+                                                };
+                                            }
+
+                                            // Update function name if present
+                                            if (deltaToolCall.function?.name) {
+                                                accumulatedToolCalls[toolCallId].function.name =
+                                                    deltaToolCall.function.name;
+                                            }
+
+                                            // Append to function arguments if present
+                                            if (deltaToolCall.function?.arguments) {
+                                                accumulatedToolCalls[toolCallId].function.arguments +=
+                                                    deltaToolCall.function.arguments;
+                                            }
+                                        }
+
+                                        // Important: Update the response's tool_calls with accumulated tool calls
+                                        response.tool_calls = accumulatedToolCalls.filter(Boolean);
+                                    }
+
                                     if (content) {
                                         completeText += content;
                                     }
 
-                                    // Send the chunk to the caller with raw data
+                                    // Send the chunk to the caller with raw data and any accumulated tool calls
                                     await callback({
                                         text: content,
                                         done: isDone,
-                                        raw: chunk // Include the raw chunk for advanced processing
+                                        raw: chunk,
+                                        tool_calls: accumulatedToolCalls.length > 0 ? accumulatedToolCalls.filter(Boolean) : undefined
                                     });
 
                                     if (isDone) {
@@ -110,10 +153,18 @@ export class OpenAIService extends BaseAIService {
                                 if ('choices' in stream) {
                                     const content = stream.choices[0]?.message?.content || '';
                                     completeText = content;
+
+                                    // Check if there are tool calls in the non-stream response
+                                    const toolCalls = stream.choices[0]?.message?.tool_calls;
+                                    if (toolCalls) {
+                                        response.tool_calls = toolCalls;
+                                    }
+
                                     await callback({
                                         text: content,
                                         done: true,
-                                        raw: stream
+                                        raw: stream,
+                                        tool_calls: toolCalls
                                     });
                                 }
                             }
@@ -122,9 +173,15 @@ export class OpenAIService extends BaseAIService {
                             throw error;
                         }
+
+                        // Update the response's text with the complete text
+                        response.text = completeText;
+
                         // Return the complete text
                         return completeText;
                     }
                 };
+
+                return response;
             } else {
                 // Non-streaming response
                 params.stream = false;
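Downstream consumption is not part of this diff, but the shape built above implies how a caller would use it. A minimal sketch, assuming only the ChatResponse fields visible here (text, tool_calls, and the stream(callback) handler that resolves to the complete text); the interface and function names below are illustrative, not from the repository:

interface StreamedChunk {
    text: string;
    done: boolean;
    tool_calls?: any[];
}

interface ChatResponseLike {
    text: string;
    tool_calls?: any[];
    stream?: (cb: (chunk: StreamedChunk) => Promise<void>) => Promise<string>;
}

async function consumeResponse(response: ChatResponseLike): Promise<void> {
    if (!response.stream) {
        // Non-streaming path: text and tool_calls are already populated.
        console.log(response.text, response.tool_calls);
        return;
    }

    let transcript = '';
    await response.stream(async (chunk) => {
        if (chunk.text) {
            transcript += chunk.text; // forward text deltas to the UI as they arrive
        }
        if (chunk.done && chunk.tool_calls?.length) {
            // Argument strings are complete JSON only once the stream reports done.
            for (const call of chunk.tool_calls) {
                const args = JSON.parse(call.function.arguments || '{}');
                console.log(`tool requested: ${call.function.name}`, args);
            }
        }
    });

    // The handler also copies the final text and accumulated tool calls back onto the response.
    console.log(transcript === response.text, response.tool_calls);
}

Checking tool_calls only on the final chunk matches the accumulation above: argument fragments keep arriving until the chunk that carries a finish_reason.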