get anthropic sdk to send tools

nice

close

what is even going on lol

anthropic tools mostly work
This commit is contained in:
perf3ct 2025-04-14 23:42:38 +00:00
parent 2bc2aa857f
commit 3bddb60ecc
No known key found for this signature in database
GPG Key ID: 569C4EEC436F5232
4 changed files with 258 additions and 78 deletions

View File

@ -70,8 +70,8 @@ async function listModels(req: Request, res: Response) {
const { baseUrl } = req.body;
// Use provided base URL or default from options
const anthropicBaseUrl = baseUrl ||
await options.getOption('anthropicBaseUrl') ||
const anthropicBaseUrl = baseUrl ||
await options.getOption('anthropicBaseUrl') ||
PROVIDER_CONSTANTS.ANTHROPIC.BASE_URL;
const apiKey = await options.getOption('anthropicApiKey');
@ -80,71 +80,18 @@ async function listModels(req: Request, res: Response) {
throw new Error('Anthropic API key is not configured');
}
log.info(`Listing models from Anthropic API using the SDK`);
log.info(`Using predefined Anthropic models list (avoiding direct API call)`);
// Initialize the Anthropic client with the SDK
const client = new Anthropic({
apiKey,
baseURL: anthropicBaseUrl,
defaultHeaders: {
'anthropic-version': PROVIDER_CONSTANTS.ANTHROPIC.API_VERSION,
'anthropic-beta': PROVIDER_CONSTANTS.ANTHROPIC.BETA_VERSION
}
});
// Instead of using the SDK's built-in models listing which might not work,
// directly use the predefined available models
const chatModels = PROVIDER_CONSTANTS.ANTHROPIC.AVAILABLE_MODELS.map(model => ({
id: model.id,
name: model.name,
type: 'chat'
}));
// Use the SDK's built-in models listing
const response = await client.models.list();
// Process the models
const allModels = response.data || [];
// Log available models
log.info(`Found ${allModels.length} models from Anthropic: ${allModels.map(m => m.id).join(', ')}`);
// Separate models into chat models and embedding models
const chatModels = allModels
.filter(model =>
// Claude models are for chat
model.id.includes('claude')
)
.map(model => {
// Get a simplified name for display purposes
let displayName = model.id;
// Try to simplify the model name by removing version suffixes
if (model.id.match(/claude-\d+-\w+-\d+/)) {
displayName = model.id.replace(/-\d+$/, '');
}
return {
id: model.id, // Keep full ID for API calls
name: displayName, // Use simplified name for display
type: 'chat'
};
});
// Also include known models that might not be returned by the API
for (const model of PROVIDER_CONSTANTS.ANTHROPIC.AVAILABLE_MODELS) {
// Check if this model is already in our list
if (!chatModels.some((m: AnthropicModel) => m.id === model.id)) {
chatModels.push({
id: model.id,
name: model.name,
type: 'chat'
});
}
}
// Note: Anthropic might not have embedding models yet, but we'll include this for future compatibility
const embeddingModels = allModels
.filter(model =>
// If Anthropic releases embedding models, they'd likely include 'embed' in the name
model.id.includes('embed')
)
.map(model => ({
id: model.id,
name: model.id,
type: 'embedding'
}));
// Anthropic doesn't currently have embedding models
const embeddingModels: AnthropicModel[] = [];
// Return the models list
return {
@ -162,4 +109,4 @@ async function listModels(req: Request, res: Response) {
export default {
listModels
};
};

View File

@ -48,6 +48,16 @@ export interface StreamChunk {
* These may be accumulated over multiple chunks during streaming
*/
tool_calls?: ToolCall[] | any[];
/**
* Tool execution information during streaming
* Includes tool name, args, and execution status
*/
toolExecution?: {
type: 'start' | 'update' | 'complete' | 'error';
tool: any;
result?: any;
};
}
/**
@ -102,6 +112,7 @@ export interface ChatCompletionOptions {
enableTools?: boolean; // Whether to enable tool calling
tools?: any[]; // Tools to provide to the LLM
tool_choice?: any; // Tool choice parameter for the LLM
useAdvancedContext?: boolean; // Whether to use advanced context enrichment
toolExecutionStatus?: any[]; // Status information about executed tools for feedback
providerMetadata?: ModelMetadata; // Metadata about the provider and model capabilities

View File

@ -6,8 +6,9 @@ export const PROVIDER_CONSTANTS = {
DEFAULT_MODEL: 'claude-3-haiku-20240307',
// Model mapping for simplified model names to their full versions
MODEL_MAPPING: {
'claude-3.7-sonnet': 'claude-3-7-sonnet-20240620',
'claude-3.5-haiku': 'claude-3-5-haiku-20240307',
'claude-3.7-sonnet': 'claude-3-7-sonnet-20250219',
'claude-3.5-sonnet': 'claude-3-5-sonnet-20241022',
'claude-3.5-haiku': 'claude-3-5-haiku-20241022',
'claude-3-opus': 'claude-3-opus-20240229',
'claude-3-sonnet': 'claude-3-sonnet-20240229',
'claude-3-haiku': 'claude-3-haiku-20240307',
@ -16,15 +17,21 @@ export const PROVIDER_CONSTANTS = {
// These are the currently available models from Anthropic
AVAILABLE_MODELS: [
{
id: 'claude-3-7-sonnet-20240620',
id: 'claude-3-7-sonnet-20250219',
name: 'Claude 3.7 Sonnet',
description: 'Most intelligent model with hybrid reasoning capabilities',
maxTokens: 8192
},
{
id: 'claude-3-5-haiku-20240307',
id: 'claude-3-5-sonnet-20241022',
name: 'Claude 3.5 Sonnet',
description: 'High level of intelligence and capability',
maxTokens: 8192
},
{
id: 'claude-3-5-haiku-20241022',
name: 'Claude 3.5 Haiku',
description: 'Improved version of Haiku with better performance',
description: 'Fastest model with high intelligence',
maxTokens: 8192
},
{

View File

@ -66,10 +66,15 @@ export class AnthropicService extends BaseAIService {
providerOptions.betaVersion
);
// Log API key format (without revealing the actual key)
const apiKeyPrefix = providerOptions.apiKey?.substring(0, 7) || 'undefined';
const apiKeyLength = providerOptions.apiKey?.length || 0;
log.info(`[DEBUG] Using Anthropic API key with prefix '${apiKeyPrefix}...' and length ${apiKeyLength}`);
log.info(`Using Anthropic API with model: ${providerOptions.model}`);
// Configure request parameters
const requestParams = {
const requestParams: any = {
model: providerOptions.model,
messages: anthropicMessages,
system: systemPrompt,
@ -79,6 +84,32 @@ export class AnthropicService extends BaseAIService {
stream: !!providerOptions.stream
};
// Add tools support if provided
if (opts.tools && opts.tools.length > 0) {
log.info(`Adding ${opts.tools.length} tools to Anthropic request`);
// Convert OpenAI-style function tools to Anthropic format
const anthropicTools = this.convertToolsToAnthropicFormat(opts.tools);
requestParams.tools = anthropicTools;
// Add tool_choice parameter if specified
if (opts.tool_choice) {
if (opts.tool_choice === 'auto') {
requestParams.tool_choice = 'auto';
} else if (opts.tool_choice === 'none') {
requestParams.tool_choice = 'none';
} else if (typeof opts.tool_choice === 'object' && opts.tool_choice.function) {
// Map from OpenAI format to Anthropic format
requestParams.tool_choice = opts.tool_choice.function.name;
} else {
requestParams.tool_choice = opts.tool_choice;
}
}
}
// Log request summary
log.info(`Making ${providerOptions.stream ? 'streaming' : 'non-streaming'} request to Anthropic API with model: ${providerOptions.model}`);
// Handle streaming responses
if (providerOptions.stream) {
return this.handleStreamingResponse(client, requestParams, opts, providerOptions);
@ -92,10 +123,43 @@ export class AnthropicService extends BaseAIService {
.map((block: any) => block.text)
.join('');
// Process tool calls if any are present in the response
let toolCalls = null;
if (response.content) {
const toolBlocks = response.content.filter((block: any) =>
block.type === 'tool_use' ||
(block.type === 'tool_result' && block.tool_use_id)
);
if (toolBlocks.length > 0) {
log.info(`[DEBUG] Found ${toolBlocks.length} tool-related blocks in response`);
toolCalls = toolBlocks.map((block: any) => {
if (block.type === 'tool_use') {
log.info(`[DEBUG] Processing tool_use block: ${JSON.stringify(block, null, 2)}`);
// Convert Anthropic tool_use format to standard format expected by our app
return {
id: block.id,
type: 'function', // Convert back to function type for internal use
function: {
name: block.name,
arguments: block.input || '{}'
}
};
}
return null;
}).filter(Boolean);
log.info(`Extracted ${toolCalls.length} tool calls from Anthropic response`);
}
}
return {
text: textContent,
model: response.model,
provider: this.getName(),
tool_calls: toolCalls,
usage: {
// Anthropic provides token counts in the response
promptTokens: response.usage?.input_tokens,
@ -112,7 +176,7 @@ export class AnthropicService extends BaseAIService {
/**
* Handle streaming response from Anthropic
*
*
* Simplified implementation that leverages the Anthropic SDK's streaming capabilities
*/
private async handleStreamingResponse(
@ -124,9 +188,13 @@ export class AnthropicService extends BaseAIService {
// Create a stream handler function that processes the SDK's stream
const streamHandler = async (callback: (chunk: StreamChunk) => Promise<void> | void): Promise<string> => {
let completeText = '';
const toolCalls: any[] = [];
let currentToolCall: any = null;
try {
// Request a streaming response from Anthropic
log.info(`Starting Anthropic streaming request to: ${providerOptions.baseUrl}/v1/messages`);
const streamResponse = await client.messages.create({
...params,
stream: true
@ -134,11 +202,10 @@ export class AnthropicService extends BaseAIService {
// Process each chunk in the stream
for await (const chunk of streamResponse) {
// Only process text content deltas
if (chunk.type === 'content_block_delta' && chunk.delta?.type === 'text_delta') {
const text = chunk.delta.text || '';
completeText += text;
// Send the chunk to the caller
await callback({
text,
@ -146,17 +213,99 @@ export class AnthropicService extends BaseAIService {
raw: chunk // Include the raw chunk for advanced processing
});
}
// Process tool use events - different format in Anthropic API
else if (chunk.type === 'content_block_start' && chunk.content_block?.type === 'tool_use') {
// Start collecting a new tool call - convert to our internal format (OpenAI-like)
currentToolCall = {
id: chunk.content_block.id,
type: 'function', // Convert to function type for internal consistency
function: {
name: chunk.content_block.name,
arguments: ''
}
};
// Log the tool use event
log.info(`Streaming: Tool use started: ${chunk.content_block.name}`);
// Send the tool call event
await callback({
text: '',
done: false,
toolExecution: {
type: 'start',
tool: currentToolCall
},
raw: chunk
});
}
// Process tool input deltas
else if (chunk.type === 'content_block_delta' && chunk.delta?.type === 'tool_use_delta' && currentToolCall) {
// Accumulate tool input
if (chunk.delta.input) {
currentToolCall.function.arguments += chunk.delta.input;
// Send the tool input update
await callback({
text: '',
done: false,
toolExecution: {
type: 'update',
tool: currentToolCall
},
raw: chunk
});
}
}
// Process tool use completion
else if (chunk.type === 'content_block_stop' && currentToolCall) {
// Add the completed tool call to our list
toolCalls.push(currentToolCall);
// Log the tool completion
log.info(`Streaming: Tool use completed: ${currentToolCall.function.name}`);
// Send the tool completion event
await callback({
text: '',
done: false,
toolExecution: {
type: 'complete',
tool: currentToolCall
},
raw: chunk
});
// Reset current tool call
currentToolCall = null;
}
}
// Signal completion
await callback({
text: '',
done: true
done: true,
tool_calls: toolCalls.length > 0 ? toolCalls : undefined
});
return completeText;
} catch (error) {
log.error(`Error in Anthropic streaming: ${error}`);
// More detailed error logging
if (error instanceof Error) {
log.error(`[DEBUG] Error name: ${error.name}`);
log.error(`[DEBUG] Error message: ${error.message}`);
log.error(`[DEBUG] Error stack: ${error.stack}`);
// If there's response data in the error, log that too
const anyError = error as any;
if (anyError.response) {
log.error(`Error response status: ${anyError.response.status}`);
log.error(`Error response data: ${JSON.stringify(anyError.response.data)}`);
}
}
throw error;
}
};
@ -198,4 +347,70 @@ export class AnthropicService extends BaseAIService {
return anthropicMessages;
}
}
/**
 * Convert OpenAI-style function tools to Anthropic format.
 *
 * OpenAI uses:    { type: "function", function: { name, description, parameters } }
 * Anthropic uses: { name, description, input_schema }
 *
 * Tools already in Anthropic format (or in the temporary 'custom' wrapper)
 * are normalized and passed through. Invalid entries are logged and dropped
 * rather than failing the entire request.
 *
 * @param tools - tool definitions in OpenAI, 'custom', or native Anthropic format
 * @returns tool definitions in Anthropic's { name, description, input_schema } shape
 */
private convertToolsToAnthropicFormat(tools: any[]): any[] {
    if (!tools || tools.length === 0) {
        return [];
    }

    // Filter out invalid tools up front so one bad entry doesn't abort the request
    const validTools = tools.filter(tool => {
        if (!tool || typeof tool !== 'object') {
            log.error(`Invalid tool format (not an object)`);
            return false;
        }

        // For function tools, validate required fields
        if (tool.type === 'function') {
            if (!tool.function || !tool.function.name) {
                log.error(`Function tool missing required fields`);
                return false;
            }
        }

        return true;
    });

    if (validTools.length < tools.length) {
        log.info(`Filtered out ${tools.length - validTools.length} invalid tools`);
    }

    // Anthropic requires input_schema to be a valid JSON Schema object; a bare {}
    // does not declare `type: 'object'` and is rejected by the API, so tools with
    // no declared parameters get an explicit empty object schema instead.
    // Factory (not a shared constant) so callers can't mutate one shared schema.
    const emptyObjectSchema = () => ({ type: 'object', properties: {} });

    // Convert tools to Anthropic format
    return validTools.map((tool: any) => {
        // Convert from OpenAI format to Anthropic format
        if (tool.type === 'function' && tool.function) {
            return {
                name: tool.function.name,
                description: tool.function.description || '',
                input_schema: tool.function.parameters || emptyObjectSchema()
            };
        }

        // Handle already converted Anthropic format (from our temporary fix)
        if (tool.type === 'custom' && tool.custom) {
            return {
                name: tool.custom.name,
                description: tool.custom.description || '',
                input_schema: tool.custom.parameters || emptyObjectSchema()
            };
        }

        // If the tool is already in the correct Anthropic format
        if (tool.name && (tool.input_schema || tool.parameters)) {
            return {
                name: tool.name,
                description: tool.description || '',
                input_schema: tool.input_schema || tool.parameters
            };
        }

        log.error(`Unhandled tool format encountered`);
        return null;
    }).filter(Boolean); // Drop any entries we couldn't convert
}
}