mirror of https://github.com/TriliumNext/Notes.git, synced 2025-07-27 18:12:29 +08:00
also rip out our custom anthropic implementation
This commit is contained in:
parent 53223b5750, commit f71351db6a
package-lock.json (generated), 95 changed lines
@@ -9,6 +9,7 @@
"version": "0.92.6",
"license": "AGPL-3.0-only",
"dependencies": {
"@anthropic-ai/sdk": "0.39.0",
"@braintree/sanitize-url": "7.1.1",
"@electron/remote": "2.1.2",
"@highlightjs/cdn-assets": "11.11.1",
@@ -255,6 +256,36 @@
"url": "https://github.com/sponsors/antfu"
}
},
"node_modules/@anthropic-ai/sdk": {
"version": "0.39.0",
"resolved": "https://registry.npmjs.org/@anthropic-ai/sdk/-/sdk-0.39.0.tgz",
"integrity": "sha512-eMyDIPRZbt1CCLErRCi3exlAvNkBtRe+kW5vvJyef93PmNr/clstYgHhtvmkxN82nlKgzyGPCyGxrm0JQ1ZIdg==",
"license": "MIT",
"dependencies": {
"@types/node": "^18.11.18",
"@types/node-fetch": "^2.6.4",
"abort-controller": "^3.0.0",
"agentkeepalive": "^4.2.1",
"form-data-encoder": "1.7.2",
"formdata-node": "^4.3.2",
"node-fetch": "^2.6.7"
}
},
"node_modules/@anthropic-ai/sdk/node_modules/@types/node": {
"version": "18.19.86",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.86.tgz",
"integrity": "sha512-fifKayi175wLyKyc5qUfyENhQ1dCNI1UNjp653d8kuYcPQN5JhX3dGuP/XmvPTg/xRBn1VTLpbmi+H/Mr7tLfQ==",
"license": "MIT",
"dependencies": {
"undici-types": "~5.26.4"
}
},
"node_modules/@anthropic-ai/sdk/node_modules/undici-types": {
"version": "5.26.5",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz",
"integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==",
"license": "MIT"
},
"node_modules/@apidevtools/json-schema-ref-parser": {
"version": "9.1.2",
"resolved": "https://registry.npmjs.org/@apidevtools/json-schema-ref-parser/-/json-schema-ref-parser-9.1.2.tgz",
@@ -5272,6 +5303,16 @@
"undici-types": "~6.21.0"
}
},
"node_modules/@types/node-fetch": {
"version": "2.6.12",
"resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.12.tgz",
"integrity": "sha512-8nneRWKCg3rMtF69nLQJnOYUcbafYeFSjqkw3jCRLsqkWFlHaoQrr5mXmofFGOx3DKn7UfmBMyov8ySvLRVldA==",
"license": "MIT",
"dependencies": {
"@types/node": "*",
"form-data": "^4.0.0"
}
},
"node_modules/@types/prop-types": {
"version": "15.7.14",
"resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.14.tgz",
@@ -6260,7 +6301,6 @@
"version": "4.5.0",
"resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-4.5.0.tgz",
"integrity": "sha512-5GG/5IbQQpC9FpkRGsSvZI5QYeSCzlJHdpBQntCsuTOxhKD8lqKhrleg2Yi7yvMIf82Ycmmqln9U8V9qwEiJew==",
"dev": true,
"license": "MIT",
"dependencies": {
"humanize-ms": "^1.2.1"
@@ -10364,7 +10404,6 @@
"version": "0.1.13",
"resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.13.tgz",
"integrity": "sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==",
"dev": true,
"license": "MIT",
"optional": true,
"dependencies": {
@@ -11725,6 +11764,12 @@
"node": ">= 6"
}
},
"node_modules/form-data-encoder": {
"version": "1.7.2",
"resolved": "https://registry.npmjs.org/form-data-encoder/-/form-data-encoder-1.7.2.tgz",
"integrity": "sha512-qfqtYan3rxrnCk1VYaA4H+Ms9xdpPqvLZa6xmMgFvhO32x7/3J/ExcTd6qpxM0vH2GdMI+poehyBZvqfMTto8A==",
"license": "MIT"
},
"node_modules/form-data/node_modules/mime-db": {
"version": "1.52.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
@@ -11746,6 +11791,19 @@
"node": ">= 0.6"
}
},
"node_modules/formdata-node": {
"version": "4.4.1",
"resolved": "https://registry.npmjs.org/formdata-node/-/formdata-node-4.4.1.tgz",
"integrity": "sha512-0iirZp3uVDjVGt9p49aTaqjk84TrglENEDuqfdlZQ1roC9CWlPk6Avf8EEnZNcAqPonwkG35x4n3ww/1THYAeQ==",
"license": "MIT",
"dependencies": {
"node-domexception": "1.0.0",
"web-streams-polyfill": "4.0.0-beta.3"
},
"engines": {
"node": ">= 12.20"
}
},
"node_modules/formidable": {
"version": "3.5.2",
"resolved": "https://registry.npmjs.org/formidable/-/formidable-3.5.2.tgz",
@@ -12721,7 +12779,6 @@
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/humanize-ms/-/humanize-ms-1.2.1.tgz",
"integrity": "sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"ms": "^2.0.0"
@@ -15560,11 +15617,29 @@
"semver": "^7.3.5"
}
},
"node_modules/node-domexception": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz",
"integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/jimmywarting"
},
{
"type": "github",
"url": "https://paypal.me/jimmywarting"
}
],
"license": "MIT",
"engines": {
"node": ">=10.5.0"
}
},
"node_modules/node-fetch": {
"version": "2.7.0",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz",
"integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==",
"dev": true,
"license": "MIT",
"dependencies": {
"whatwg-url": "^5.0.0"
@@ -15585,21 +15660,18 @@
"version": "0.0.3",
"resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz",
"integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==",
"dev": true,
"license": "MIT"
},
"node_modules/node-fetch/node_modules/webidl-conversions": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
"integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==",
"dev": true,
"license": "BSD-2-Clause"
},
"node_modules/node-fetch/node_modules/whatwg-url": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz",
"integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==",
"dev": true,
"license": "MIT",
"dependencies": {
"tr46": "~0.0.3",
@@ -21109,6 +21181,15 @@
"defaults": "^1.0.3"
}
},
"node_modules/web-streams-polyfill": {
"version": "4.0.0-beta.3",
"resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-4.0.0-beta.3.tgz",
"integrity": "sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug==",
"license": "MIT",
"engines": {
"node": ">= 14"
}
},
"node_modules/web-worker": {
"version": "1.5.0",
"resolved": "https://registry.npmjs.org/web-worker/-/web-worker-1.5.0.tgz",
@@ -71,6 +71,7 @@
"chore:generate-openapi": "tsx bin/generate-openapi.js"
},
"dependencies": {
"@anthropic-ai/sdk": "0.39.0",
"@braintree/sanitize-url": "7.1.1",
"@electron/remote": "2.1.2",
"@highlightjs/cdn-assets": "11.11.1",
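The dependency added above is the official Anthropic TypeScript SDK, which replaces the hand-rolled HTTP calls in the files below. A minimal sketch of constructing the client (the concrete values are illustrative; Trilium reads them from its options service and PROVIDER_CONSTANTS):

import Anthropic from '@anthropic-ai/sdk';

// Illustrative configuration; in Trilium these come from options.getOption(...)
// and PROVIDER_CONSTANTS.ANTHROPIC.
const client = new Anthropic({
    apiKey: process.env.ANTHROPIC_API_KEY,   // anthropicApiKey option
    baseURL: 'https://api.anthropic.com',    // anthropicBaseUrl option
    defaultHeaders: {
        'anthropic-version': '2023-06-01'    // PROVIDER_CONSTANTS.ANTHROPIC.API_VERSION
    }
});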
@@ -1,8 +1,8 @@
import axios from 'axios';
import options from "../../services/options.js";
import log from "../../services/log.js";
import type { Request, Response } from "express";
import { PROVIDER_CONSTANTS } from '../../services/llm/constants/provider_constants.js';
import Anthropic from '@anthropic-ai/sdk';

// Interface for Anthropic model entries
interface AnthropicModel {
@@ -69,10 +69,10 @@ async function listModels(req: Request, res: Response) {
try {
const { baseUrl } = req.body;

// Use provided base URL or default from options, and ensure correct formatting
let anthropicBaseUrl = baseUrl || await options.getOption('anthropicBaseUrl') || PROVIDER_CONSTANTS.ANTHROPIC.BASE_URL;
// Ensure base URL doesn't already include '/v1' and is properly formatted
anthropicBaseUrl = anthropicBaseUrl.replace(/\/+$/, '').replace(/\/v1$/, '');
// Use provided base URL or default from options
const anthropicBaseUrl = baseUrl ||
    await options.getOption('anthropicBaseUrl') ||
    PROVIDER_CONSTANTS.ANTHROPIC.BASE_URL;

const apiKey = await options.getOption('anthropicApiKey');

@@ -80,32 +80,34 @@ async function listModels(req: Request, res: Response) {
throw new Error('Anthropic API key is not configured');
}

log.info(`Listing models from Anthropic API at: ${anthropicBaseUrl}/v1/models`);
log.info(`Listing models from Anthropic API using the SDK`);

// Call Anthropic API to get models
const response = await axios.get(`${anthropicBaseUrl}/v1/models`, {
    headers: {
        'Content-Type': 'application/json',
        'X-Api-Key': apiKey,
// Initialize the Anthropic client with the SDK
const client = new Anthropic({
    apiKey,
    baseURL: anthropicBaseUrl,
    defaultHeaders: {
        'anthropic-version': PROVIDER_CONSTANTS.ANTHROPIC.API_VERSION,
        'anthropic-beta': PROVIDER_CONSTANTS.ANTHROPIC.BETA_VERSION
    },
    timeout: 10000
    }
});

// Use the SDK's built-in models listing
const response = await client.models.list();

// Process the models
const allModels = response.data.models || [];
const allModels = response.data || [];

// Log available models
log.info(`Found ${allModels.length} models from Anthropic: ${allModels.map((m: any) => m.id).join(', ')}`);
log.info(`Found ${allModels.length} models from Anthropic: ${allModels.map(m => m.id).join(', ')}`);

// Separate models into chat models and embedding models
const chatModels = allModels
    .filter((model: any) =>
    .filter(model =>
        // Claude models are for chat
        model.id.includes('claude')
    )
    .map((model: any) => {
    .map(model => {
        // Get a simplified name for display purposes
        let displayName = model.id;
        // Try to simplify the model name by removing version suffixes
@@ -134,11 +136,11 @@ async function listModels(req: Request, res: Response) {

// Note: Anthropic might not have embedding models yet, but we'll include this for future compatibility
const embeddingModels = allModels
    .filter((model: any) =>
    .filter(model =>
        // If Anthropic releases embedding models, they'd likely include 'embed' in the name
        model.id.includes('embed')
    )
    .map((model: any) => ({
    .map(model => ({
        id: model.id,
        name: model.id,
        type: 'embedding'
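A condensed sketch of the new SDK-based listing shown above: models.list() replaces the manual GET /v1/models call, and the route then splits the result into chat and (hypothetical) embedding models. The type field on the chat entries and the omission of the display-name handling are illustrative simplifications:

// List models through the SDK; response.data is the array of model entries.
const response = await client.models.list();
const allModels = response.data || [];

// Claude models are treated as chat models; embedding models would contain 'embed'.
const chatModels = allModels
    .filter(model => model.id.includes('claude'))
    .map(model => ({ id: model.id, name: model.id, type: 'chat' }));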
@@ -1,203 +0,0 @@
import sanitizeHtml from 'sanitize-html';
import type { Message } from '../ai_interface.js';
import { BaseMessageFormatter } from './base_formatter.js';
import { PROVIDER_PROMPTS } from '../constants/llm_prompt_constants.js';
import { LLM_CONSTANTS } from '../constants/provider_constants.js';
import {
    HTML_ALLOWED_TAGS,
    HTML_ALLOWED_ATTRIBUTES,
    FORMATTER_LOGS,
    HTML_TO_MARKDOWN_PATTERNS,
    HTML_ENTITY_REPLACEMENTS
} from '../constants/formatter_constants.js';

/**
 * Anthropic-specific message formatter
 * Optimized for Claude's API and preferences
 */
export class AnthropicMessageFormatter extends BaseMessageFormatter {
    /**
     * Maximum recommended context length for Anthropic models
     * Claude has a very large context window
     */
    private static MAX_CONTEXT_LENGTH = LLM_CONSTANTS.CONTEXT_WINDOW.ANTHROPIC;

    /**
     * Format messages for the Anthropic API
     */
    formatMessages(messages: Message[], systemPrompt?: string, context?: string): Message[] {
        const formattedMessages: Message[] = [];

        // For Anthropic, system prompts work best as the first user message with <instructions> XML tags
        // First, collect all non-system messages
        const userAssistantMessages = messages.filter(msg => msg.role === 'user' || msg.role === 'assistant');

        // For Anthropic, we need to handle context differently
        // 1. If explicit context is provided, we format it with XML tags
        if (context) {
            // Build the system message with context
            const formattedContext = PROVIDER_PROMPTS.ANTHROPIC.SYSTEM_WITH_CONTEXT(
                this.cleanContextContent(context)
            );

            // If there's at least one user message, add the context to the first one
            if (userAssistantMessages.length > 0 && userAssistantMessages[0].role === 'user') {
                // Add system as a new first message
                formattedMessages.push({
                    role: 'user',
                    content: formattedContext
                });

                // Add system response acknowledgment
                formattedMessages.push({
                    role: 'assistant',
                    content: PROVIDER_PROMPTS.ANTHROPIC.CONTEXT_ACKNOWLEDGMENT
                });

                // Add remaining messages
                for (const msg of userAssistantMessages) {
                    formattedMessages.push(msg);
                }
            }
            // If no user messages, create a placeholder
            else {
                formattedMessages.push({
                    role: 'user',
                    content: formattedContext
                });

                formattedMessages.push({
                    role: 'assistant',
                    content: PROVIDER_PROMPTS.ANTHROPIC.CONTEXT_QUERY_ACKNOWLEDGMENT
                });

                // Add any existing assistant messages if they exist
                const assistantMsgs = userAssistantMessages.filter(msg => msg.role === 'assistant');
                for (const msg of assistantMsgs) {
                    formattedMessages.push(msg);
                }
            }
        }
        // 2. If no explicit context but we have system messages, convert them to Claude format
        else if (messages.some(msg => msg.role === 'system')) {
            // Get system messages
            const systemMessages = messages.filter(msg => msg.role === 'system');

            // Build system content with XML tags
            const systemContent = PROVIDER_PROMPTS.ANTHROPIC.INSTRUCTIONS_WRAPPER(
                systemMessages.map(msg => this.cleanContextContent(msg.content)).join('\n\n')
            );

            // Add as first user message
            formattedMessages.push({
                role: 'user',
                content: systemContent
            });

            // Add assistant acknowledgment
            formattedMessages.push({
                role: 'assistant',
                content: PROVIDER_PROMPTS.ANTHROPIC.ACKNOWLEDGMENT
            });

            // Add remaining user/assistant messages
            for (const msg of userAssistantMessages) {
                formattedMessages.push(msg);
            }
        }
        // 3. Just a system prompt, no context
        else if (systemPrompt) {
            // Add as first user message with XML tags
            formattedMessages.push({
                role: 'user',
                content: PROVIDER_PROMPTS.ANTHROPIC.INSTRUCTIONS_WRAPPER(systemPrompt)
            });

            // Add assistant acknowledgment
            formattedMessages.push({
                role: 'assistant',
                content: PROVIDER_PROMPTS.ANTHROPIC.ACKNOWLEDGMENT
            });

            // Add all other messages
            for (const msg of userAssistantMessages) {
                formattedMessages.push(msg);
            }
        }
        // 4. No system prompt, use default from constants
        else if (userAssistantMessages.length > 0) {
            // Add default system prompt with XML tags
            formattedMessages.push({
                role: 'user',
                content: PROVIDER_PROMPTS.ANTHROPIC.INSTRUCTIONS_WRAPPER(this.getDefaultSystemPrompt())
            });

            // Add assistant acknowledgment
            formattedMessages.push({
                role: 'assistant',
                content: PROVIDER_PROMPTS.ANTHROPIC.ACKNOWLEDGMENT
            });

            // Add all user messages
            for (const msg of userAssistantMessages) {
                formattedMessages.push(msg);
            }
        }
        // 5. No special handling needed
        else {
            // Just add all messages as-is
            for (const msg of userAssistantMessages) {
                formattedMessages.push(msg);
            }
        }

        console.log(FORMATTER_LOGS.ANTHROPIC.PROCESSED(messages.length, formattedMessages.length));
        return formattedMessages;
    }

    /**
     * Clean context content for Anthropic
     * Claude works well with XML-structured content
     */
    cleanContextContent(content: string): string {
        if (!content) return '';

        try {
            // Convert HTML to a Claude-friendly format
            const cleaned = sanitizeHtml(content, {
                allowedTags: HTML_ALLOWED_TAGS.STANDARD,
                allowedAttributes: HTML_ALLOWED_ATTRIBUTES.STANDARD
            });

            // Convert to markdown but preserve some structure
            let markdown = cleaned;

            // Apply all standard HTML to Markdown patterns
            const patterns = HTML_TO_MARKDOWN_PATTERNS;
            for (const pattern of Object.values(patterns)) {
                markdown = markdown.replace(pattern.pattern, pattern.replacement);
            }

            // Process lists - use the parent class method
            markdown = this.processListItems(markdown);

            // Fix common HTML entities
            const entityPatterns = HTML_ENTITY_REPLACEMENTS;
            for (const pattern of Object.values(entityPatterns)) {
                markdown = markdown.replace(pattern.pattern, pattern.replacement);
            }

            return markdown.trim();
        } catch (error) {
            console.error(FORMATTER_LOGS.ERROR.CONTEXT_CLEANING("Anthropic"), error);
            return content; // Return original if cleaning fails
        }
    }

    /**
     * Get the maximum recommended context length for Anthropic
     */
    getMaxContextLength(): number {
        return AnthropicMessageFormatter.MAX_CONTEXT_LENGTH;
    }
}
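The deleted formatter above emulated system prompts by injecting an <instructions> block as a synthetic first user turn followed by an assistant acknowledgment. With the SDK this is unnecessary, because the system prompt is a top-level request parameter. A minimal sketch of the replacement pattern (model id and prompt text are illustrative):

const response = await client.messages.create({
    model: 'claude-3-7-sonnet-20250219',   // illustrative model id
    system: 'You are a helpful assistant inside Trilium Notes.',
    messages: [{ role: 'user', content: 'Summarize the selected note.' }],
    max_tokens: 1024
});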
@@ -1,17 +1,15 @@
import options from '../../options.js';
import { BaseAIService } from '../base_ai_service.js';
import type { ChatCompletionOptions, ChatResponse, Message } from '../ai_interface.js';
import type { ChatCompletionOptions, ChatResponse, Message, StreamChunk } from '../ai_interface.js';
import { PROVIDER_CONSTANTS } from '../constants/provider_constants.js';
import type { AnthropicOptions } from './provider_options.js';
import { getAnthropicOptions } from './providers.js';
import log from '../../log.js';

interface AnthropicMessage {
    role: string;
    content: string;
}
import Anthropic from '@anthropic-ai/sdk';

export class AnthropicService extends BaseAIService {
    private client: any = null;

    constructor() {
        super('Anthropic');
    }
@@ -20,6 +18,20 @@ export class AnthropicService extends BaseAIService {
        return super.isAvailable() && !!options.getOption('anthropicApiKey');
    }

    private getClient(apiKey: string, baseUrl: string, apiVersion?: string, betaVersion?: string): any {
        if (!this.client) {
            this.client = new Anthropic({
                apiKey,
                baseURL: baseUrl,
                defaultHeaders: {
                    'anthropic-version': apiVersion || PROVIDER_CONSTANTS.ANTHROPIC.API_VERSION,
                    'anthropic-beta': betaVersion || PROVIDER_CONSTANTS.ANTHROPIC.BETA_VERSION
                }
            });
        }
        return this.client;
    }

    async generateChatCompletion(messages: Message[], opts: ChatCompletionOptions = {}): Promise<ChatResponse> {
        if (!this.isAvailable()) {
            throw new Error('Anthropic service is not available. Check API key and AI settings.');
@@ -39,104 +51,163 @@ export class AnthropicService extends BaseAIService {
    }
}

// Get system prompt
const systemPrompt = this.getSystemPrompt(providerOptions.systemPrompt || options.getOption('aiSystemPrompt'));

// Format for Anthropic's API
const formattedMessages = this.formatMessages(messages, systemPrompt);

// Store the formatted messages in the provider options for future reference
providerOptions.formattedMessages = formattedMessages;
// Format messages for Anthropic's API
const anthropicMessages = this.formatMessages(messages);

try {
    // Ensure base URL doesn't already include '/v1' and build the complete endpoint
    const cleanBaseUrl = providerOptions.baseUrl.replace(/\/+$/, '').replace(/\/v1$/, '');
    const endpoint = `${cleanBaseUrl}/v1/messages`;
    // Initialize the Anthropic client
    const client = this.getClient(
        providerOptions.apiKey,
        providerOptions.baseUrl,
        providerOptions.apiVersion,
        providerOptions.betaVersion
    );

    console.log(`Anthropic API endpoint: ${endpoint}`);
    console.log(`Using model: ${providerOptions.model}`);
    log.info(`Using Anthropic API with model: ${providerOptions.model}`);

    // Create request body directly from provider options
    const requestBody: any = {
    // Configure request parameters
    const requestParams = {
        model: providerOptions.model,
        messages: formattedMessages.messages,
        system: formattedMessages.system,
    };

    // Extract API parameters from provider options
    const apiParams = {
        messages: anthropicMessages,
        system: systemPrompt,
        max_tokens: providerOptions.max_tokens || 4096,
        temperature: providerOptions.temperature,
        max_tokens: providerOptions.max_tokens,
        stream: providerOptions.stream,
        top_p: providerOptions.top_p
        top_p: providerOptions.top_p,
        stream: !!providerOptions.stream
    };

    // Merge API parameters, filtering out undefined values
    Object.entries(apiParams).forEach(([key, value]) => {
        if (value !== undefined) {
            requestBody[key] = value;
        }
    });
    // Handle streaming responses
    if (providerOptions.stream) {
        return this.handleStreamingResponse(client, requestParams, opts, providerOptions);
    } else {
        // Non-streaming request
        const response = await client.messages.create(requestParams);

    const response = await fetch(endpoint, {
        method: 'POST',
        headers: {
            'Content-Type': 'application/json',
            'X-Api-Key': providerOptions.apiKey,
            'anthropic-version': providerOptions.apiVersion || PROVIDER_CONSTANTS.ANTHROPIC.API_VERSION,
            'anthropic-beta': providerOptions.betaVersion || PROVIDER_CONSTANTS.ANTHROPIC.BETA_VERSION
        },
        body: JSON.stringify(requestBody)
    });

    if (!response.ok) {
        const errorBody = await response.text();
        console.error(`Anthropic API error (${response.status}): ${errorBody}`);
        throw new Error(`Anthropic API error: ${response.status} ${response.statusText} - ${errorBody}`);
    }

    const data = await response.json();
        // Get the assistant's response text from the content blocks
        const textContent = response.content
            .filter((block: any) => block.type === 'text')
            .map((block: any) => block.text)
            .join('');

        return {
            text: data.content[0].text,
            model: data.model,
            text: textContent,
            model: response.model,
            provider: this.getName(),
            usage: {
                // Anthropic doesn't provide token usage in the same format as OpenAI
                // but we can still estimate based on input/output length
                totalTokens: data.usage?.input_tokens + data.usage?.output_tokens
                // Anthropic provides token counts in the response
                promptTokens: response.usage?.input_tokens,
                completionTokens: response.usage?.output_tokens,
                totalTokens: (response.usage?.input_tokens || 0) + (response.usage?.output_tokens || 0)
            }
        };
    }
} catch (error) {
    console.error('Anthropic service error:', error);
    log.error(`Anthropic service error: ${error}`);
    throw error;
}
}
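The non-streaming branch above depends on the SDK's response shape: content is an array of blocks (text blocks carry a text field) and usage reports input_tokens/output_tokens. A compact sketch of that mapping, assuming a response returned by client.messages.create():

// Concatenate only the text blocks; other block types (e.g. tool_use) are ignored here.
const text = response.content
    .filter((block: any) => block.type === 'text')
    .map((block: any) => block.text)
    .join('');

const usage = {
    promptTokens: response.usage?.input_tokens,
    completionTokens: response.usage?.output_tokens,
    totalTokens: (response.usage?.input_tokens || 0) + (response.usage?.output_tokens || 0)
};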
/**
 * Handle streaming response from Anthropic
 */
private async handleStreamingResponse(
    client: any,
    params: any,
    opts: ChatCompletionOptions,
    providerOptions: AnthropicOptions
): Promise<ChatResponse> {
    let completeText = '';

    // Create a function that will return a Promise that resolves with the final text
    const streamHandler = async (callback: (chunk: StreamChunk) => Promise<void> | void): Promise<string> => {
        try {
            const streamResponse = await client.messages.create({
                ...params,
                stream: true
            });

            for await (const chunk of streamResponse) {
                if (chunk.type === 'content_block_delta' && chunk.delta?.type === 'text_delta') {
                    const text = chunk.delta.text || '';
                    completeText += text;

                    // Call the callback with the chunk
                    await callback({
                        text,
                        done: false,
                        usage: {} // Usage stats not available in chunks
                    });
                }
            }

            // Signal completion
            await callback({
                text: '',
                done: true,
                usage: {
                    // We don't have token usage information in streaming mode from the chunks
                    totalTokens: completeText.length / 4 // Rough estimate
                }
            });

            return completeText;
        } catch (error) {
            log.error(`Error in Anthropic streaming: ${error}`);
            throw error;
        }
    };

    // If a stream callback was provided in the options, set up immediate streaming
    if (opts.streamCallback) {
        // Start streaming in the background
        void streamHandler(async (chunk) => {
            if (opts.streamCallback) {
                await opts.streamCallback(chunk.text, chunk.done);
            }
        });
    }

    return {
        text: completeText, // This will be empty initially until streaming completes
        model: providerOptions.model,
        provider: this.getName(),
        stream: streamHandler,
        usage: {
            // We don't have token counts initially with streaming
            totalTokens: 0
        }
    };
}
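A short usage sketch for the streaming path: the ChatResponse returned above exposes a stream(callback) function that resolves to the full text once the SDK stream finishes. The anthropicService and messages variables are illustrative, and the example assumes ChatCompletionOptions.stream maps through getAnthropicOptions as shown:

const result = await anthropicService.generateChatCompletion(messages, { stream: true });

if (result.stream) {
    const fullText = await result.stream(async (chunk) => {
        process.stdout.write(chunk.text);   // incremental text deltas
        if (chunk.done) {
            // stream finished; usage only carries a rough token estimate
        }
    });
    console.log(`\nReceived ${fullText.length} streamed characters`);
}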
/**
 * Format messages for the Anthropic API
 */
private formatMessages(messages: Message[], systemPrompt: string): { messages: AnthropicMessage[], system: string } {
    const formattedMessages: AnthropicMessage[] = [];

    // Extract the system message if present
    let sysPrompt = systemPrompt;
private formatMessages(messages: Message[]): any[] {
    const anthropicMessages: any[] = [];

    // Process each message
    for (const msg of messages) {
        if (msg.role === 'system') {
            // Anthropic handles system messages separately
            sysPrompt = msg.content;
        } else {
            formattedMessages.push({
            // System messages are handled separately in the API call
            continue;
        } else if (msg.role === 'user' || msg.role === 'assistant') {
            // Convert to Anthropic format
            anthropicMessages.push({
                role: msg.role,
                content: msg.content
            });
        } else if (msg.role === 'tool') {
            // Tool response messages - typically follow a tool call from the assistant
            anthropicMessages.push({
                role: 'user',
                content: msg.content
            });
        }
    }

    return {
        messages: formattedMessages,
        system: sysPrompt
    };
    return anthropicMessages;
}
}
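To illustrate the new formatMessages contract from the branches above: system messages are skipped (they travel via the system parameter), user and assistant messages pass through unchanged, and tool results are forwarded as user turns. A hypothetical input/output pair:

const input: Message[] = [
    { role: 'system', content: 'Be terse.' },      // skipped here, sent as `system`
    { role: 'user', content: 'What is 2 + 2?' },
    { role: 'assistant', content: 'It is 4.' },
    { role: 'tool', content: '{"result": 4}' }     // forwarded as a user turn
];
// formatMessages(input) =>
// [
//   { role: 'user', content: 'What is 2 + 2?' },
//   { role: 'assistant', content: 'It is 4.' },
//   { role: 'user', content: '{"result": 4}' }
// ]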