From c49883fdfa1840e9126779b06300c8c92ebc77f4 Mon Sep 17 00:00:00 2001
From: perf3ct
Date: Wed, 26 Mar 2025 17:56:37 +0000
Subject: [PATCH] move constants to their own files and folder

---
 src/routes/api/anthropic.ts                   | 15 ++++----
 src/routes/api/llm.ts                         |  2 +-
 .../agent_tools/contextual_thinking_tool.ts   |  2 +-
 .../agent_tools/query_decomposition_tool.ts   |  2 +-
 src/services/llm/base_ai_service.ts           |  2 +-
 .../llm_prompt_constants.ts                   |  0
 .../llm/constants/provider_constants.ts       | 35 +++++++++++++++++++
 .../llm/context/modules/context_formatter.ts  |  2 +-
 .../llm/context/modules/context_service.ts    |  2 +-
 .../llm/context/modules/query_enhancer.ts     |  2 +-
 src/services/llm/index_service.ts             |  2 +-
 .../llm/providers/anthropic_service.ts        | 15 +++-----
 12 files changed, 56 insertions(+), 25 deletions(-)
 rename src/services/llm/{prompts => constants}/llm_prompt_constants.ts (100%)
 create mode 100644 src/services/llm/constants/provider_constants.ts

diff --git a/src/routes/api/anthropic.ts b/src/routes/api/anthropic.ts
index e8fed4fda..e314988c0 100644
--- a/src/routes/api/anthropic.ts
+++ b/src/routes/api/anthropic.ts
@@ -2,6 +2,7 @@ import axios from 'axios';
 import options from "../../services/options.js";
 import log from "../../services/log.js";
 import type { Request, Response } from "express";
+import { PROVIDER_CONSTANTS } from '../../services/llm/constants/provider_constants.js';
 
 // Map of simplified model names to full model names with versions
 const MODEL_MAPPING: Record<string, string> = {
@@ -26,7 +27,7 @@ async function listModels(req: Request, res: Response) {
     const { baseUrl } = req.body;
 
     // Use provided base URL or default from options, and ensure correct formatting
-    let anthropicBaseUrl = baseUrl || await options.getOption('anthropicBaseUrl') || 'https://api.anthropic.com';
+    let anthropicBaseUrl = baseUrl || await options.getOption('anthropicBaseUrl') || PROVIDER_CONSTANTS.ANTHROPIC.BASE_URL;
 
     // Ensure base URL doesn't already include '/v1' and is properly formatted
     anthropicBaseUrl = anthropicBaseUrl.replace(/\/+$/, '').replace(/\/v1$/, '');
@@ -43,8 +44,8 @@ async function listModels(req: Request, res: Response) {
             headers: {
                 'Content-Type': 'application/json',
                 'X-Api-Key': apiKey,
-                'anthropic-version': '2023-06-01',
-                'anthropic-beta': 'messages-2023-12-15'
+                'anthropic-version': PROVIDER_CONSTANTS.ANTHROPIC.API_VERSION,
+                'anthropic-beta': PROVIDER_CONSTANTS.ANTHROPIC.BETA_VERSION
             },
             timeout: 10000
         });
@@ -77,12 +78,12 @@ async function listModels(req: Request, res: Response) {
         });
 
         // Also include known models that might not be returned by the API
-        for (const [simpleName, fullName] of Object.entries(MODEL_MAPPING)) {
+        for (const model of PROVIDER_CONSTANTS.ANTHROPIC.AVAILABLE_MODELS) {
            // Check if this model is already in our list
-            if (!chatModels.some((m: AnthropicModel) => m.id === fullName)) {
+            if (!chatModels.some((m: AnthropicModel) => m.id === model.id)) {
                 chatModels.push({
-                    id: fullName,
-                    name: simpleName,
+                    id: model.id,
+                    name: model.name,
                     type: 'chat'
                 });
             }
diff --git a/src/routes/api/llm.ts b/src/routes/api/llm.ts
index 8be66484f..c4bf97603 100644
--- a/src/routes/api/llm.ts
+++ b/src/routes/api/llm.ts
@@ -13,7 +13,7 @@ import contextService from "../../services/llm/context_service.js";
 import sql from "../../services/sql.js";
 // Import the index service for knowledge base management
 import indexService from "../../services/llm/index_service.js";
-import { CONTEXT_PROMPTS } from '../../services/llm/prompts/llm_prompt_constants.js';
+import { CONTEXT_PROMPTS } from '../../services/llm/constants/llm_prompt_constants.js';
 
 // LLM service constants
 export const LLM_CONSTANTS = {
diff --git a/src/services/llm/agent_tools/contextual_thinking_tool.ts b/src/services/llm/agent_tools/contextual_thinking_tool.ts
index 65acf334a..113f0e165 100644
--- a/src/services/llm/agent_tools/contextual_thinking_tool.ts
+++ b/src/services/llm/agent_tools/contextual_thinking_tool.ts
@@ -15,7 +15,7 @@
 
 import log from "../../log.js";
 import aiServiceManager from "../ai_service_manager.js";
-import { AGENT_TOOL_PROMPTS } from '../prompts/llm_prompt_constants.js';
+import { AGENT_TOOL_PROMPTS } from '../constants/llm_prompt_constants.js';
 
 /**
  * Represents a single reasoning step taken by the agent
diff --git a/src/services/llm/agent_tools/query_decomposition_tool.ts b/src/services/llm/agent_tools/query_decomposition_tool.ts
index 99c871afe..2e65ada76 100644
--- a/src/services/llm/agent_tools/query_decomposition_tool.ts
+++ b/src/services/llm/agent_tools/query_decomposition_tool.ts
@@ -13,7 +13,7 @@
  */
 
 import log from '../../log.js';
-import { AGENT_TOOL_PROMPTS } from '../prompts/llm_prompt_constants.js';
+import { AGENT_TOOL_PROMPTS } from '../constants/llm_prompt_constants.js';
 
 export interface SubQuery {
     id: string;
diff --git a/src/services/llm/base_ai_service.ts b/src/services/llm/base_ai_service.ts
index daf63a7ec..3c6e05bc7 100644
--- a/src/services/llm/base_ai_service.ts
+++ b/src/services/llm/base_ai_service.ts
@@ -1,6 +1,6 @@
 import options from '../options.js';
 import type { AIService, ChatCompletionOptions, ChatResponse, Message } from './ai_interface.js';
-import { DEFAULT_SYSTEM_PROMPT } from './prompts/llm_prompt_constants.js';
+import { DEFAULT_SYSTEM_PROMPT } from './constants/llm_prompt_constants.js';
 
 export abstract class BaseAIService implements AIService {
     protected name: string;
diff --git a/src/services/llm/prompts/llm_prompt_constants.ts b/src/services/llm/constants/llm_prompt_constants.ts
similarity index 100%
rename from src/services/llm/prompts/llm_prompt_constants.ts
rename to src/services/llm/constants/llm_prompt_constants.ts
diff --git a/src/services/llm/constants/provider_constants.ts b/src/services/llm/constants/provider_constants.ts
new file mode 100644
index 000000000..43db2b1c0
--- /dev/null
+++ b/src/services/llm/constants/provider_constants.ts
@@ -0,0 +1,35 @@
+export const PROVIDER_CONSTANTS = {
+    ANTHROPIC: {
+        API_VERSION: '2023-06-01',
+        BETA_VERSION: 'messages-2023-12-15',
+        BASE_URL: 'https://api.anthropic.com',
+        DEFAULT_MODEL: 'claude-3-haiku-20240307',
+        // These are the currently available models from Anthropic
+        AVAILABLE_MODELS: [
+            {
+                id: 'claude-3-opus-20240229',
+                name: 'Claude 3 Opus',
+                description: 'Most capable model for highly complex tasks',
+                maxTokens: 4096
+            },
+            {
+                id: 'claude-3-sonnet-20240229',
+                name: 'Claude 3 Sonnet',
+                description: 'Ideal balance of intelligence and speed',
+                maxTokens: 4096
+            },
+            {
+                id: 'claude-3-haiku-20240307',
+                name: 'Claude 3 Haiku',
+                description: 'Fastest and most compact model',
+                maxTokens: 4096
+            },
+            {
+                id: 'claude-2.1',
+                name: 'Claude 2.1',
+                description: 'Previous generation model',
+                maxTokens: 4096
+            }
+        ]
+    }
+} as const;
diff --git a/src/services/llm/context/modules/context_formatter.ts b/src/services/llm/context/modules/context_formatter.ts
index 0e89952dd..11898c3d9 100644
--- a/src/services/llm/context/modules/context_formatter.ts
+++ b/src/services/llm/context/modules/context_formatter.ts
@@ -1,6 +1,6 @@
 import sanitizeHtml from 'sanitize-html';
 import log from '../../../log.js';
-import { CONTEXT_PROMPTS } from '../../prompts/llm_prompt_constants.js';
+import { CONTEXT_PROMPTS } from '../../constants/llm_prompt_constants.js';
 
 // Constants for context window sizes, defines in-module to avoid circular dependencies
 const CONTEXT_WINDOW = {
diff --git a/src/services/llm/context/modules/context_service.ts b/src/services/llm/context/modules/context_service.ts
index baefc2691..87d97c19a 100644
--- a/src/services/llm/context/modules/context_service.ts
+++ b/src/services/llm/context/modules/context_service.ts
@@ -6,7 +6,7 @@ import queryEnhancer from './query_enhancer.js';
 import contextFormatter from './context_formatter.js';
 import aiServiceManager from '../../ai_service_manager.js';
 import { ContextExtractor } from '../index.js';
-import { CONTEXT_PROMPTS } from '../../prompts/llm_prompt_constants.js';
+import { CONTEXT_PROMPTS } from '../../constants/llm_prompt_constants.js';
 import becca from '../../../../becca/becca.js';
 
 /**
diff --git a/src/services/llm/context/modules/query_enhancer.ts b/src/services/llm/context/modules/query_enhancer.ts
index 6c63ff660..72a2a6639 100644
--- a/src/services/llm/context/modules/query_enhancer.ts
+++ b/src/services/llm/context/modules/query_enhancer.ts
@@ -1,7 +1,7 @@
 import log from '../../../log.js';
 import cacheManager from './cache_manager.js';
 import type { Message } from '../../ai_interface.js';
-import { CONTEXT_PROMPTS } from '../../prompts/llm_prompt_constants.js';
+import { CONTEXT_PROMPTS } from '../../constants/llm_prompt_constants.js';
 
 /**
  * Provides utilities for enhancing queries and generating search queries
diff --git a/src/services/llm/index_service.ts b/src/services/llm/index_service.ts
index c67f6e5b8..a5446244f 100644
--- a/src/services/llm/index_service.ts
+++ b/src/services/llm/index_service.ts
@@ -20,7 +20,7 @@ import type { NoteEmbeddingContext } from "./embeddings/embeddings_interface.js"
 import type { OptionDefinitions } from "../options_interface.js";
 import sql from "../sql.js";
 import sqlInit from "../sql_init.js";
-import { CONTEXT_PROMPTS } from './prompts/llm_prompt_constants.js';
+import { CONTEXT_PROMPTS } from './constants/llm_prompt_constants.js';
 
 class IndexService {
     private initialized = false;
diff --git a/src/services/llm/providers/anthropic_service.ts b/src/services/llm/providers/anthropic_service.ts
index c4b5abc00..83d2f9895 100644
--- a/src/services/llm/providers/anthropic_service.ts
+++ b/src/services/llm/providers/anthropic_service.ts
@@ -1,6 +1,7 @@
 import options from '../../options.js';
 import { BaseAIService } from '../base_ai_service.js';
 import type { ChatCompletionOptions, ChatResponse, Message } from '../ai_interface.js';
+import { PROVIDER_CONSTANTS } from '../constants/provider_constants.js';
 
 export class AnthropicService extends BaseAIService {
     // Map of simplified model names to full model names with versions
@@ -25,14 +26,8 @@ export class AnthropicService extends BaseAIService {
         }
 
         const apiKey = options.getOption('anthropicApiKey');
-        const baseUrl = options.getOption('anthropicBaseUrl') || 'https://api.anthropic.com';
-        let model = opts.model || options.getOption('anthropicDefaultModel') || 'claude-3-haiku-20240307';
-
-        // Apply model name mapping if needed
-        if (AnthropicService.MODEL_MAPPING[model]) {
-            model = AnthropicService.MODEL_MAPPING[model];
-            console.log(`Mapped model name to: ${model}`);
-        }
+        const baseUrl = options.getOption('anthropicBaseUrl') || PROVIDER_CONSTANTS.ANTHROPIC.BASE_URL;
+        const model = opts.model || options.getOption('anthropicDefaultModel') || PROVIDER_CONSTANTS.ANTHROPIC.DEFAULT_MODEL;
 
         const temperature = opts.temperature !== undefined
             ? opts.temperature
@@ -56,8 +51,8 @@ export class AnthropicService extends BaseAIService {
             headers: {
                 'Content-Type': 'application/json',
                 'X-Api-Key': apiKey,
-                'anthropic-version': '2023-06-01',
-                'anthropic-beta': 'messages-2023-12-15'
+                'anthropic-version': PROVIDER_CONSTANTS.ANTHROPIC.API_VERSION,
+                'anthropic-beta': PROVIDER_CONSTANTS.ANTHROPIC.BETA_VERSION
             },
             body: JSON.stringify({
                 model,