diff --git a/src/routes/api/llm.ts b/src/routes/api/llm.ts index f7c6ba326..e15859ed7 100644 --- a/src/routes/api/llm.ts +++ b/src/routes/api/llm.ts @@ -13,6 +13,7 @@ import contextService from "../../services/llm/context_service.js"; import sql from "../../services/sql.js"; // Import the index service for knowledge base management import indexService from "../../services/llm/index_service.js"; +import { CONTEXT_PROMPTS } from '../../services/llm/llm_prompt_constants.js'; // LLM service constants export const LLM_CONSTANTS = { @@ -504,7 +505,7 @@ async function findRelevantNotes(content: string, contextNoteId: string | null = } /** - * Build context from notes + * Build a prompt with context from relevant notes */ function buildContextFromNotes(sources: NoteSource[], query: string): string { console.log("Building context from notes with query:", query); @@ -529,14 +530,10 @@ function buildContextFromNotes(sources: NoteSource[], query: string): string { return query || ''; } - // Build a complete context prompt with clearer instructions - return `I'll provide you with relevant information from my notes to help answer your question. - -${noteContexts} - -When referring to information from these notes in your response, please cite them by their titles (e.g., "According to your note on [Title]...") rather than using labels like "Note 1" or "Note 2". 
- -Now, based on the above information, please answer: ${query}`; + // Use the template from the constants file, replacing placeholders + return CONTEXT_PROMPTS.CONTEXT_NOTES_WRAPPER + .replace('{noteContexts}', noteContexts) + .replace('{query}', query); } /** diff --git a/src/services/llm/agent_tools/contextual_thinking_tool.ts b/src/services/llm/agent_tools/contextual_thinking_tool.ts index 37db17293..139f3e604 100644 --- a/src/services/llm/agent_tools/contextual_thinking_tool.ts +++ b/src/services/llm/agent_tools/contextual_thinking_tool.ts @@ -15,6 +15,7 @@ import log from "../../log.js"; import aiServiceManager from "../ai_service_manager.js"; +import { AGENT_TOOL_PROMPTS } from '../llm_prompt_constants.js'; /** * Represents a single reasoning step taken by the agent @@ -73,17 +74,17 @@ export class ContextualThinkingTool { // Initialize with some starter thinking steps this.addThinkingStep(thinkingId, { type: 'observation', - content: `Starting analysis of the query: "${query}"` + content: AGENT_TOOL_PROMPTS.CONTEXTUAL_THINKING.STARTING_ANALYSIS(query) }); this.addThinkingStep(thinkingId, { type: 'question', - content: `What are the key components of this query that need to be addressed?` + content: AGENT_TOOL_PROMPTS.CONTEXTUAL_THINKING.KEY_COMPONENTS }); this.addThinkingStep(thinkingId, { type: 'observation', - content: `Breaking down the query to understand its requirements and context.` + content: AGENT_TOOL_PROMPTS.CONTEXTUAL_THINKING.BREAKING_DOWN }); return thinkingId; diff --git a/src/services/llm/agent_tools/query_decomposition_tool.ts b/src/services/llm/agent_tools/query_decomposition_tool.ts index 9e161ae73..4286982ee 100644 --- a/src/services/llm/agent_tools/query_decomposition_tool.ts +++ b/src/services/llm/agent_tools/query_decomposition_tool.ts @@ -13,6 +13,7 @@ */ import log from '../../log.js'; +import { AGENT_TOOL_PROMPTS } from '../llm_prompt_constants.js'; export interface SubQuery { id: string; @@ -66,7 +67,7 @@ export class 
QueryDecompositionTool { const mainSubQuery = { id: this.generateSubQueryId(), text: query, - reason: 'Direct question that can be answered without decomposition', + reason: AGENT_TOOL_PROMPTS.QUERY_DECOMPOSITION.SUB_QUERY_DIRECT, isAnswered: false }; @@ -74,7 +75,7 @@ export class QueryDecompositionTool { const genericQuery = { id: this.generateSubQueryId(), text: `Information related to ${query}`, - reason: "Generic exploration to find related content", + reason: AGENT_TOOL_PROMPTS.QUERY_DECOMPOSITION.SUB_QUERY_GENERIC, isAnswered: false }; @@ -110,7 +111,7 @@ export class QueryDecompositionTool { subQueries: [{ id: this.generateSubQueryId(), text: query, - reason: 'Error in decomposition, treating as simple query', + reason: AGENT_TOOL_PROMPTS.QUERY_DECOMPOSITION.SUB_QUERY_ERROR, isAnswered: false }], status: 'pending', @@ -290,7 +291,7 @@ export class QueryDecompositionTool { return [{ id: this.generateSubQueryId(), text: query, - reason: 'Direct analysis of note details', + reason: AGENT_TOOL_PROMPTS.QUERY_DECOMPOSITION.SUB_QUERY_DIRECT_ANALYSIS, isAnswered: false }]; } @@ -299,7 +300,7 @@ export class QueryDecompositionTool { subQueries.push({ id: this.generateSubQueryId(), text: query, - reason: 'Original query', + reason: AGENT_TOOL_PROMPTS.QUERY_DECOMPOSITION.ORIGINAL_QUERY, isAnswered: false }); diff --git a/src/services/llm/base_ai_service.ts b/src/services/llm/base_ai_service.ts index 3a02d0fc7..64f8e8299 100644 --- a/src/services/llm/base_ai_service.ts +++ b/src/services/llm/base_ai_service.ts @@ -1,5 +1,6 @@ import options from '../options.js'; import type { AIService, ChatCompletionOptions, ChatResponse, Message } from './ai_interface.js'; +import { DEFAULT_SYSTEM_PROMPT } from './llm_prompt_constants.js'; export abstract class BaseAIService implements AIService { protected name: string; @@ -19,11 +20,7 @@ export abstract class BaseAIService implements AIService { } protected getSystemPrompt(customPrompt?: string): string { - // Default system prompt 
if none is provided - return customPrompt || - "You are a helpful assistant embedded in the Trilium Notes application. " + - "You can help users with their notes, answer questions, and provide information. " + - "Keep your responses concise and helpful. " + - "You're currently chatting with the user about their notes."; + // Use prompt from constants file if no custom prompt is provided + return customPrompt || DEFAULT_SYSTEM_PROMPT; } } diff --git a/src/services/llm/context/modules/context_formatter.ts b/src/services/llm/context/modules/context_formatter.ts index 2d18f5ede..c37767e66 100644 --- a/src/services/llm/context/modules/context_formatter.ts +++ b/src/services/llm/context/modules/context_formatter.ts @@ -1,5 +1,6 @@ import sanitizeHtml from 'sanitize-html'; import log from '../../../log.js'; +import { CONTEXT_PROMPTS } from '../../llm_prompt_constants.js'; // Constants for context window sizes, defines in-module to avoid circular dependencies const CONTEXT_WINDOW = { @@ -23,10 +24,8 @@ export class ContextFormatter { */ async buildContextFromNotes(sources: any[], query: string, providerId: string = 'default'): Promise<string> { if (!sources || sources.length === 0) { - // Return a default context instead of empty string - return "I am an AI assistant helping you with your Trilium notes. " + - "I couldn't find any specific notes related to your query, but I'll try to assist you " + - "with general knowledge about Trilium or other topics you're interested in."; + // Return a default context from constants instead of empty string + return CONTEXT_PROMPTS.NO_NOTES_CONTEXT; } try { @@ -42,8 +41,8 @@ export class ContextFormatter { // Start with different headers based on provider let context = isAnthropicFormat - ? `I'm your AI assistant helping with your Trilium notes database. For your query: "${query}", I found these relevant notes:\n\n` - : `I've found some relevant information in your notes that may help answer: "${query}"\n\n`; + ? 
CONTEXT_PROMPTS.CONTEXT_HEADERS.ANTHROPIC(query) + : CONTEXT_PROMPTS.CONTEXT_HEADERS.DEFAULT(query); // Sort sources by similarity if available to prioritize most relevant if (sources[0] && sources[0].similarity !== undefined) { @@ -97,8 +96,8 @@ export class ContextFormatter { // Add closing to provide instructions to the AI const closing = isAnthropicFormat - ? "\n\nPlease use this information to answer the user's query. If the notes don't contain enough information, you can use your general knowledge as well." - : "\n\nBased on this information from the user's notes, please provide a helpful response."; + ? CONTEXT_PROMPTS.CONTEXT_CLOSINGS.ANTHROPIC + : CONTEXT_PROMPTS.CONTEXT_CLOSINGS.DEFAULT; // Check if adding the closing would exceed our limit if (totalSize + closing.length <= maxTotalLength) { @@ -108,7 +107,7 @@ export class ContextFormatter { return context; } catch (error) { log.error(`Error building context from notes: ${error}`); - return "I'm your AI assistant helping with your Trilium notes. I'll try to answer based on what I know."; + return CONTEXT_PROMPTS.ERROR_FALLBACK_CONTEXT; } } diff --git a/src/services/llm/context/modules/context_service.ts b/src/services/llm/context/modules/context_service.ts index 2cc7102f8..dd749c7a1 100644 --- a/src/services/llm/context/modules/context_service.ts +++ b/src/services/llm/context/modules/context_service.ts @@ -6,6 +6,7 @@ import queryEnhancer from './query_enhancer.js'; import contextFormatter from './context_formatter.js'; import aiServiceManager from '../../ai_service_manager.js'; import { ContextExtractor } from '../index.js'; +import { CONTEXT_PROMPTS } from '../../llm_prompt_constants.js'; /** * Main context service that integrates all context-related functionality @@ -84,8 +85,7 @@ export class ContextService { log.error(`Failed to initialize ContextService: ${error}`); // Return a fallback response if initialization fails return { - context: "I am an AI assistant helping you with your Trilium notes. 
" + - "I'll try to assist you with general knowledge about your query.", + context: CONTEXT_PROMPTS.NO_NOTES_CONTEXT, notes: [], queries: [userQuestion] }; @@ -175,8 +175,7 @@ export class ContextService { } catch (error) { log.error(`Error processing query: ${error}`); return { - context: "I am an AI assistant helping you with your Trilium notes. " + - "I'll try to assist you with general knowledge about your query.", + context: CONTEXT_PROMPTS.NO_NOTES_CONTEXT, notes: [], queries: [userQuestion] }; diff --git a/src/services/llm/context/modules/query_enhancer.ts b/src/services/llm/context/modules/query_enhancer.ts index baae7265b..6eaef3b32 100644 --- a/src/services/llm/context/modules/query_enhancer.ts +++ b/src/services/llm/context/modules/query_enhancer.ts @@ -1,17 +1,14 @@ import log from '../../../log.js'; import cacheManager from './cache_manager.js'; import type { Message } from '../../ai_interface.js'; +import { CONTEXT_PROMPTS } from '../../llm_prompt_constants.js'; /** * Provides utilities for enhancing queries and generating search queries */ export class QueryEnhancer { - // Default meta-prompt for query enhancement - private metaPrompt = `You are an AI assistant that decides what information needs to be retrieved from a user's knowledge base called TriliumNext Notes to answer the user's question. -Given the user's question, generate 3-5 specific search queries that would help find relevant information. -Each query should be focused on a different aspect of the question. -Format your answer as a JSON array of strings, with each string being a search query. 
-Example: ["exact topic mentioned", "related concept 1", "related concept 2"]`; + // Use the centralized query enhancer prompt + private metaPrompt = CONTEXT_PROMPTS.QUERY_ENHANCER; /** * Generate search queries to find relevant information for the user question diff --git a/src/services/llm/index_service.ts b/src/services/llm/index_service.ts index 740e4d48c..434fc5e07 100644 --- a/src/services/llm/index_service.ts +++ b/src/services/llm/index_service.ts @@ -20,6 +20,7 @@ import type { NoteEmbeddingContext } from "./embeddings/embeddings_interface.js" import type { OptionDefinitions } from "../options_interface.js"; import sql from "../sql.js"; import sqlInit from "../sql_init.js"; +import { CONTEXT_PROMPTS } from './llm_prompt_constants.js'; class IndexService { private initialized = false; @@ -691,7 +692,7 @@ class IndexService { ); if (similarNotes.length === 0) { - return "I'm an AI assistant helping with your Trilium notes. I couldn't find specific notes related to your query, but I'll try to assist based on general knowledge."; + return CONTEXT_PROMPTS.INDEX_NO_NOTES_CONTEXT; } // Build context from the similar notes diff --git a/src/services/llm/llm_prompt_constants.ts b/src/services/llm/llm_prompt_constants.ts new file mode 100644 index 000000000..c77b2d284 --- /dev/null +++ b/src/services/llm/llm_prompt_constants.ts @@ -0,0 +1,98 @@ +/** + * LLM Prompt Constants + * + * This file centralizes all LLM/AI prompts used throughout the application. + * When adding new prompts, please add them here rather than hardcoding them in other files. + * + * Prompts are organized by their usage context (e.g., service, feature, etc.) + */ + +// Base system prompt used when no custom prompt is provided +export const DEFAULT_SYSTEM_PROMPT = + "You are a helpful assistant embedded in the Trilium Notes application. " + + "You can help users with their notes, answer questions, and provide information. " + + "Keep your responses concise and helpful. 
" + + "You're currently chatting with the user about their notes."; + +// Context-specific prompts +export const CONTEXT_PROMPTS = { + // Query enhancer prompt for generating better search terms + QUERY_ENHANCER: + `You are an AI assistant that decides what information needs to be retrieved from a user's knowledge base called TriliumNext Notes to answer the user's question. +Given the user's question, generate 3-5 specific search queries that would help find relevant information. +Each query should be focused on a different aspect of the question. +Format your answer as a JSON array of strings, with each string being a search query. +Example: ["exact topic mentioned", "related concept 1", "related concept 2"]`, + + // Used to format notes context when providing responses + CONTEXT_NOTES_WRAPPER: + `I'll provide you with relevant information from my notes to help answer your question. + +{noteContexts} + +When referring to information from these notes in your response, please cite them by their titles (e.g., "According to your note on [Title]...") rather than using labels like "Note 1" or "Note 2". + +Now, based on the above information, please answer: {query}`, + + // Default fallback when no notes are found + NO_NOTES_CONTEXT: + "I am an AI assistant helping you with your Trilium notes. " + + "I couldn't find any specific notes related to your query, but I'll try to assist you " + + "with general knowledge about Trilium or other topics you're interested in.", + + // Fallback when context building fails + ERROR_FALLBACK_CONTEXT: + "I'm your AI assistant helping with your Trilium notes. I'll try to answer based on what I know.", + + // Headers for context (by provider) + CONTEXT_HEADERS: { + ANTHROPIC: (query: string) => + `I'm your AI assistant helping with your Trilium notes database. 
For your query: "${query}", I found these relevant notes:\n\n`, + DEFAULT: (query: string) => + `I've found some relevant information in your notes that may help answer: "${query}"\n\n` + }, + + // Closings for context (by provider) + CONTEXT_CLOSINGS: { + ANTHROPIC: + "\n\nPlease use this information to answer the user's query. If the notes don't contain enough information, you can use your general knowledge as well.", + DEFAULT: + "\n\nBased on this information from the user's notes, please provide a helpful response." + }, + + // Context for index service + INDEX_NO_NOTES_CONTEXT: + "I'm an AI assistant helping with your Trilium notes. I couldn't find specific notes related to your query, but I'll try to assist based on general knowledge." +}; + +// Agent tool prompts +export const AGENT_TOOL_PROMPTS = { + // Prompts for query decomposition + QUERY_DECOMPOSITION: { + SUB_QUERY_DIRECT: 'Direct question that can be answered without decomposition', + SUB_QUERY_GENERIC: 'Generic exploration to find related content', + SUB_QUERY_ERROR: 'Error in decomposition, treating as simple query', + SUB_QUERY_DIRECT_ANALYSIS: 'Direct analysis of note details', + ORIGINAL_QUERY: 'Original query' + }, + + // Prompts for contextual thinking tool + CONTEXTUAL_THINKING: { + STARTING_ANALYSIS: (query: string) => `Starting analysis of the query: "${query}"`, + KEY_COMPONENTS: 'What are the key components of this query that need to be addressed?', + BREAKING_DOWN: 'Breaking down the query to understand its requirements and context.' + } +}; + +// Provider-specific prompt modifiers +export const PROVIDER_PROMPTS = { + ANTHROPIC: { + // Any Anthropic Claude-specific prompt modifications would go here + }, + OPENAI: { + // Any OpenAI-specific prompt modifications would go here + }, + OLLAMA: { + // Any Ollama-specific prompt modifications would go here + } +};