create more interfaces to decrease use of "any"

perf3ct 2025-03-28 21:04:12 +00:00
parent 44cd2ebda6
commit 005ddc4a59
No known key found for this signature in database
GPG Key ID: 569C4EEC436F5232
7 changed files with 444 additions and 55 deletions
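The pattern applied throughout this diff is the same in each file: `any` parameters and return types are replaced by named interfaces, and cache-style accessors gain a type parameter. A minimal before/after sketch of that pattern (hypothetical code, not from this commit):

// Before: callers get `any`, so misspelled property access compiles silently.
function getCachedBefore(cache: Map<string, unknown>, key: string): any {
    return cache.get(key) ?? null;
}

// After: the caller names the expected shape; the single unchecked cast is
// centralized here instead of `any` leaking to every call site.
function getCachedAfter<T>(cache: Map<string, unknown>, key: string): T | null {
    return (cache.get(key) as T | undefined) ?? null;
}

interface NoteSummary { noteId: string; title: string; }

const demoCache = new Map<string, unknown>();
demoCache.set('note:abc', { noteId: 'abc', title: 'Inbox' });

const note = getCachedAfter<NoteSummary>(demoCache, 'note:abc');
if (note) {
    console.log(note.title); // type-checked access
}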

View File

@@ -15,10 +15,20 @@ import contextService from '../context_service.js';
import aiServiceManager from '../ai_service_manager.js';
import log from '../../log.js';
+// Import interfaces
+import type {
+IAgentToolsManager,
+LLMServiceInterface,
+IVectorSearchTool,
+INoteNavigatorTool,
+IQueryDecompositionTool,
+IContextualThinkingTool
+} from '../interfaces/agent_tool_interfaces.js';
/**
* Manages all agent tools and provides a unified interface for the LLM agent
*/
-export class AgentToolsManager {
+export class AgentToolsManager implements IAgentToolsManager {
private vectorSearchTool: VectorSearchTool | null = null;
private noteNavigatorTool: NoteNavigatorTool | null = null;
private queryDecompositionTool: QueryDecompositionTool | null = null;
@@ -29,7 +39,7 @@ export class AgentToolsManager {
// Initialize tools only when requested to avoid circular dependencies
}
-async initialize(aiServiceManager: any): Promise<void> {
+async initialize(aiServiceManager: LLMServiceInterface): Promise<void> {
try {
if (this.initialized) {
return;
@@ -68,17 +78,17 @@
}
return {
-vectorSearch: this.vectorSearchTool,
-noteNavigator: this.noteNavigatorTool,
-queryDecomposition: this.queryDecompositionTool,
-contextualThinking: this.contextualThinkingTool
+vectorSearch: this.vectorSearchTool as IVectorSearchTool,
+noteNavigator: this.noteNavigatorTool as INoteNavigatorTool,
+queryDecomposition: this.queryDecompositionTool as IQueryDecompositionTool,
+contextualThinking: this.contextualThinkingTool as IContextualThinkingTool
};
}
/**
* Get the vector search tool
*/
-getVectorSearchTool(): VectorSearchTool {
+getVectorSearchTool(): IVectorSearchTool {
if (!this.initialized || !this.vectorSearchTool) {
throw new Error("Vector search tool not initialized");
}
@@ -88,7 +98,7 @@
/**
* Get the note structure navigator tool
*/
-getNoteNavigatorTool(): NoteNavigatorTool {
+getNoteNavigatorTool(): INoteNavigatorTool {
if (!this.initialized || !this.noteNavigatorTool) {
throw new Error("Note navigator tool not initialized");
}
@@ -98,7 +108,7 @@
/**
* Get the query decomposition tool
*/
-getQueryDecompositionTool(): QueryDecompositionTool {
+getQueryDecompositionTool(): IQueryDecompositionTool {
if (!this.initialized || !this.queryDecompositionTool) {
throw new Error("Query decomposition tool not initialized");
}
@@ -108,7 +118,7 @@
/**
* Get the contextual thinking tool
*/
-getContextualThinkingTool(): ContextualThinkingTool {
+getContextualThinkingTool(): IContextualThinkingTool {
if (!this.initialized || !this.contextualThinkingTool) {
throw new Error("Contextual thinking tool not initialized");
}
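A hedged usage sketch of the typed manager above (import paths are assumptions; they are not visible in this extract):

import { AgentToolsManager } from './agent_tools/agent_tools_manager.js'; // path assumed
import aiServiceManager from './ai_service_manager.js';                   // path assumed

async function demo() {
    const manager = new AgentToolsManager();
    // initialize() is now checked against LLMServiceInterface rather than `any`;
    // aiServiceManager is assumed to expose generateChatCompletion().
    await manager.initialize(aiServiceManager);

    // Each getter returns an interface type, so callers depend only on the
    // declared surface, not on the concrete tool classes.
    const vectorSearch = manager.getVectorSearchTool();
    const hits = await vectorSearch.search('task management', undefined, { limit: 5 });
    console.log(`found ${hits.length} results`);
}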

View File

@@ -10,9 +10,15 @@ import indexService from './index_service.js';
import { getEmbeddingProvider, getEnabledEmbeddingProviders } from './providers/providers.js';
import agentTools from './agent_tools/index.js';
-type ServiceProviders = 'openai' | 'anthropic' | 'ollama';
+// Import interfaces
+import type {
+ServiceProviders,
+IAIServiceManager,
+ProviderMetadata
+} from './interfaces/ai_service_interfaces.js';
+import type { NoteSearchResult } from './interfaces/context_interfaces.js';
-export class AIServiceManager {
+export class AIServiceManager implements IAIServiceManager {
private services: Record<ServiceProviders, AIService> = {
openai: new OpenAIService(),
anthropic: new AnthropicService(),
@@ -406,28 +412,96 @@ export class AIServiceManager {
}
/**
-* Get context enhanced with agent tools
+* Get context from agent tools
*/
async getAgentToolsContext(
noteId: string,
query: string,
showThinking: boolean = false,
-relevantNotes: Array<any> = []
+relevantNotes: NoteSearchResult[] = []
): Promise<string> {
-// Just use the context service directly
try {
-const cs = (await import('./context/modules/context_service.js')).default;
-return cs.getAgentToolsContext(
-noteId,
-query,
-showThinking,
-relevantNotes
-);
+if (!this.getAIEnabled()) {
+return '';
+}
+await this.initializeAgentTools();
+return await contextService.getAgentToolsContext(noteId, query, showThinking);
} catch (error) {
-log.error(`Error in AIServiceManager.getAgentToolsContext: ${error}`);
-return `Error generating enhanced context: ${error}`;
+log.error(`Error getting agent tools context: ${error}`);
+return '';
}
}
+/**
+* Get AI service for the given provider
+*/
+getService(provider?: string): AIService {
+this.ensureInitialized();
+// If provider is specified, try to use it
+if (provider && this.services[provider as ServiceProviders]?.isAvailable()) {
+return this.services[provider as ServiceProviders];
+}
+// Otherwise, use the first available provider in the configured order
+for (const providerName of this.providerOrder) {
+const service = this.services[providerName];
+if (service.isAvailable()) {
+return service;
+}
+}
+// If no provider is available, use first one anyway (it will throw an error)
+// This allows us to show a proper error message rather than "provider not found"
+return this.services[this.providerOrder[0]];
+}
+/**
+* Get the preferred provider based on configuration
+*/
+getPreferredProvider(): string {
+this.ensureInitialized();
+// Return the first available provider in the order
+for (const providerName of this.providerOrder) {
+if (this.services[providerName].isAvailable()) {
+return providerName;
+}
+}
+// Return the first provider as fallback
+return this.providerOrder[0];
+}
+/**
+* Check if a specific provider is available
+*/
+isProviderAvailable(provider: string): boolean {
+return this.services[provider as ServiceProviders]?.isAvailable() ?? false;
+}
+/**
+* Get metadata about a provider
+*/
+getProviderMetadata(provider: string): ProviderMetadata | null {
+const service = this.services[provider as ServiceProviders];
+if (!service) {
+return null;
+}
+return {
+name: provider,
+capabilities: {
+chat: true,
+embeddings: provider !== 'anthropic', // Anthropic doesn't have embeddings
+streaming: true,
+functionCalling: provider === 'openai' // Only OpenAI has function calling
+},
+models: ['default'], // Placeholder, could be populated from the service
+defaultModel: 'default'
+};
+}
}
// Don't create singleton immediately, use a lazy-loading pattern
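The selection rule in getService() is small enough to restate standalone. A self-contained sketch of the same fallback logic (names are illustrative, not from the codebase):

type Provider = 'openai' | 'anthropic' | 'ollama';
interface ServiceLike { name: string; isAvailable(): boolean; }

// Same rule as getService(): the requested provider if available, else the
// first available provider in configured order, else order[0] so the eventual
// call fails with that provider's own error message.
function pickService(services: Record<Provider, ServiceLike>, order: Provider[], requested?: string): ServiceLike {
    if (requested && services[requested as Provider]?.isAvailable()) {
        return services[requested as Provider];
    }
    for (const name of order) {
        if (services[name].isAvailable()) {
            return services[name];
        }
    }
    return services[order[0]];
}

const demo: Record<Provider, ServiceLike> = {
    openai: { name: 'openai', isAvailable: () => false },
    anthropic: { name: 'anthropic', isAvailable: () => true },
    ollama: { name: 'ollama', isAvailable: () => false },
};
console.log(pickService(demo, ['openai', 'anthropic', 'ollama'], 'openai').name); // "anthropic"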
@@ -493,7 +567,7 @@ export default {
noteId: string,
query: string,
showThinking: boolean = false,
-relevantNotes: Array<any> = []
+relevantNotes: NoteSearchResult[] = []
): Promise<string> {
return getInstance().getAgentToolsContext(
noteId,
@@ -501,6 +575,19 @@
showThinking,
relevantNotes
);
},
+// New methods
+getService(provider?: string): AIService {
+return getInstance().getService(provider);
+},
+getPreferredProvider(): string {
+return getInstance().getPreferredProvider();
+},
+isProviderAvailable(provider: string): boolean {
+return getInstance().isProviderAvailable(provider);
+},
+getProviderMetadata(provider: string): ProviderMetadata | null {
+return getInstance().getProviderMetadata(provider);
+}
};
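A hedged sketch of calling the new facade methods through the default export (the import path is an assumption):

import aiServiceManager from './ai_service_manager.js'; // path assumed

if (aiServiceManager.isProviderAvailable('ollama')) {
    const meta = aiServiceManager.getProviderMetadata('ollama');
    // Per getProviderMetadata() above, embeddings is true for any provider
    // other than 'anthropic'.
    console.log(meta?.capabilities.embeddings); // true
}

// With no argument, getService() falls back through the configured order.
const service = aiServiceManager.getService();
console.log(service.isAvailable());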

View File

@@ -1,21 +1,16 @@
import log from '../../../log.js';
+import type { ICacheManager, CachedNoteData, CachedQueryResults } from '../../interfaces/context_interfaces.js';
/**
* Manages caching for context services
* Provides a centralized caching system to avoid redundant operations
*/
-export class CacheManager {
+export class CacheManager implements ICacheManager {
// Cache for recently used context to avoid repeated embedding lookups
-private noteDataCache = new Map<string, {
-timestamp: number,
-data: any
-}>();
+private noteDataCache = new Map<string, CachedNoteData<unknown>>();
// Cache for recently used queries
-private queryCache = new Map<string, {
-timestamp: number,
-results: any
-}>();
+private queryCache = new Map<string, CachedQueryResults<unknown>>();
// Default cache expiry (5 minutes)
private defaultCacheExpiryMs = 5 * 60 * 1000;
@@ -57,13 +52,13 @@ export class CacheManager {
/**
* Get cached note data
*/
-getNoteData(noteId: string, type: string): any | null {
+getNoteData<T>(noteId: string, type: string): T | null {
const key = `${noteId}:${type}`;
const cached = this.noteDataCache.get(key);
if (cached && Date.now() - cached.timestamp < this.defaultCacheExpiryMs) {
log.info(`Cache hit for note data: ${key}`);
-return cached.data;
+return cached.data as T;
}
return null;
@@ -72,7 +67,7 @@
/**
* Store note data in cache
*/
-storeNoteData(noteId: string, type: string, data: any): void {
+storeNoteData<T>(noteId: string, type: string, data: T): void {
const key = `${noteId}:${type}`;
this.noteDataCache.set(key, {
timestamp: Date.now(),
@@ -84,13 +79,13 @@
/**
* Get cached query results
*/
-getQueryResults(query: string, contextNoteId: string | null = null): any | null {
+getQueryResults<T>(query: string, contextNoteId: string | null = null): T | null {
const key = JSON.stringify({ query, contextNoteId });
const cached = this.queryCache.get(key);
if (cached && Date.now() - cached.timestamp < this.defaultCacheExpiryMs) {
log.info(`Cache hit for query: ${query}`);
-return cached.results;
+return cached.results as T;
}
return null;
@@ -99,7 +94,7 @@
/**
* Store query results in cache
*/
-storeQueryResults(query: string, results: any, contextNoteId: string | null = null): void {
+storeQueryResults<T>(query: string, results: T, contextNoteId: string | null = null): void {
const key = JSON.stringify({ query, contextNoteId });
this.queryCache.set(key, {
timestamp: Date.now(),
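A hedged usage sketch for the generic accessors; NoteSummary is a hypothetical shape, and the module is assumed to export a singleton instance:

import cacheManager from './cache_manager.js'; // export name and path assumed

interface NoteSummary { noteId: string; title: string; }

cacheManager.storeNoteData<NoteSummary>('abc123', 'summary', {
    noteId: 'abc123',
    title: 'Meeting notes'
});

// The type parameter documents the expected shape at the call site. The cast
// inside getNoteData() is unchecked, so callers must request the same T that
// was stored under this (noteId, type) pair.
const summary = cacheManager.getNoteData<NoteSummary>('abc123', 'summary');
if (summary) {
    console.log(summary.title);
}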

View File

@@ -1,6 +1,7 @@
import sanitizeHtml from 'sanitize-html';
import log from '../../../log.js';
import { CONTEXT_PROMPTS } from '../../constants/llm_prompt_constants.js';
+import type { IContextFormatter, NoteSearchResult } from '../../interfaces/context_interfaces.js';
// Constants for context window sizes, defined in-module to avoid circular dependencies
const CONTEXT_WINDOW = {
@@ -11,20 +12,23 @@
};
/**
-* Provides utilities for formatting context for LLM consumption
+* Formats context data for LLM consumption
+*
+* This service is responsible for formatting note data into a structured
+* format that can be efficiently processed by the LLM.
*/
-export class ContextFormatter {
+export class ContextFormatter implements IContextFormatter {
/**
-* Build context string from retrieved notes
+* Build a structured context string from note sources
*
-* @param sources - Array of notes or content sources
-* @param query - The original user query
-* @param providerId - The LLM provider to format for
+* @param sources Array of note data with content and metadata
+* @param query The user's query for context
+* @param providerId Optional provider ID to customize formatting
+* @returns Formatted context string
*/
-async buildContextFromNotes(sources: any[], query: string, providerId: string = 'default'): Promise<string> {
+async buildContextFromNotes(sources: NoteSearchResult[], query: string, providerId: string = 'default'): Promise<string> {
if (!sources || sources.length === 0) {
// Return a default context from constants instead of empty string
log.info('No sources provided to context formatter');
return CONTEXT_PROMPTS.NO_NOTES_CONTEXT;
}
@@ -38,13 +42,13 @@
// DEBUG: Log context window size
log.info(`Context window for provider ${providerId}: ${maxTotalLength} chars`);
-log.info(`Building context from ${sources.length} sources for query: "${query.substring(0, 50)}..."`);
+log.info(`Formatting context from ${sources.length} sources for query: "${query.substring(0, 50)}..."`);
// Use a format appropriate for the model family
const isAnthropicFormat = providerId === 'anthropic';
// Start with different headers based on provider
-let context = isAnthropicFormat
+let formattedContext = isAnthropicFormat
? CONTEXT_PROMPTS.CONTEXT_HEADERS.ANTHROPIC(query)
: CONTEXT_PROMPTS.CONTEXT_HEADERS.DEFAULT(query);
@@ -56,7 +60,7 @@
}
// Track total size to avoid exceeding model context window
-let totalSize = context.length;
+let totalSize = formattedContext.length;
const formattedSources: string[] = [];
// DEBUG: Track stats for logging
@@ -119,7 +123,7 @@
log.info(`Context size so far: ${totalSize}/${maxTotalLength} chars (${(totalSize/maxTotalLength*100).toFixed(2)}% of limit)`);
// Add the formatted sources to the context
-context += formattedSources.join('\n');
+formattedContext += formattedSources.join('\n');
// Add closing to provide instructions to the AI
const closing = isAnthropicFormat
@@ -128,13 +132,13 @@
// Check if adding the closing would exceed our limit
if (totalSize + closing.length <= maxTotalLength) {
-context += closing;
+formattedContext += closing;
}
// DEBUG: Log final context size
-log.info(`Final context: ${context.length} chars, ${formattedSources.length} sources included`);
+log.info(`Final context: ${formattedContext.length} chars, ${formattedSources.length} sources included`);
-return context;
+return formattedContext;
} catch (error) {
log.error(`Error building context from notes: ${error}`);
return CONTEXT_PROMPTS.ERROR_FALLBACK_CONTEXT;
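A hedged sketch of feeding typed sources through buildContextFromNotes() (import paths assumed):

import { ContextFormatter } from './context_formatter.js'; // path assumed
import type { NoteSearchResult } from '../../interfaces/context_interfaces.js';

async function demo() {
    const sources: NoteSearchResult[] = [
        { noteId: 'n1', title: 'Project plan', content: 'Milestones...', similarity: 0.91 },
        { noteId: 'n2', title: 'Retro notes', content: 'Action items...', similarity: 0.74 },
    ];

    const formatter = new ContextFormatter();
    // 'anthropic' selects the Anthropic-specific header and closing shown above.
    const context = await formatter.buildContextFromNotes(
        sources, 'What is the project timeline?', 'anthropic');
    console.log(`${context.length} chars of context`);
}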

View File

@@ -0,0 +1,147 @@
import type { ChatResponse } from '../ai_interface.js';
import type { VectorSearchResult } from '../agent_tools/vector_search_tool.js';
import type { NoteInfo, NotePathInfo, NoteHierarchyLevel } from '../agent_tools/note_navigator_tool.js';
import type { DecomposedQuery, SubQuery } from '../agent_tools/query_decomposition_tool.js';
import type { ThinkingProcess, ThinkingStep } from '../agent_tools/contextual_thinking_tool.js';
import type BAttribute from '../../../becca/entities/battribute.js';
/**
* Interface for the AI service used by agent tools
*/
export interface LLMServiceInterface {
generateChatCompletion(messages: Array<{
role: 'user' | 'assistant' | 'system';
content: string;
}>, options?: {
temperature?: number;
maxTokens?: number;
model?: string;
stream?: boolean;
systemPrompt?: string;
}): Promise<ChatResponse>;
}
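Because LLMServiceInterface is structural, a test can satisfy it with a plain object. A hedged sketch (the full ChatResponse field list is not visible in this extract, hence the cast):

import type { ChatResponse } from '../ai_interface.js';
import type { LLMServiceInterface } from './agent_tool_interfaces.js'; // path assumed

const mockLlm: LLMServiceInterface = {
    async generateChatCompletion(messages) {
        // Echo the last message back; cast because ChatResponse's full shape
        // is not shown in this diff.
        const last = messages[messages.length - 1];
        return { text: `echo: ${last.content}` } as unknown as ChatResponse;
    }
};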
/**
* Interface for tool initialization
*/
export interface AgentToolInitializationParams {
aiServiceManager: LLMServiceInterface;
}
/**
* Interface for agent tool manager
*/
export interface IAgentToolsManager {
initialize(aiServiceManager: LLMServiceInterface): Promise<void>;
isInitialized(): boolean;
getAllTools(): {
vectorSearch: IVectorSearchTool;
noteNavigator: INoteNavigatorTool;
queryDecomposition: IQueryDecompositionTool;
contextualThinking: IContextualThinkingTool;
};
getVectorSearchTool(): IVectorSearchTool;
getNoteNavigatorTool(): INoteNavigatorTool;
getQueryDecompositionTool(): IQueryDecompositionTool;
getContextualThinkingTool(): IContextualThinkingTool;
}
/**
* Interface for context service used by vector search
*/
export interface IContextService {
findRelevantNotesMultiQuery(queries: string[], contextNoteId: string | null, limit: number): Promise<VectorSearchResult[]>;
processQuery(userQuestion: string, llmService: LLMServiceInterface, contextNoteId: string | null, showThinking: boolean): Promise<{
context: string;
sources: Array<{
noteId: string;
title: string;
similarity: number;
}>;
thinking?: string;
}>;
}
/**
* Interface for vector search tool
*/
export interface IVectorSearchTool {
setContextService(contextService: IContextService): void;
search(
query: string,
contextNoteId?: string,
searchOptions?: {
limit?: number;
threshold?: number;
includeContent?: boolean;
}
): Promise<VectorSearchResult[]>;
searchNotes(query: string, options?: {
parentNoteId?: string;
maxResults?: number;
similarityThreshold?: number;
}): Promise<VectorSearchResult[]>;
searchContentChunks(query: string, options?: {
noteId?: string;
maxResults?: number;
similarityThreshold?: number;
}): Promise<VectorSearchResult[]>;
explainResults(query: string, results: VectorSearchResult[]): string;
}
/**
* Interface for note navigator tool
*/
export interface INoteNavigatorTool {
getNoteInfo(noteId: string): NoteInfo | null;
getNotePathsFromRoot(noteId: string): NotePathInfo[];
getNoteHierarchy(noteId: string, depth?: number): NoteHierarchyLevel | null;
getNoteAttributes(noteId: string): BAttribute[];
findPathBetweenNotes(fromNoteId: string, toNoteId: string): NotePathInfo | null;
searchNotesByTitle(searchTerm: string, limit?: number): NoteInfo[];
getNoteClones(noteId: string): Promise<NoteInfo[]>;
getNoteContextDescription(noteId: string): Promise<string>;
getNoteStructure(noteId: string): Promise<{
noteId: string;
title: string;
type: string;
childCount: number;
attributes: Array<{name: string, value: string}>;
parentPath: Array<{title: string, noteId: string}>;
}>;
getChildNotes(noteId: string, limit?: number): Promise<Array<{noteId: string, title: string}>>;
getParentNotes(noteId: string): Promise<Array<{noteId: string, title: string}>>;
getLinkedNotes(noteId: string, limit?: number): Promise<Array<{noteId: string, title: string, direction: 'from'|'to'}>>;
getNotePath(noteId: string): Promise<string>;
}
/**
* Interface for query decomposition tool
*/
export interface IQueryDecompositionTool {
decomposeQuery(query: string, context?: string): DecomposedQuery;
updateSubQueryAnswer(decomposedQuery: DecomposedQuery, subQueryId: string, answer: string): DecomposedQuery;
synthesizeAnswer(decomposedQuery: DecomposedQuery): string;
getQueryStatus(decomposedQuery: DecomposedQuery): string;
assessQueryComplexity(query: string): number;
generateSubQueryId(): string;
createSubQueries(query: string, context?: string): SubQuery[];
}
/**
* Interface for contextual thinking tool
*/
export interface IContextualThinkingTool {
startThinking(query: string): string;
addThinkingStep(
processId: string,
step: Omit<ThinkingStep, 'id'>,
parentId?: string
): string;
completeThinking(processId?: string): ThinkingProcess | null;
getThinkingProcess(processId: string): ThinkingProcess | null;
getActiveThinkingProcess(): ThinkingProcess | null;
visualizeThinking(thinkingId: string): string;
getThinkingSummary(thinkingId: string): string;
resetActiveThinking(): void;
}
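A hedged walkthrough of the IQueryDecompositionTool surface. The DecomposedQuery and SubQuery field names used below (subQueries, id, text) are assumptions; their definitions live in query_decomposition_tool.js, which this extract does not include:

import type { IQueryDecompositionTool } from './agent_tool_interfaces.js'; // path assumed

declare const decomposer: IQueryDecompositionTool; // stands in for the concrete tool

let decomposed = decomposer.decomposeQuery('Compare sync conflicts with offline edits');

for (const sub of decomposed.subQueries) {                // field name assumed
    const answer = `looked up: ${sub.text}`;              // field names assumed
    decomposed = decomposer.updateSubQueryAnswer(decomposed, sub.id, answer);
}

console.log(decomposer.getQueryStatus(decomposed));
console.log(decomposer.synthesizeAnswer(decomposed));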

View File

@@ -0,0 +1,52 @@
import type { AIService, Message, ChatCompletionOptions, ChatResponse } from '../ai_interface.js';
/**
* Interface for any LLM provider metadata
*/
export interface ProviderMetadata {
name: string;
capabilities: {
chat: boolean;
embeddings: boolean;
streaming: boolean;
functionCalling?: boolean;
};
models: string[];
defaultModel?: string;
}
/**
* Interface for AI service manager configuration
*/
export interface AIServiceManagerConfig {
defaultProvider?: string;
fallbackProviders?: string[];
customModels?: Record<string, string>;
}
/**
* Interface for managing AI service providers
*/
export interface IAIServiceManager {
getService(provider?: string): AIService;
getAvailableProviders(): string[];
getPreferredProvider(): string;
isProviderAvailable(provider: string): boolean;
getProviderMetadata(provider: string): ProviderMetadata | null;
getAIEnabled(): boolean;
}
/**
* Type for service providers
*/
export type ServiceProviders = 'openai' | 'anthropic' | 'ollama';
/**
* LLM model configuration
*/
export interface ModelConfig {
provider: string;
model: string;
maxTokens?: number;
temperature?: number;
}
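A hedged sketch that uses the metadata to select a provider by capability instead of hard-coding provider names (import path assumed):

import type { IAIServiceManager, ProviderMetadata } from './ai_service_interfaces.js'; // path assumed

function findEmbeddingProvider(manager: IAIServiceManager): string | null {
    for (const provider of manager.getAvailableProviders()) {
        const meta: ProviderMetadata | null = manager.getProviderMetadata(provider);
        if (meta?.capabilities.embeddings) {
            return provider;
        }
    }
    return null;
}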

View File

@@ -0,0 +1,94 @@
/**
* Interface for note data in cache
*/
export interface CachedNoteData<T> {
timestamp: number;
data: T;
}
/**
* Interface for query results in cache
*/
export interface CachedQueryResults<T> {
timestamp: number;
results: T;
}
/**
* Interface for cache manager
*/
export interface ICacheManager {
getNoteData<T>(noteId: string, type: string): T | null;
storeNoteData<T>(noteId: string, type: string, data: T): void;
getQueryResults<T>(query: string, contextNoteId: string | null): T | null;
storeQueryResults<T>(query: string, results: T, contextNoteId: string | null): void;
cleanupCache(): void;
clearAllCaches(): void;
}
/**
* Interface for note data in search results
*/
export interface NoteSearchResult {
noteId: string;
title: string;
content?: string;
type?: string;
mime?: string;
similarity: number;
parentId?: string;
parentTitle?: string;
dateCreated?: string;
dateModified?: string;
}
/**
* Interface for context formatter
*/
export interface IContextFormatter {
buildContextFromNotes(sources: NoteSearchResult[], query: string, providerId?: string): Promise<string>;
}
/**
* Interface for query enhancer
*/
export interface IQueryEnhancer {
generateSearchQueries(userQuestion: string, llmService: {
generateChatCompletion: (messages: Array<{
role: 'user' | 'assistant' | 'system';
content: string;
}>, options?: {
temperature?: number;
maxTokens?: number;
}) => Promise<{
text: string;
}>;
}): Promise<string[]>;
}
/**
* Interface for content chunk
*/
export interface ContentChunk {
content: string;
metadata?: Record<string, unknown>;
}
/**
* Interface for note chunk
*/
export interface NoteChunk {
noteId: string;
title: string;
content: string;
type?: string;
metadata?: Record<string, unknown>;
}
/**
* Interface for content chunking service
*/
export interface IContentChunker {
chunkContent(content: string, metadata?: Record<string, unknown>): ContentChunk[];
chunkNoteContent(noteId: string, content: string, title: string): Promise<NoteChunk[]>;
}
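A minimal implementation satisfying IContentChunker, as a hedged sketch; the fixed-size splitting is illustrative only, since the real chunker is not part of this diff:

import type { ContentChunk, IContentChunker, NoteChunk } from './context_interfaces.js'; // path assumed

class FixedSizeChunker implements IContentChunker {
    constructor(private readonly chunkSize = 1000) {}

    chunkContent(content: string, metadata?: Record<string, unknown>): ContentChunk[] {
        const chunks: ContentChunk[] = [];
        for (let i = 0; i < content.length; i += this.chunkSize) {
            chunks.push({ content: content.slice(i, i + this.chunkSize), metadata });
        }
        return chunks;
    }

    async chunkNoteContent(noteId: string, content: string, title: string): Promise<NoteChunk[]> {
        return this.chunkContent(content).map(chunk => ({
            noteId,
            title,
            content: chunk.content,
        }));
    }
}

const chunker = new FixedSizeChunker(500);
console.log(chunker.chunkContent('a'.repeat(1200)).length); // 3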