From c26b74495c3b7f5062bc1d38dc0b3e0c2e4ca168 Mon Sep 17 00:00:00 2001
From: perf3ct
Date: Thu, 5 Jun 2025 22:34:20 +0000
Subject: [PATCH] feat(llm): remove deprecated LLM functions

---
 apps/server/src/routes/api/embeddings.ts      |  2 +-
 .../src/services/llm/ai_service_manager.ts    | 10 ++---
 .../services/llm/chat/rest_chat_service.ts    |  4 +-
 .../llm/config/configuration_helpers.ts       | 44 -------------------
 .../llm/context/modules/provider_manager.ts   |  8 ++--
 .../llm/context/services/context_service.ts   |  4 +-
 .../context/services/vector_search_service.ts |  4 +-
 .../src/services/llm/embeddings/index.ts      |  4 --
 .../src/services/llm/embeddings/stats.ts      | 17 -------
 .../src/services/llm/embeddings/storage.ts    |  7 +--
 apps/server/src/services/llm/index_service.ts |  4 +-
 .../llm/interfaces/ai_service_interfaces.ts   |  2 +-
 .../semantic_context_extraction_stage.ts      |  2 +-
 .../src/services/llm/providers/providers.ts   | 14 +-----
 14 files changed, 25 insertions(+), 101 deletions(-)

diff --git a/apps/server/src/routes/api/embeddings.ts b/apps/server/src/routes/api/embeddings.ts
index 8fd9b475a..226231ea2 100644
--- a/apps/server/src/routes/api/embeddings.ts
+++ b/apps/server/src/routes/api/embeddings.ts
@@ -408,7 +408,7 @@ async function reprocessAllNotes(req: Request, res: Response) {
     try {
         // Wrap the operation in cls.init to ensure proper context
         cls.init(async () => {
-            await vectorStore.reprocessAllNotes();
+            await indexService.reprocessAllNotes();
             log.info("Embedding reprocessing completed successfully");
         });
     } catch (error: any) {
diff --git a/apps/server/src/services/llm/ai_service_manager.ts b/apps/server/src/services/llm/ai_service_manager.ts
index f054dff57..805034072 100644
--- a/apps/server/src/services/llm/ai_service_manager.ts
+++ b/apps/server/src/services/llm/ai_service_manager.ts
@@ -512,7 +512,7 @@ export class AIServiceManager implements IAIServiceManager {
         if (!contextNotes || contextNotes.length === 0) {
             try {
                 // Get the default LLM service for context enhancement
-                const provider = this.getPreferredProvider();
+                const provider = this.getSelectedProvider();
                 const llmService = await this.getService(provider);
 
                 // Find relevant notes
@@ -596,9 +596,9 @@ export class AIServiceManager implements IAIServiceManager {
     }
 
     /**
-     * Get the preferred provider based on configuration (sync version for compatibility)
+     * Get the selected provider based on configuration (sync version for compatibility)
      */
-    getPreferredProvider(): string {
+    getSelectedProvider(): string {
         this.ensureInitialized();
 
         // Return the first available provider in the order
@@ -803,8 +803,8 @@ export default {
     async getService(provider?: string): Promise {
         return getInstance().getService(provider);
     },
-    getPreferredProvider(): string {
-        return getInstance().getPreferredProvider();
+    getSelectedProvider(): string {
+        return getInstance().getSelectedProvider();
     },
     isProviderAvailable(provider: string): boolean {
         return getInstance().isProviderAvailable(provider);
diff --git a/apps/server/src/services/llm/chat/rest_chat_service.ts b/apps/server/src/services/llm/chat/rest_chat_service.ts
index 1ad3d7a22..4176eaffe 100644
--- a/apps/server/src/services/llm/chat/rest_chat_service.ts
+++ b/apps/server/src/services/llm/chat/rest_chat_service.ts
@@ -14,7 +14,7 @@ import type { LLMStreamMessage } from "../interfaces/chat_ws_messages.js";
 import chatStorageService from '../chat_storage_service.js';
 import {
     isAIEnabled,
-    getFirstValidModelConfig,
+    getSelectedModelConfig,
 } from '../config/configuration_helpers.js';
 
 /**
@@ -419,7 +419,7 @@ class RestChatService {
     */
    async getPreferredModel(): Promise {
        try {
-            const validConfig = await getFirstValidModelConfig();
+            const validConfig = await getSelectedModelConfig();
             if (!validConfig) {
                 log.error('No valid AI model configuration found');
                 return undefined;
diff --git a/apps/server/src/services/llm/config/configuration_helpers.ts b/apps/server/src/services/llm/config/configuration_helpers.ts
index 286716d3c..2635cc35f 100644
--- a/apps/server/src/services/llm/config/configuration_helpers.ts
+++ b/apps/server/src/services/llm/config/configuration_helpers.ts
@@ -150,47 +150,3 @@ export async function getSelectedModelConfig(): Promise<{ model: string; provider: ProviderType } | null> {
     return await getValidModelConfig(selectedProvider);
 }
 
-// Legacy support functions - these maintain backwards compatibility but now use single provider logic
-/**
- * @deprecated Use getSelectedProvider() instead
- */
-export async function getProviderPrecedence(): Promise {
-    const selected = await getSelectedProvider();
-    return selected ? [selected] : [];
-}
-
-/**
- * @deprecated Use getSelectedProvider() instead
- */
-export async function getPreferredProvider(): Promise {
-    return await getSelectedProvider();
-}
-
-/**
- * @deprecated Use getSelectedEmbeddingProvider() instead
- */
-export async function getEmbeddingProviderPrecedence(): Promise {
-    const selected = await getSelectedEmbeddingProvider();
-    return selected ? [selected] : [];
-}
-
-/**
- * @deprecated Use getSelectedEmbeddingProvider() instead
- */
-export async function getPreferredEmbeddingProvider(): Promise {
-    return await getSelectedEmbeddingProvider();
-}
-
-/**
- * @deprecated Use getAvailableSelectedProvider() instead
- */
-export async function getFirstAvailableProvider(): Promise {
-    return await getAvailableSelectedProvider();
-}
-
-/**
- * @deprecated Use getSelectedModelConfig() instead
- */
-export async function getFirstValidModelConfig(): Promise<{ model: string; provider: ProviderType } | null> {
-    return await getSelectedModelConfig();
-}
\ No newline at end of file
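The block removed above is the whole legacy compatibility layer; callers now use the single-provider helpers that configuration_helpers.ts keeps exporting (getSelectedProvider, getSelectedEmbeddingProvider, getAvailableSelectedProvider, getSelectedModelConfig). A minimal migration sketch for call sites, assuming the return shapes shown in this patch (a provider name or null, a { model, provider } config or null); the chooseModel/chooseEmbeddingProvider helpers and the import path are illustrative, not part of the patch:

    import {
        getSelectedProvider,
        getSelectedEmbeddingProvider,
        getSelectedModelConfig,
    } from './configuration_helpers.js';

    // Hypothetical caller: the old precedence lists collapse to the single
    // selected provider, so "first entry of the list" becomes a null check.
    async function chooseModel(): Promise<string | null> {
        // was: (await getProviderPrecedence())[0] / await getPreferredProvider()
        const provider = await getSelectedProvider();
        if (!provider) {
            return null;
        }

        // was: await getFirstValidModelConfig()
        const config = await getSelectedModelConfig();
        return config ? `${config.provider}:${config.model}` : null;
    }

    // was: (await getEmbeddingProviderPrecedence())[0]
    async function chooseEmbeddingProvider(): Promise<string | null> {
        return await getSelectedEmbeddingProvider();
    }

The same collapse shows up later in this patch in storage.ts, where the selected provider is wrapped in a one-element array so the existing fallback loop keeps working unchanged.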
diff --git a/apps/server/src/services/llm/context/modules/provider_manager.ts b/apps/server/src/services/llm/context/modules/provider_manager.ts
index 6af8ac991..56c6437c0 100644
--- a/apps/server/src/services/llm/context/modules/provider_manager.ts
+++ b/apps/server/src/services/llm/context/modules/provider_manager.ts
@@ -1,6 +1,6 @@
 import log from '../../../log.js';
 import { getEmbeddingProvider, getEnabledEmbeddingProviders } from '../../providers/providers.js';
-import { getSelectedEmbeddingProvider } from '../../config/configuration_helpers.js';
+import { getSelectedEmbeddingProvider as getSelectedEmbeddingProviderName } from '../../config/configuration_helpers.js';
 
 /**
  * Manages embedding providers for context services
@@ -12,10 +12,10 @@ export class ProviderManager {
      *
      * @returns The selected embedding provider or null if none available
      */
-    async getPreferredEmbeddingProvider(): Promise {
+    async getSelectedEmbeddingProvider(): Promise {
         try {
             // Get the selected embedding provider
-            const selectedProvider = await getSelectedEmbeddingProvider();
+            const selectedProvider = await getSelectedEmbeddingProviderName();
 
             if (selectedProvider) {
                 const provider = await getEmbeddingProvider(selectedProvider);
@@ -51,7 +51,7 @@ export class ProviderManager {
     async generateQueryEmbedding(query: string): Promise {
         try {
             // Get the preferred embedding provider
-            const provider = await this.getPreferredEmbeddingProvider();
+            const provider = await this.getSelectedEmbeddingProvider();
             if (!provider) {
                 log.error('No embedding provider available');
                 return null;
diff --git a/apps/server/src/services/llm/context/services/context_service.ts b/apps/server/src/services/llm/context/services/context_service.ts
index a227c3936..b4d4fd613 100644
--- a/apps/server/src/services/llm/context/services/context_service.ts
+++ b/apps/server/src/services/llm/context/services/context_service.ts
@@ -58,7 +58,7 @@ export class ContextService {
         this.initPromise = (async () => {
             try {
                 // Initialize provider
-                const provider = await providerManager.getPreferredEmbeddingProvider();
+                const provider = await providerManager.getSelectedEmbeddingProvider();
                 if (!provider) {
                     throw new Error(`No embedding provider available. Could not initialize context service.`);
                 }
@@ -224,7 +224,7 @@ export class ContextService {
         log.info(`Final combined results: ${relevantNotes.length} relevant notes`);
 
         // Step 4: Build context from the notes
-        const provider = await providerManager.getPreferredEmbeddingProvider();
+        const provider = await providerManager.getSelectedEmbeddingProvider();
         const providerId = provider?.name || 'default';
 
         const context = await contextFormatter.buildContextFromNotes(
diff --git a/apps/server/src/services/llm/context/services/vector_search_service.ts b/apps/server/src/services/llm/context/services/vector_search_service.ts
index 480ba05bd..98c7b993c 100644
--- a/apps/server/src/services/llm/context/services/vector_search_service.ts
+++ b/apps/server/src/services/llm/context/services/vector_search_service.ts
@@ -79,7 +79,7 @@ export class VectorSearchService {
             }
 
             // Get provider information
-            const provider = await providerManager.getPreferredEmbeddingProvider();
+            const provider = await providerManager.getSelectedEmbeddingProvider();
             if (!provider) {
                 log.error('No embedding provider available');
                 return [];
@@ -280,7 +280,7 @@ export class VectorSearchService {
             }
 
             // Get provider information
-            const provider = await providerManager.getPreferredEmbeddingProvider();
+            const provider = await providerManager.getSelectedEmbeddingProvider();
             if (!provider) {
                 log.error('No embedding provider available');
                 return [];
diff --git a/apps/server/src/services/llm/embeddings/index.ts b/apps/server/src/services/llm/embeddings/index.ts
index c931a1745..c4be44a2e 100644
--- a/apps/server/src/services/llm/embeddings/index.ts
+++ b/apps/server/src/services/llm/embeddings/index.ts
@@ -64,8 +64,6 @@ export const {
 
 export const {
     getEmbeddingStats,
-    reprocessAllNotes,
-    queueNotesForMissingEmbeddings,
     cleanupEmbeddings
 } = stats;
 
@@ -107,8 +105,6 @@ export default {
 
     // Stats and maintenance
     getEmbeddingStats: stats.getEmbeddingStats,
-    reprocessAllNotes: stats.reprocessAllNotes,
-    queueNotesForMissingEmbeddings: stats.queueNotesForMissingEmbeddings,
     cleanupEmbeddings: stats.cleanupEmbeddings,
 
     // Index operations
diff --git a/apps/server/src/services/llm/embeddings/stats.ts b/apps/server/src/services/llm/embeddings/stats.ts
index 7fa0d6d82..a8b594723 100644
--- a/apps/server/src/services/llm/embeddings/stats.ts
+++ b/apps/server/src/services/llm/embeddings/stats.ts
@@ -1,14 +1,5 @@
 import sql from "../../../services/sql.js";
 import log from "../../../services/log.js";
-import indexService from '../index_service.js';
-
-/**
- * Reprocess all notes to update embeddings
- * @deprecated Use indexService.reprocessAllNotes() directly instead
- */
-export async function reprocessAllNotes() {
-    return indexService.reprocessAllNotes();
-}
 
 /**
  * Get current embedding statistics
@@ -64,14 +55,6 @@ export async function getEmbeddingStats() {
     };
 }
 
-/**
- * Queue notes that don't have embeddings for current provider settings
- * @deprecated Use indexService.queueNotesForMissingEmbeddings() directly instead
- */
-export async function queueNotesForMissingEmbeddings() {
-    return indexService.queueNotesForMissingEmbeddings();
-}
-
 /**
  * Cleanup function to remove stale or unused embeddings
  */
diff --git a/apps/server/src/services/llm/embeddings/storage.ts b/apps/server/src/services/llm/embeddings/storage.ts
index ac096071f..bbd1eba9c 100644
--- a/apps/server/src/services/llm/embeddings/storage.ts
+++ b/apps/server/src/services/llm/embeddings/storage.ts
@@ -11,7 +11,7 @@ import { SEARCH_CONSTANTS } from '../constants/search_constants.js';
 import type { NoteEmbeddingContext } from "./embeddings_interface.js";
 import becca from "../../../becca/becca.js";
 import { isNoteExcludedFromAIById } from "../utils/ai_exclusion_utils.js";
-import { getEmbeddingProviderPrecedence } from '../config/configuration_helpers.js';
+import { getSelectedEmbeddingProvider } from '../config/configuration_helpers.js';
 
 interface Similarity {
     noteId: string;
@@ -277,9 +277,10 @@ export async function findSimilarNotes(
             log.info('No embeddings found for specified provider, trying fallback providers...');
 
             // Use the new configuration system - no string parsing!
-            const preferredProviders = await getEmbeddingProviderPrecedence();
+            const selectedProvider = await getSelectedEmbeddingProvider();
+            const preferredProviders = selectedProvider ? [selectedProvider] : [];
 
-            log.info(`Using provider precedence: ${preferredProviders.join(', ')}`);
+            log.info(`Using selected provider: ${selectedProvider || 'none'}`);
 
             // Try providers in precedence order
             for (const provider of preferredProviders) {
diff --git a/apps/server/src/services/llm/index_service.ts b/apps/server/src/services/llm/index_service.ts
index 08d79dcb1..957cfa7d7 100644
--- a/apps/server/src/services/llm/index_service.ts
+++ b/apps/server/src/services/llm/index_service.ts
@@ -266,7 +266,7 @@ export class IndexService {
                 this.indexRebuildTotal = totalNotes;
 
                 log.info("No embeddings found, starting full embedding generation first");
-                await vectorStore.reprocessAllNotes();
+                await this.reprocessAllNotes();
                 log.info("Full embedding generation initiated");
             } else {
                 // For index rebuild, use the number of embeddings as the total
@@ -293,7 +293,7 @@ export class IndexService {
             // Only start indexing if we're below 90% completion or if embeddings exist but need optimization
             if (stats.percentComplete < 90) {
                 log.info("Embedding coverage below 90%, starting full embedding generation");
-                await vectorStore.reprocessAllNotes();
+                await this.reprocessAllNotes();
                 log.info("Full embedding generation initiated");
             } else {
                 log.info(`Embedding coverage at ${stats.percentComplete}%, starting index optimization`);
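With the deprecated wrappers in embeddings/stats.ts gone, callers reach the index service directly, as the embeddings API route at the top of this patch already does (there the call is additionally wrapped in cls.init for request context). A small sketch of the direct calling pattern; the refreshEmbeddings helper and the import paths are illustrative, not part of the patch:

    import indexService from '../services/llm/index_service.js';
    import log from '../services/log.js';

    // Rebuild embeddings for all notes, then queue any notes that still lack
    // embeddings for the currently selected embedding provider.
    async function refreshEmbeddings(): Promise<void> {
        await indexService.reprocessAllNotes();
        await indexService.queueNotesForMissingEmbeddings();
        log.info("Embedding refresh requested");
    }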
diff --git a/apps/server/src/services/llm/interfaces/ai_service_interfaces.ts b/apps/server/src/services/llm/interfaces/ai_service_interfaces.ts
index 4130a8d55..52736cbd5 100644
--- a/apps/server/src/services/llm/interfaces/ai_service_interfaces.ts
+++ b/apps/server/src/services/llm/interfaces/ai_service_interfaces.ts
@@ -30,7 +30,7 @@ export interface AIServiceManagerConfig {
 export interface IAIServiceManager {
     getService(provider?: string): Promise;
     getAvailableProviders(): string[];
-    getPreferredProvider(): string;
+    getSelectedProvider(): string;
     isProviderAvailable(provider: string): boolean;
     getProviderMetadata(provider: string): ProviderMetadata | null;
     getAIEnabled(): boolean;
diff --git a/apps/server/src/services/llm/pipeline/stages/semantic_context_extraction_stage.ts b/apps/server/src/services/llm/pipeline/stages/semantic_context_extraction_stage.ts
index bf2cc8fd7..139510663 100644
--- a/apps/server/src/services/llm/pipeline/stages/semantic_context_extraction_stage.ts
+++ b/apps/server/src/services/llm/pipeline/stages/semantic_context_extraction_stage.ts
@@ -50,7 +50,7 @@ export class SemanticContextExtractionStage extends BasePipelineStage