From fe15a0378a0a559404b570d335134a42d749230b Mon Sep 17 00:00:00 2001
From: perf3ct
Date: Wed, 4 Jun 2025 20:23:06 +0000
Subject: [PATCH] fix(llm): have the model_selection_stage use the instance of
 the aiServiceManager

---
 .../pipeline/stages/model_selection_stage.ts | 170 +++++-------------
 1 file changed, 49 insertions(+), 121 deletions(-)

diff --git a/apps/server/src/services/llm/pipeline/stages/model_selection_stage.ts b/apps/server/src/services/llm/pipeline/stages/model_selection_stage.ts
index 1be17d7d7..a1c595b18 100644
--- a/apps/server/src/services/llm/pipeline/stages/model_selection_stage.ts
+++ b/apps/server/src/services/llm/pipeline/stages/model_selection_stage.ts
@@ -100,25 +100,28 @@ export class ModelSelectionStage extends BasePipelineStage {
         try {
-            // Use the new single provider configuration system
+            // Use the same logic as the main process method
+            const { getValidModelConfig, getSelectedProvider } = await import('../../config/configuration_helpers.js');
             const selectedProvider = await getSelectedProvider();

             if (!selectedProvider) {
                 throw new Error('No AI provider is selected. Please select a provider in your AI settings.');
             }

-            // Check if the provider is available
+            // Check if the provider is available through the service manager
             if (!aiServiceManager.isProviderAvailable(selectedProvider)) {
                 throw new Error(`Selected provider ${selectedProvider} is not available`);
             }

-            // Get the default model for the selected provider
-            const defaultModel = await getDefaultModelForProvider(selectedProvider);
-
-            if (!defaultModel) {
+            // Try to get a valid model config
+            const modelConfig = await getValidModelConfig(selectedProvider);
+
+            if (!modelConfig) {
                 throw new Error(`No default model configured for provider ${selectedProvider}. Please configure a default model in your AI settings.`);
             }
@@ -243,12 +248,12 @@ export class ModelSelectionStage extends BasePipelineStage {
         try {
-            log.info(`Fetching available models for provider ${provider}`);
+            log.info(`Getting default model for provider ${provider} using AI service manager`);

-            // Import server-side options to update the default model
-            const optionService = (await import('../../../options.js')).default;
+            // Use the existing AI service manager instead of duplicating API calls
+            const service = aiServiceManager.getInstance().getService(provider);

-            switch (provider) {
-                case 'openai':
-                    const openaiModels = await this.fetchOpenAIModels();
-                    if (openaiModels.length > 0) {
-                        // Use the first available model without any preferences
-                        const selectedModel = openaiModels[0];
+            if (!service || !service.isAvailable()) {
+                log.info(`Provider ${provider} service is not available`);
+                return null;
+            }
+
+            // Check if the service has a method to get available models
+            if (typeof (service as any).getAvailableModels === 'function') {
+                try {
+                    const models = await (service as any).getAvailableModels();
+                    if (models && models.length > 0) {
+                        // Use the first available model - no hardcoded preferences
+                        const selectedModel = models[0];

-                        await optionService.setOption('openaiDefaultModel', selectedModel);
-                        log.info(`Set default OpenAI model to: ${selectedModel}`);
+                        // Import server-side options to update the default model
+                        const optionService = (await import('../../../options.js')).default;
+                        const optionKey = `${provider}DefaultModel` as const;
+
+                        await optionService.setOption(optionKey, selectedModel);
+                        log.info(`Set default ${provider} model to: ${selectedModel}`);
                         return selectedModel;
                     }
-                    break;
-
-                case 'anthropic':
-                    const anthropicModels = await this.fetchAnthropicModels();
-                    if (anthropicModels.length > 0) {
-                        // Use the first available model without any preferences
-                        const selectedModel = anthropicModels[0];
-
-                        await optionService.setOption('anthropicDefaultModel', selectedModel);
-                        log.info(`Set default Anthropic model to: ${selectedModel}`);
-                        return selectedModel;
-                    }
-                    break;
-
-                case 'ollama':
-                    const ollamaModels = await this.fetchOllamaModels();
-                    if (ollamaModels.length > 0) {
-                        // Use the first available model without any preferences
-                        const selectedModel = ollamaModels[0];
-
-                        await optionService.setOption('ollamaDefaultModel', selectedModel);
-                        log.info(`Set default Ollama model to: ${selectedModel}`);
-                        return selectedModel;
-                    }
-                    break;
+                } catch (modelError) {
+                    log.error(`Error fetching models from ${provider} service: ${modelError}`);
+                }
             }

-            log.info(`No models available for provider ${provider}`);
+            log.info(`Provider ${provider} does not support dynamic model fetching`);
             return null;
         } catch (error) {
-            log.error(`Error fetching models for provider ${provider}: ${error}`);
+            log.error(`Error getting default model for provider ${provider}: ${error}`);
             return null;
         }
     }
-
-    /**
-     * Fetch available OpenAI models
-     */
-    private async fetchOpenAIModels(): Promise<string[]> {
-        try {
-            // Use the provider service to get available models
-            const aiServiceManager = (await import('../../ai_service_manager.js')).default;
-            const service = aiServiceManager.getInstance().getService('openai');
-
-            if (service && typeof (service as any).getAvailableModels === 'function') {
-                return await (service as any).getAvailableModels();
-            }
-
-            // No fallback - return empty array if models can't be fetched
-            log.info('OpenAI service does not support getAvailableModels method');
-            return [];
-        } catch (error) {
-            log.error(`Error fetching OpenAI models: ${error}`);
-            return [];
-        }
-    }
-
-    /**
-     * Fetch available Anthropic models
-     */
-    private async fetchAnthropicModels(): Promise<string[]> {
-        try {
-            // Use the provider service to get available models
-            const aiServiceManager = (await import('../../ai_service_manager.js')).default;
-            const service = aiServiceManager.getInstance().getService('anthropic');
-
-            if (service && typeof (service as any).getAvailableModels === 'function') {
-                return await (service as any).getAvailableModels();
-            }
-
-            // No fallback - return empty array if models can't be fetched
-            log.info('Anthropic service does not support getAvailableModels method');
-            return [];
-        } catch (error) {
-            log.error(`Error fetching Anthropic models: ${error}`);
-            return [];
-        }
-    }
-
-    /**
-     * Fetch available Ollama models
-     */
-    private async fetchOllamaModels(): Promise<string[]> {
-        try {
-            // Use the provider service to get available models
-            const aiServiceManager = (await import('../../ai_service_manager.js')).default;
-            const service = aiServiceManager.getInstance().getService('ollama');
-
-            if (service && typeof (service as any).getAvailableModels === 'function') {
-                return await (service as any).getAvailableModels();
-            }
-
-            // No fallback - return empty array if models can't be fetched
-            log.info('Ollama service does not support getAvailableModels method');
-            return [];
-        } catch (error) {
-            log.error(`Error fetching Ollama models: ${error}`);
-            return [];
-        }
-    }
 }
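
Read without diff markers, the provider-agnostic selection logic added above boils
down to the sketch below. `AIService`, `pickDefaultModel`, and the in-memory
`options` map are simplified stand-ins for illustration only; the real service
interface, the aiServiceManager singleton, and the options store live in
apps/server/src/services/llm and ../options.js:

    // Simplified stand-in for the project's service interface (assumption,
    // not the real type): availability check plus optional model listing.
    interface AIService {
        isAvailable(): boolean;
        getAvailableModels?(): Promise<string[]>;
    }

    // Stand-in for the options service used via setOption() in the patch.
    const options = new Map<string, string>();

    async function pickDefaultModel(provider: string, service?: AIService): Promise<string | null> {
        // Mirror of the patched flow: bail out if the provider's service
        // is missing or reports itself unavailable.
        if (!service || !service.isAvailable()) return null;

        if (typeof service.getAvailableModels === 'function') {
            try {
                const models = await service.getAvailableModels();
                if (models.length > 0) {
                    // First available model, no hardcoded preferences.
                    const selected = models[0];
                    options.set(`${provider}DefaultModel`, selected);
                    return selected;
                }
            } catch {
                // Provider reachable but listing failed: fall through to "no default".
            }
        }
        return null;
    }

    // Usage with a fake provider:
    const fakeOllama: AIService = {
        isAvailable: () => true,
        getAvailableModels: async () => ['llama3', 'mistral'],
    };
    pickDefaultModel('ollama', fakeOllama).then((m) => console.log(m)); // -> llama3

One consequence of keying the option as `${provider}DefaultModel` is that a
single code path serves openai, anthropic, and ollama, which is what lets the
patch delete the three near-identical fetch helpers.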