From 5aef80f4cf16f84e592e4df61bc289f1e1793191 Mon Sep 17 00:00:00 2001
From: perf3ct
Date: Mon, 17 Mar 2025 19:36:58 +0000
Subject: [PATCH] fix openai endpoints

---
 src/services/llm/providers/openai_service.ts |  8 +++++--
 src/services/llm/trilium_context_service.ts  | 24 +++++++++++++++++---
 2 files changed, 27 insertions(+), 5 deletions(-)

diff --git a/src/services/llm/providers/openai_service.ts b/src/services/llm/providers/openai_service.ts
index f645f3433..90ded4544 100644
--- a/src/services/llm/providers/openai_service.ts
+++ b/src/services/llm/providers/openai_service.ts
@@ -17,7 +17,7 @@ export class OpenAIService extends BaseAIService {
         }
 
         const apiKey = options.getOption('openaiApiKey');
-        const baseUrl = options.getOption('openaiBaseUrl') || 'https://api.openai.com';
+        const baseUrl = options.getOption('openaiBaseUrl') || 'https://api.openai.com/v1';
         const model = opts.model || options.getOption('openaiDefaultModel') || 'gpt-3.5-turbo';
         const temperature = opts.temperature !== undefined
             ? opts.temperature
@@ -32,7 +32,11 @@ export class OpenAIService extends BaseAIService {
             : [{ role: 'system', content: systemPrompt }, ...messages];
 
         try {
-            const endpoint = `${baseUrl.replace(/\/+$/, '')}/v1/chat/completions`;
+            // Fix endpoint construction - ensure we don't double up on /v1
+            const normalizedBaseUrl = baseUrl.replace(/\/+$/, '');
+            const endpoint = normalizedBaseUrl.includes('/v1')
+                ? `${normalizedBaseUrl}/chat/completions`
+                : `${normalizedBaseUrl}/v1/chat/completions`;
 
             const response = await fetch(endpoint, {
                 method: 'POST',
diff --git a/src/services/llm/trilium_context_service.ts b/src/services/llm/trilium_context_service.ts
index f56021d5e..728d720ef 100644
--- a/src/services/llm/trilium_context_service.ts
+++ b/src/services/llm/trilium_context_service.ts
@@ -50,15 +50,33 @@ Example: ["exact topic mentioned", "related concept 1", "related concept 2"]`;
 
         this.initPromise = (async () => {
             try {
-                const providerId = await options.getOption('embeddingsDefaultProvider') || 'ollama';
+                const providerId = await options.getOption('embeddingsDefaultProvider') || 'openai';
                 this.provider = providerManager.getEmbeddingProvider(providerId);
 
+                // If specified provider not found, try openai as a fallback
+                if (!this.provider && providerId !== 'openai') {
+                    log.info(`Embedding provider ${providerId} not found, trying openai as fallback`);
+                    this.provider = providerManager.getEmbeddingProvider('openai');
+                }
+
+                // If openai not found, try ollama as a second fallback
+                if (!this.provider && providerId !== 'ollama') {
+                    log.info(`Embedding provider openai not found, trying ollama as fallback`);
+                    this.provider = providerManager.getEmbeddingProvider('ollama');
+                }
+
+                // Final fallback to local provider which should always exist
                 if (!this.provider) {
-                    throw new Error(`Embedding provider ${providerId} not found`);
+                    log.info(`No embedding provider found, falling back to local provider`);
+                    this.provider = providerManager.getEmbeddingProvider('local');
+                }
+
+                if (!this.provider) {
+                    throw new Error(`No embedding provider available. Could not initialize context service.`);
                 }
 
                 this.initialized = true;
-                log.info(`Trilium context service initialized with provider: ${providerId}`);
+                log.info(`Trilium context service initialized with provider: ${this.provider.name}`);
             } catch (error: unknown) {
                 const errorMessage = error instanceof Error ? error.message : String(error);
                 log.error(`Failed to initialize Trilium context service: ${errorMessage}`);
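
Appended below the patch is a minimal standalone sketch of the endpoint normalization it introduces; the buildChatCompletionsEndpoint helper name is hypothetical, and only the trailing-slash strip plus the includes('/v1') check are taken from the diff above.

    // Hypothetical helper mirroring the endpoint construction added in openai_service.ts:
    // strip trailing slashes, then append /v1 only when the base URL does not already contain it.
    function buildChatCompletionsEndpoint(baseUrl: string): string {
        const normalized = baseUrl.replace(/\/+$/, '');
        return normalized.includes('/v1')
            ? `${normalized}/chat/completions`
            : `${normalized}/v1/chat/completions`;
    }

    // Both base URL forms resolve to the same endpoint:
    console.log(buildChatCompletionsEndpoint('https://api.openai.com/v1/')); // https://api.openai.com/v1/chat/completions
    console.log(buildChatCompletionsEndpoint('https://api.openai.com'));     // https://api.openai.com/v1/chat/completions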