Mirror of https://github.com/TriliumNext/Notes.git, synced 2025-08-10 10:22:29 +08:00
fix openai endpoints
commit 5aef80f4cf
parent 8d8c34c0a2
@@ -17,7 +17,7 @@ export class OpenAIService extends BaseAIService {
         }
 
         const apiKey = options.getOption('openaiApiKey');
-        const baseUrl = options.getOption('openaiBaseUrl') || 'https://api.openai.com';
+        const baseUrl = options.getOption('openaiBaseUrl') || 'https://api.openai.com/v1';
         const model = opts.model || options.getOption('openaiDefaultModel') || 'gpt-3.5-turbo';
         const temperature = opts.temperature !== undefined
             ? opts.temperature
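Note (not part of the commit): the stored openaiBaseUrl option still takes precedence; the hard-coded string is only the fallback, and it now points at the versioned API root. A minimal sketch of that defaulting pattern, using a hypothetical standalone helper:

function resolveBaseUrl(configuredBaseUrl?: string): string {
    // An unset option (undefined or empty string) falls through || to the new default.
    return configuredBaseUrl || 'https://api.openai.com/v1';
}

resolveBaseUrl();                          // "https://api.openai.com/v1"
resolveBaseUrl('https://my-proxy.local');  // kept as-is when explicitly configured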
@@ -32,7 +32,11 @@ export class OpenAIService extends BaseAIService {
             : [{ role: 'system', content: systemPrompt }, ...messages];
 
         try {
-            const endpoint = `${baseUrl.replace(/\/+$/, '')}/v1/chat/completions`;
+            // Fix endpoint construction - ensure we don't double up on /v1
+            const normalizedBaseUrl = baseUrl.replace(/\/+$/, '');
+            const endpoint = normalizedBaseUrl.includes('/v1')
+                ? `${normalizedBaseUrl}/chat/completions`
+                : `${normalizedBaseUrl}/v1/chat/completions`;
 
             const response = await fetch(endpoint, {
                 method: 'POST',
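This hunk is the core of the endpoint fix: a configured base URL may already end in /v1 (the new default does), so the version segment is appended only when missing. A standalone sketch of the same logic, with a hypothetical helper name for illustration:

function buildChatCompletionsEndpoint(baseUrl: string): string {
    // Strip trailing slashes so the join never produces "//".
    const normalized = baseUrl.replace(/\/+$/, '');
    // Append /v1 only when the configured URL does not already carry it.
    return normalized.includes('/v1')
        ? `${normalized}/chat/completions`
        : `${normalized}/v1/chat/completions`;
}

buildChatCompletionsEndpoint('https://api.openai.com/v1');  // https://api.openai.com/v1/chat/completions
buildChatCompletionsEndpoint('https://api.openai.com/');    // https://api.openai.com/v1/chat/completions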
@@ -50,15 +50,33 @@ Example: ["exact topic mentioned", "related concept 1", "related concept 2"]`;
 
         this.initPromise = (async () => {
             try {
-                const providerId = await options.getOption('embeddingsDefaultProvider') || 'ollama';
+                const providerId = await options.getOption('embeddingsDefaultProvider') || 'openai';
                 this.provider = providerManager.getEmbeddingProvider(providerId);
 
+                // If specified provider not found, try openai as a fallback
+                if (!this.provider && providerId !== 'openai') {
+                    log.info(`Embedding provider ${providerId} not found, trying openai as fallback`);
+                    this.provider = providerManager.getEmbeddingProvider('openai');
+                }
+
+                // If openai not found, try ollama as a second fallback
+                if (!this.provider && providerId !== 'ollama') {
+                    log.info(`Embedding provider openai not found, trying ollama as fallback`);
+                    this.provider = providerManager.getEmbeddingProvider('ollama');
+                }
+
+                // Final fallback to local provider which should always exist
                 if (!this.provider) {
-                    throw new Error(`Embedding provider ${providerId} not found`);
+                    log.info(`No embedding provider found, falling back to local provider`);
+                    this.provider = providerManager.getEmbeddingProvider('local');
+                }
+
+                if (!this.provider) {
+                    throw new Error(`No embedding provider available. Could not initialize context service.`);
                 }
 
                 this.initialized = true;
-                log.info(`Trilium context service initialized with provider: ${providerId}`);
+                log.info(`Trilium context service initialized with provider: ${this.provider.name}`);
             } catch (error: unknown) {
                 const errorMessage = error instanceof Error ? error.message : String(error);
                 log.error(`Failed to initialize Trilium context service: ${errorMessage}`);
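The second half of the commit replaces the hard failure with a fallback chain: the configured provider first, then openai, then ollama, then the local provider, and only then an error. A sketch of that order written as a loop (the registry below is a mock for illustration; in the commit the lookup goes through providerManager.getEmbeddingProvider()):

interface EmbeddingProvider { name: string; }

const registry: Record<string, EmbeddingProvider | undefined> = {
    openai: { name: 'openai' },
    local: { name: 'local' },   // the local provider is expected to always exist
};

function pickEmbeddingProvider(preferredId: string): EmbeddingProvider {
    // Same order as the diff: configured id, openai, ollama, local.
    for (const id of [preferredId, 'openai', 'ollama', 'local']) {
        const provider = registry[id];
        if (provider) {
            return provider;
        }
    }
    throw new Error('No embedding provider available. Could not initialize context service.');
}

pickEmbeddingProvider('unregistered-id').name;  // falls back to "openai"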