feat(llm): remove provider precedence "list" in favor of a single provider selection for embedding/chat

This commit is contained in:
perf3ct 2025-06-04 17:40:08 +00:00
parent 8b2d951ad1
commit 5a25fb51d9
No known key found for this signature in database
GPG Key ID: 569C4EEC436F5232
7 changed files with 54 additions and 62 deletions

View File

@ -65,7 +65,7 @@ export default class AiSettingsWidget extends OptionsWidget {
// Core AI options
this.setupChangeHandler('.ai-enabled', 'aiEnabled', true, true);
this.setupChangeHandler('.ai-provider-precedence', 'aiProviderPrecedence', true);
this.setupChangeHandler('.ai-chat-provider', 'aiChatProvider', true);
this.setupChangeHandler('.ai-temperature', 'aiTemperature');
this.setupChangeHandler('.ai-system-prompt', 'aiSystemPrompt');
@ -132,7 +132,7 @@ export default class AiSettingsWidget extends OptionsWidget {
this.setupChangeHandler('.enable-automatic-indexing', 'enableAutomaticIndexing', false, true);
this.setupChangeHandler('.embedding-similarity-threshold', 'embeddingSimilarityThreshold');
this.setupChangeHandler('.max-notes-per-llm-query', 'maxNotesPerLlmQuery');
this.setupChangeHandler('.embedding-provider-precedence', 'embeddingProviderPrecedence', true);
this.setupChangeHandler('.ai-embedding-provider', 'aiEmbeddingProvider', true);
this.setupChangeHandler('.embedding-dimension-strategy', 'embeddingDimensionStrategy');
this.setupChangeHandler('.embedding-batch-size', 'embeddingBatchSize');
this.setupChangeHandler('.embedding-update-interval', 'embeddingUpdateInterval');
@ -194,42 +194,26 @@ export default class AiSettingsWidget extends OptionsWidget {
return;
}
// Get provider precedence
const providerPrecedence = (this.$widget.find('.ai-provider-precedence').val() as string || '').split(',');
// Get selected chat provider
const selectedChatProvider = this.$widget.find('.ai-chat-provider').val() as string;
// Check for OpenAI configuration if it's in the precedence list
const openaiWarnings: string[] = [];
if (providerPrecedence.includes('openai')) {
// Check for configuration issues with the selected provider
const chatWarnings: string[] = [];
if (selectedChatProvider === 'openai') {
const openaiApiKey = this.$widget.find('.openai-api-key').val();
if (!openaiApiKey) {
openaiWarnings.push(t("ai_llm.empty_key_warning.openai"));
chatWarnings.push(t("ai_llm.empty_key_warning.openai"));
}
}
// Check for Anthropic configuration if it's in the precedence list
const anthropicWarnings: string[] = [];
if (providerPrecedence.includes('anthropic')) {
} else if (selectedChatProvider === 'anthropic') {
const anthropicApiKey = this.$widget.find('.anthropic-api-key').val();
if (!anthropicApiKey) {
anthropicWarnings.push(t("ai_llm.empty_key_warning.anthropic"));
chatWarnings.push(t("ai_llm.empty_key_warning.anthropic"));
}
}
// Check for Voyage configuration if it's in the precedence list
const voyageWarnings: string[] = [];
if (providerPrecedence.includes('voyage')) {
const voyageApiKey = this.$widget.find('.voyage-api-key').val();
if (!voyageApiKey) {
voyageWarnings.push(t("ai_llm.empty_key_warning.voyage"));
}
}
// Check for Ollama configuration if it's in the precedence list
const ollamaWarnings: string[] = [];
if (providerPrecedence.includes('ollama')) {
} else if (selectedChatProvider === 'ollama') {
const ollamaBaseUrl = this.$widget.find('.ollama-base-url').val();
if (!ollamaBaseUrl) {
ollamaWarnings.push(t("ai_llm.ollama_no_url"));
chatWarnings.push(t("ai_llm.ollama_no_url"));
}
}
@ -238,27 +222,20 @@ export default class AiSettingsWidget extends OptionsWidget {
const embeddingsEnabled = this.$widget.find('.enable-automatic-indexing').prop('checked');
if (embeddingsEnabled) {
const embeddingProviderPrecedence = (this.$widget.find('.embedding-provider-precedence').val() as string || '').split(',');
const selectedEmbeddingProvider = this.$widget.find('.ai-embedding-provider').val() as string;
if (embeddingProviderPrecedence.includes('openai') && !this.$widget.find('.openai-api-key').val()) {
if (selectedEmbeddingProvider === 'openai' && !this.$widget.find('.openai-api-key').val()) {
embeddingWarnings.push(t("ai_llm.empty_key_warning.openai"));
}
if (embeddingProviderPrecedence.includes('voyage') && !this.$widget.find('.voyage-api-key').val()) {
} else if (selectedEmbeddingProvider === 'voyage' && !this.$widget.find('.voyage-api-key').val()) {
embeddingWarnings.push(t("ai_llm.empty_key_warning.voyage"));
}
if (embeddingProviderPrecedence.includes('ollama') && !this.$widget.find('.ollama-base-url').val()) {
} else if (selectedEmbeddingProvider === 'ollama' && !this.$widget.find('.ollama-base-url').val()) {
embeddingWarnings.push(t("ai_llm.empty_key_warning.ollama"));
}
}
// Combine all warnings
const allWarnings = [
...openaiWarnings,
...anthropicWarnings,
...voyageWarnings,
...ollamaWarnings,
...chatWarnings,
...embeddingWarnings
];
@ -459,7 +436,7 @@ export default class AiSettingsWidget extends OptionsWidget {
this.$widget.find('.ai-enabled').prop('checked', options.aiEnabled !== 'false');
this.$widget.find('.ai-temperature').val(options.aiTemperature || '0.7');
this.$widget.find('.ai-system-prompt').val(options.aiSystemPrompt || '');
this.$widget.find('.ai-provider-precedence').val(options.aiProviderPrecedence || 'openai,anthropic,ollama');
this.$widget.find('.ai-chat-provider').val(options.aiChatProvider || '');
// OpenAI Section
this.$widget.find('.openai-api-key').val(options.openaiApiKey || '');
@ -482,7 +459,7 @@ export default class AiSettingsWidget extends OptionsWidget {
this.$widget.find('.ollama-embedding-model').val(options.ollamaEmbeddingModel || 'nomic-embed-text');
// Embedding Options
this.$widget.find('.embedding-provider-precedence').val(options.embeddingProviderPrecedence || 'openai,voyage,ollama,local');
this.$widget.find('.ai-embedding-provider').val(options.aiEmbeddingProvider || '');
this.$widget.find('.embedding-auto-update-enabled').prop('checked', options.embeddingAutoUpdateEnabled !== 'false');
this.$widget.find('.enable-automatic-indexing').prop('checked', options.enableAutomaticIndexing !== 'false');
this.$widget.find('.embedding-similarity-threshold').val(options.embeddingSimilarityThreshold || '0.75');

View File

@ -61,9 +61,14 @@ export const TPL = `
<h4>${t("ai_llm.provider_configuration")}</h4>
<div class="form-group">
<label>${t("ai_llm.provider_precedence")}</label>
<input type="text" class="ai-provider-precedence form-control" placeholder="openai,anthropic,ollama">
<div class="form-text">${t("ai_llm.provider_precedence_description")}</div>
<label>${t("ai_llm.chat_provider")}</label>
<select class="ai-chat-provider form-control">
<option value="">-- Select a provider --</option>
<option value="openai">OpenAI</option>
<option value="anthropic">Anthropic</option>
<option value="ollama">Ollama</option>
</select>
<div class="form-text">${t("ai_llm.chat_provider_description")}</div>
</div>
<div class="form-group">
@ -225,9 +230,15 @@ export const TPL = `
<h4>${t("ai_llm.embeddings_configuration")}</h4>
<div class="form-group">
<label class="embedding-provider-label">${t("ai_llm.embedding_provider_precedence")}</label>
<input type="text" class="embedding-provider-precedence form-control" placeholder="openai,voyage,ollama,local">
<div class="form-text">${t("ai_llm.embedding_provider_precedence_description")}</div>
<label class="embedding-provider-label">${t("ai_llm.embedding_provider")}</label>
<select class="ai-embedding-provider form-control">
<option value="">-- Select a provider --</option>
<option value="openai">OpenAI</option>
<option value="voyage">Voyage</option>
<option value="ollama">Ollama</option>
<option value="local">Local</option>
</select>
<div class="form-text">${t("ai_llm.embedding_provider_description")}</div>
</div>
<div class="form-group">
@ -297,9 +308,4 @@ export const TPL = `
<div class="form-text">${t("ai_llm.rebuild_index_description")}</div>
</div>
<!-- Note about embedding provider precedence -->
<div class="form-group mt-3">
<h5>${t("ai_llm.embedding_providers_order")}</h5>
<div class="form-text mt-2">${t("ai_llm.embedding_providers_order_description")}</div>
</div>
</div>`;

View File

@ -8,6 +8,7 @@ import type { Request } from "express";
import { changeLanguage, getLocales } from "../../services/i18n.js";
import type { OptionNames } from "@triliumnext/commons";
import config from "../../services/config.js";
import aiServiceManager from "../../services/llm/ai_service_manager.js";
interface UserTheme {
val: string; // value of the theme, used in the URL
@ -95,7 +96,7 @@ const ALLOWED_OPTIONS = new Set<OptionNames>([
"aiEnabled",
"aiTemperature",
"aiSystemPrompt",
"aiProviderPrecedence",
"aiChatProvider",
"openaiApiKey",
"openaiBaseUrl",
"openaiDefaultModel",
@ -110,7 +111,7 @@ const ALLOWED_OPTIONS = new Set<OptionNames>([
"ollamaEmbeddingModel",
"embeddingAutoUpdateEnabled",
"embeddingDimensionStrategy",
"embeddingProviderPrecedence",
"aiEmbeddingProvider",
"embeddingSimilarityThreshold",
"embeddingBatchSize",
"embeddingUpdateInterval",
@ -178,6 +179,14 @@ function update(name: string, value: string) {
changeLanguage(value);
}
// Reinitialize AI service manager when provider settings change
if (name === "aiChatProvider" || name === "aiEmbeddingProvider") {
// Run asynchronously to avoid blocking the response
aiServiceManager.reinitialize().catch(error => {
log.error(`Failed to reinitialize AI service manager: ${error}`);
});
}
return true;
}

View File

@ -273,7 +273,7 @@ export async function findSimilarNotes(
} else {
// Use the dedicated embedding provider selected in options for other strategies
let preferredProviders: string[] = [];
const embeddingPrecedence = await options.getOption('embeddingProviderPrecedence');
const embeddingPrecedence = await options.getOption('aiEmbeddingProvider');
if (embeddingPrecedence) {
// NOTE(review): aiEmbeddingProvider now holds a single provider name, not "comma,separated,values" — confirm the split logic below is still needed

View File

@ -501,7 +501,7 @@ export class IndexService {
const options = (await import('../options.js')).default;
let preferredProviders: string[] = [];
const embeddingPrecedence = await options.getOption('embeddingProviderPrecedence');
const embeddingPrecedence = await options.getOption('aiEmbeddingProvider');
let provider;
if (embeddingPrecedence) {

View File

@ -212,9 +212,9 @@ const defaultOptions: DefaultOption[] = [
// Adding missing AI options
{ name: "aiTemperature", value: "0.7", isSynced: true },
{ name: "aiSystemPrompt", value: "", isSynced: true },
{ name: "aiProviderPrecedence", value: "openai,anthropic,ollama", isSynced: true },
{ name: "aiChatProvider", value: "openai", isSynced: true },
{ name: "embeddingDimensionStrategy", value: "auto", isSynced: true },
{ name: "embeddingProviderPrecedence", value: "openai,voyage,ollama,local", isSynced: true },
{ name: "aiEmbeddingProvider", value: "openai", isSynced: true },
{ name: "embeddingSimilarityThreshold", value: "0.75", isSynced: true },
{ name: "enableAutomaticIndexing", value: "true", isSynced: true },
{ name: "maxNotesPerLlmQuery", value: "3", isSynced: true },

View File

@ -142,7 +142,7 @@ export interface OptionDefinitions extends KeyboardShortcutsOptions<KeyboardActi
ollamaDefaultModel: string;
ollamaEmbeddingModel: string;
codeOpenAiModel: string;
aiProviderPrecedence: string;
aiChatProvider: string;
// Embedding-related options
embeddingAutoUpdateEnabled: boolean;
@ -150,7 +150,7 @@ export interface OptionDefinitions extends KeyboardShortcutsOptions<KeyboardActi
embeddingBatchSize: number;
embeddingDefaultDimension: number;
embeddingsDefaultProvider: string;
embeddingProviderPrecedence: string;
aiEmbeddingProvider: string;
enableAutomaticIndexing: boolean;
embeddingGenerationLocation: string;
embeddingDimensionStrategy: string;