diff --git a/src/public/app/widgets/llm_chat_panel.ts b/src/public/app/widgets/llm_chat_panel.ts
index 4c9eb5c4d..aaedb1d21 100644
--- a/src/public/app/widgets/llm_chat_panel.ts
+++ b/src/public/app/widgets/llm_chat_panel.ts
@@ -757,9 +757,6 @@ export default class LlmChatPanel extends BasicWidget {
return;
}
- // Get the default embedding provider
- const defaultProvider = options.get('embeddingsDefaultProvider') || 'openai';
-
// Get provider precedence
const precedenceStr = options.get('aiProviderPrecedence') || 'openai,anthropic,ollama';
let precedenceList: string[] = [];
@@ -800,8 +797,6 @@ export default class LlmChatPanel extends BasicWidget {
enabledProviders.push('local');
// Perform validation checks
- const defaultInPrecedence = precedenceList.includes(defaultProvider);
- const defaultIsEnabled = enabledProviders.includes(defaultProvider);
const allPrecedenceEnabled = precedenceList.every((p: string) => enabledProviders.includes(p));
// Get embedding queue status
@@ -820,19 +815,11 @@ export default class LlmChatPanel extends BasicWidget {
const hasEmbeddingsInQueue = queuedNotes > 0;
// Show warning if there are issues
- if (!defaultInPrecedence || !defaultIsEnabled || !allPrecedenceEnabled || hasEmbeddingsInQueue) {
+ if (!allPrecedenceEnabled || hasEmbeddingsInQueue) {
let message = 'AI Provider Configuration Issues';
message += '\n';
- if (!defaultInPrecedence) {
- message += `- The default embedding provider "${defaultProvider}" is not in your provider precedence list.\n`;
- }
-
- if (!defaultIsEnabled) {
- message += `- The default embedding provider "${defaultProvider}" is not enabled.\n`;
- }
-
if (!allPrecedenceEnabled) {
const disabledProviders = precedenceList.filter((p: string) => !enabledProviders.includes(p));
message += `- The following providers in your precedence list are not enabled: ${disabledProviders.join(', ')}.\n`;
diff --git a/src/public/app/widgets/type_widgets/options/ai_settings/ai_settings_widget.ts b/src/public/app/widgets/type_widgets/options/ai_settings/ai_settings_widget.ts
index aa5e35f28..9df6e4d1a 100644
--- a/src/public/app/widgets/type_widgets/options/ai_settings/ai_settings_widget.ts
+++ b/src/public/app/widgets/type_widgets/options/ai_settings/ai_settings_widget.ts
@@ -132,9 +132,8 @@ export default class AiSettingsWidget extends OptionsWidget {
this.setupChangeHandler('.enable-automatic-indexing', 'enableAutomaticIndexing', false, true);
this.setupChangeHandler('.embedding-similarity-threshold', 'embeddingSimilarityThreshold');
this.setupChangeHandler('.max-notes-per-llm-query', 'maxNotesPerLlmQuery');
- this.setupChangeHandler('.embedding-default-provider', 'embeddingsDefaultProvider', true);
- this.setupChangeHandler('.embedding-dimension-strategy', 'embeddingDimensionStrategy');
this.setupChangeHandler('.embedding-provider-precedence', 'embeddingProviderPrecedence', true);
+ this.setupChangeHandler('.embedding-dimension-strategy', 'embeddingDimensionStrategy');
// No sortable behavior needed anymore
@@ -504,4 +503,4 @@ export default class AiSettingsWidget extends OptionsWidget {
this.indexRebuildRefreshInterval = null;
}
}
-}
\ No newline at end of file
+}
diff --git a/src/routes/api/options.ts b/src/routes/api/options.ts
index 6fdd32d12..bbb351f25 100644
--- a/src/routes/api/options.ts
+++ b/src/routes/api/options.ts
@@ -83,36 +83,32 @@ const ALLOWED_OPTIONS = new Set([
// AI/LLM integration options
"aiEnabled",
+ "aiTemperature",
+ "aiSystemPrompt",
+ "aiProviderPrecedence",
"openaiApiKey",
+ "openaiBaseUrl",
"openaiDefaultModel",
"openaiEmbeddingModel",
- "openaiBaseUrl",
"anthropicApiKey",
- "anthropicDefaultModel",
- "voyageEmbeddingModel",
- "voyageApiKey",
"anthropicBaseUrl",
- "ollamaEnabled",
+ "anthropicDefaultModel",
+ "voyageApiKey",
+ "voyageEmbeddingModel",
"ollamaBaseUrl",
"ollamaDefaultModel",
"ollamaEmbeddingModel",
- "aiProviderPrecedence",
- "aiTemperature",
- "aiSystemPrompt",
-
- // Embedding options
"embeddingAutoUpdateEnabled",
- "embeddingBatchSize",
- "embeddingUpdateInterval",
- "embeddingDefaultDimension",
- "embeddingsDefaultProvider",
+ "embeddingDimensionStrategy",
"embeddingProviderPrecedence",
"embeddingSimilarityThreshold",
- "maxNotesPerLlmQuery",
+ "embeddingBatchSize",
+ "embeddingUpdateInterval",
"enableAutomaticIndexing",
- "embeddingGenerationLocation",
- "embeddingDimensionStrategy",
- "splitEditorOrientation",
+ "maxNotesPerLlmQuery",
+
+ // Embedding options
+ "embeddingDefaultDimension",
"mfaEnabled",
"mfaMethod"
]);
diff --git a/src/services/llm/ai_service_manager.ts b/src/services/llm/ai_service_manager.ts
index d0cb3fb68..d7ee0d3b2 100644
--- a/src/services/llm/ai_service_manager.ts
+++ b/src/services/llm/ai_service_manager.ts
@@ -115,9 +115,6 @@ export class AIServiceManager implements IAIServiceManager {
return null;
}
- // Get default embedding provider
- const defaultProviderName = await options.getOption('embeddingsDefaultProvider') || 'openai';
-
// Parse provider precedence list (similar to updateProviderOrder)
let precedenceList: string[] = [];
const precedenceOption = await options.getOption('aiProviderPrecedence');
@@ -138,28 +135,14 @@ export class AIServiceManager implements IAIServiceManager {
const enabledProviders = await getEnabledEmbeddingProviders();
const enabledProviderNames = enabledProviders.map(p => p.name);
- // Check if default provider is in precedence list
- const defaultInPrecedence = precedenceList.includes(defaultProviderName);
-
- // Check if default provider is enabled
- const defaultIsEnabled = enabledProviderNames.includes(defaultProviderName);
-
// Check if all providers in precedence list are enabled
const allPrecedenceEnabled = precedenceList.every(p =>
enabledProviderNames.includes(p) || p === 'local');
// Return warning message if there are issues
- if (!defaultInPrecedence || !defaultIsEnabled || !allPrecedenceEnabled) {
+ if (!allPrecedenceEnabled) {
let message = 'There are issues with your AI provider configuration:';
- if (!defaultInPrecedence) {
- message += `\n• The default embedding provider "${defaultProviderName}" is not in your provider precedence list.`;
- }
-
- if (!defaultIsEnabled) {
- message += `\n• The default embedding provider "${defaultProviderName}" is not enabled.`;
- }
-
if (!allPrecedenceEnabled) {
const disabledProviders = precedenceList.filter(p =>
!enabledProviderNames.includes(p) && p !== 'local');
@@ -354,7 +337,21 @@ export class AIServiceManager implements IAIServiceManager {
return;
}
- const preferredProvider = options.getOption('embeddingsDefaultProvider') || 'openai';
+ // Get provider precedence list
+ const precedenceOption = await options.getOption('embeddingProviderPrecedence');
+ let precedenceList: string[] = [];
+
+ if (precedenceOption) {
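+ // The stored option value may be either a JSON array string or a comma-separated list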
+ if (precedenceOption.startsWith('[') && precedenceOption.endsWith(']')) {
+ precedenceList = JSON.parse(precedenceOption);
+ } else if (typeof precedenceOption === 'string') {
+ if (precedenceOption.includes(',')) {
+ precedenceList = precedenceOption.split(',').map(p => p.trim());
+ } else {
+ precedenceList = [precedenceOption];
+ }
+ }
+ }
// Check if we have enabled providers
const enabledProviders = await getEnabledEmbeddingProviders();
@@ -364,13 +361,6 @@ export class AIServiceManager implements IAIServiceManager {
return;
}
- // Validate that preferred provider is enabled
- const isPreferredEnabled = enabledProviders.some(p => p.name === preferredProvider);
-
- if (!isPreferredEnabled) {
- log.info(`Preferred provider "${preferredProvider}" is not enabled. Using first available.`);
- }
-
// Initialize embedding providers
log.info('Embedding providers initialized successfully');
} catch (error: any) {
diff --git a/src/services/llm/context/modules/provider_manager.ts b/src/services/llm/context/modules/provider_manager.ts
index aac24c7b4..8030e3592 100644
--- a/src/services/llm/context/modules/provider_manager.ts
+++ b/src/services/llm/context/modules/provider_manager.ts
@@ -20,44 +20,32 @@ export class ProviderManager {
*/
async getPreferredEmbeddingProvider(): Promise<any> {
try {
- // First try user's configured default provider
- const providerId = await options.getOption('embeddingsDefaultProvider');
- if (providerId) {
+ // Try to get providers based on precedence list
+ const precedenceOption = await options.getOption('embeddingProviderPrecedence');
+ let precedenceList: string[] = [];
+
+ if (precedenceOption) {
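+ // Accept both JSON-array and comma-separated formats for the stored value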
+ if (precedenceOption.startsWith('[') && precedenceOption.endsWith(']')) {
+ precedenceList = JSON.parse(precedenceOption);
+ } else if (typeof precedenceOption === 'string') {
+ if (precedenceOption.includes(',')) {
+ precedenceList = precedenceOption.split(',').map(p => p.trim());
+ } else {
+ precedenceList = [precedenceOption];
+ }
+ }
+ }
+
+ // Try each provider in the precedence list
+ for (const providerId of precedenceList) {
const provider = await getEmbeddingProvider(providerId);
if (provider) {
- log.info(`Using configured embedding provider: ${providerId}`);
+ log.info(`Using embedding provider from precedence list: ${providerId}`);
return provider;
}
}
- // Then try OpenAI
- const openaiKey = await options.getOption('openaiApiKey');
- if (openaiKey) {
- const provider = await getEmbeddingProvider('openai');
- if (provider) {
- log.info('Using OpenAI embeddings provider');
- return provider;
- }
- }
-
- // Try Anthropic
- const anthropicKey = await options.getOption('anthropicApiKey');
- if (anthropicKey) {
- const provider = await getEmbeddingProvider('anthropic');
- if (provider) {
- log.info('Using Anthropic embeddings provider');
- return provider;
- }
- }
-
- // Try Ollama
- const provider = await getEmbeddingProvider('ollama');
- if (provider) {
- log.info('Using Ollama embeddings provider');
- return provider;
- }
-
- // If no preferred providers, get any enabled provider
+ // If no provider from precedence list is available, try any enabled provider
const providers = await getEnabledEmbeddingProviders();
if (providers.length > 0) {
log.info(`Using available embedding provider: ${providers[0].name}`);
diff --git a/src/services/options_init.ts b/src/services/options_init.ts
index 6262ece2b..1070e7887 100644
--- a/src/services/options_init.ts
+++ b/src/services/options_init.ts
@@ -197,12 +197,11 @@ const defaultOptions: DefaultOption[] = [
{ name: "aiTemperature", value: "0.7", isSynced: true },
{ name: "aiSystemPrompt", value: "", isSynced: true },
{ name: "aiProviderPrecedence", value: "openai,anthropic,ollama", isSynced: true },
- { name: "embeddingsDefaultProvider", value: "openai", isSynced: true },
- { name: "embeddingProviderPrecedence", value: "openai,voyage,ollama", isSynced: true },
- { name: "embeddingDimensionStrategy", value: "native", isSynced: true },
+ { name: "embeddingDimensionStrategy", value: "auto", isSynced: true },
+ { name: "embeddingProviderPrecedence", value: "openai,voyage,ollama,local", isSynced: true },
+ { name: "embeddingSimilarityThreshold", value: "0.75", isSynced: true },
{ name: "enableAutomaticIndexing", value: "true", isSynced: true },
- { name: "embeddingSimilarityThreshold", value: "0.65", isSynced: true },
- { name: "maxNotesPerLlmQuery", value: "10", isSynced: true },
+ { name: "maxNotesPerLlmQuery", value: "3", isSynced: true },
{ name: "embeddingBatchSize", value: "10", isSynced: true },
{ name: "embeddingUpdateInterval", value: "5000", isSynced: true },
{ name: "embeddingDefaultDimension", value: "1536", isSynced: true },