mirror of https://github.com/TriliumNext/Notes.git
refactor(llm): enhance configuration handling to avoid default assumptions and improve error handling
This commit is contained in:
parent 45175b6af3
commit ce7c4a31a1
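In short, the provider-precedence and default-model helpers now return null/undefined when nothing is configured instead of silently assuming 'openai' or 'gpt-3.5-turbo', and callers must handle the unconfigured case explicitly. The snippet below is a minimal usage sketch of the new calling convention, not part of the commit; the import path and the wrapper function are illustrative assumptions.

// Minimal sketch (illustrative): handling the new null/undefined returns.
// The import path below is hypothetical.
import { getPreferredProvider, getDefaultModelForProvider } from './config/configuration_helpers.js';

async function resolveModelOrThrow(): Promise<{ provider: string; model: string }> {
    // getPreferredProvider() now returns null when no provider precedence is configured.
    const provider = await getPreferredProvider();
    if (!provider) {
        throw new Error('No AI providers are configured. Please check your AI settings.');
    }

    // getDefaultModelForProvider() can now return undefined instead of a hardcoded model name.
    const model = await getDefaultModelForProvider(provider);
    if (!model) {
        throw new Error(`No default model configured for provider ${provider}.`);
    }

    return { provider, model };
}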
@@ -532,7 +532,13 @@ export class AIServiceManager implements IAIServiceManager {
      */
     async getPreferredProviderAsync(): Promise<string> {
         try {
-            return await getPreferredProvider();
+            const preferredProvider = await getPreferredProvider();
+            if (preferredProvider === null) {
+                // No providers configured, fallback to first available
+                log.info('No providers configured in precedence, using first available provider');
+                return this.providerOrder[0];
+            }
+            return preferredProvider;
         } catch (error) {
             log.error(`Error getting preferred provider: ${error}`);
             return this.providerOrder[0];
@@ -23,8 +23,11 @@ export async function getProviderPrecedence(): Promise<ProviderType[]> {
 /**
  * Get the default/preferred AI provider
  */
-export async function getPreferredProvider(): Promise<ProviderType> {
+export async function getPreferredProvider(): Promise<ProviderType | null> {
     const config = await configurationManager.getProviderPrecedence();
+    if (config.providers.length === 0) {
+        return null; // No providers configured
+    }
     return config.defaultProvider || config.providers[0];
 }
 
@@ -39,8 +42,11 @@ export async function getEmbeddingProviderPrecedence(): Promise<string[]> {
 /**
  * Get the default embedding provider
  */
-export async function getPreferredEmbeddingProvider(): Promise<string> {
+export async function getPreferredEmbeddingProvider(): Promise<string | null> {
     const config = await configurationManager.getEmbeddingProviderPrecedence();
+    if (config.providers.length === 0) {
+        return null; // No providers configured
+    }
     return config.defaultProvider || config.providers[0];
 }
 
@@ -61,9 +67,9 @@ export function createModelConfig(modelString: string, defaultProvider?: Provide
 /**
  * Get the default model for a specific provider
  */
-export async function getDefaultModelForProvider(provider: ProviderType): Promise<string> {
+export async function getDefaultModelForProvider(provider: ProviderType): Promise<string | undefined> {
     const config = await configurationManager.getAIConfig();
-    return config.defaultModels[provider];
+    return config.defaultModels[provider]; // This can now be undefined
 }
 
 /**
@@ -106,13 +112,17 @@ export async function isProviderConfigured(provider: ProviderType): Promise<bool
 export async function getFirstAvailableProvider(): Promise<ProviderType | null> {
     const providers = await getProviderPrecedence();
 
+    if (providers.length === 0) {
+        return null; // No providers configured
+    }
+
     for (const provider of providers) {
         if (await isProviderConfigured(provider)) {
             return provider;
         }
     }
 
-    return null;
+    return null; // No providers are properly configured
 }
 
 /**
@@ -128,3 +138,42 @@ export async function validateConfiguration() {
 export function clearConfigurationCache(): void {
     configurationManager.clearCache();
 }
+
+/**
+ * Get a model configuration with validation that no defaults are assumed
+ */
+export async function getValidModelConfig(provider: ProviderType): Promise<{ model: string; provider: ProviderType } | null> {
+    const defaultModel = await getDefaultModelForProvider(provider);
+
+    if (!defaultModel) {
+        // No default model configured for this provider
+        return null;
+    }
+
+    const isConfigured = await isProviderConfigured(provider);
+    if (!isConfigured) {
+        // Provider is not properly configured
+        return null;
+    }
+
+    return {
+        model: defaultModel,
+        provider
+    };
+}
+
+/**
+ * Get the first valid model configuration from the provider precedence list
+ */
+export async function getFirstValidModelConfig(): Promise<{ model: string; provider: ProviderType } | null> {
+    const providers = await getProviderPrecedence();
+
+    for (const provider of providers) {
+        const config = await getValidModelConfig(provider);
+        if (config) {
+            return config;
+        }
+    }
+
+    return null; // No valid model configuration found
+}
@@ -75,13 +75,14 @@ export class ConfigurationManager {
 
             return {
                 providers: providers as ProviderType[],
-                defaultProvider: providers[0] as ProviderType
+                defaultProvider: providers.length > 0 ? providers[0] as ProviderType : undefined
             };
         } catch (error) {
             log.error(`Error parsing provider precedence: ${error}`);
+            // Only return known providers if they exist, don't assume defaults
             return {
-                providers: ['openai', 'anthropic', 'ollama'],
-                defaultProvider: 'openai'
+                providers: [],
+                defaultProvider: undefined
             };
         }
     }
@@ -96,13 +97,14 @@ export class ConfigurationManager {
 
             return {
                 providers: providers as EmbeddingProviderType[],
-                defaultProvider: providers[0] as EmbeddingProviderType
+                defaultProvider: providers.length > 0 ? providers[0] as EmbeddingProviderType : undefined
             };
         } catch (error) {
             log.error(`Error parsing embedding provider precedence: ${error}`);
+            // Don't assume defaults, return empty configuration
             return {
-                providers: ['openai', 'ollama'],
-                defaultProvider: 'openai'
+                providers: [],
+                defaultProvider: undefined
             };
         }
     }
@@ -167,9 +169,9 @@ export class ConfigurationManager {
     }
 
     /**
-     * Get default models for each provider
+     * Get default models for each provider - ONLY from user configuration
      */
-    public async getDefaultModels(): Promise<Record<ProviderType, string>> {
+    public async getDefaultModels(): Promise<Record<ProviderType, string | undefined>> {
         try {
             const [openaiModel, anthropicModel, ollamaModel] = await Promise.all([
                 options.getOption('openaiDefaultModel'),
@@ -178,16 +180,17 @@ export class ConfigurationManager {
             ]);
 
             return {
-                openai: openaiModel || 'gpt-3.5-turbo',
-                anthropic: anthropicModel || 'claude-3-sonnet-20240229',
-                ollama: ollamaModel || 'llama2'
+                openai: openaiModel || undefined,
+                anthropic: anthropicModel || undefined,
+                ollama: ollamaModel || undefined
             };
         } catch (error) {
             log.error(`Error loading default models: ${error}`);
+            // Return undefined for all providers if we can't load config
             return {
-                openai: 'gpt-3.5-turbo',
-                anthropic: 'claude-3-sonnet-20240229',
-                ollama: 'llama2'
+                openai: undefined,
+                anthropic: undefined,
+                ollama: undefined
             };
         }
     }
@@ -322,7 +325,8 @@ export class ConfigurationManager {
 
     private parseProviderList(precedenceOption: string | null): string[] {
         if (!precedenceOption) {
-            return ['openai', 'anthropic', 'ollama'];
+            // Don't assume any defaults - return empty array
+            return [];
         }
 
         try {
@@ -344,7 +348,8 @@ export class ConfigurationManager {
 
         } catch (error) {
             log.error(`Error parsing provider list "${precedenceOption}": ${error}`);
-            return ['openai', 'anthropic', 'ollama'];
+            // Don't assume defaults on parse error
+            return [];
         }
     }
 
@@ -352,17 +357,17 @@ export class ConfigurationManager {
         return {
             enabled: false,
             providerPrecedence: {
-                providers: ['openai', 'anthropic', 'ollama'],
-                defaultProvider: 'openai'
+                providers: [],
+                defaultProvider: undefined
             },
             embeddingProviderPrecedence: {
-                providers: ['openai', 'ollama'],
-                defaultProvider: 'openai'
+                providers: [],
+                defaultProvider: undefined
             },
             defaultModels: {
-                openai: 'gpt-3.5-turbo',
-                anthropic: 'claude-3-sonnet-20240229',
-                ollama: 'llama2'
+                openai: undefined,
+                anthropic: undefined,
+                ollama: undefined
             },
             providerSettings: {}
         };
@@ -48,7 +48,7 @@ export interface AIConfig {
     enabled: boolean;
     providerPrecedence: ProviderPrecedenceConfig;
     embeddingProviderPrecedence: EmbeddingProviderPrecedenceConfig;
-    defaultModels: Record<ProviderType, string>;
+    defaultModels: Record<ProviderType, string | undefined>;
     providerSettings: ProviderSettings;
 }
 
@@ -100,22 +100,22 @@ export class ModelSelectionStage extends BasePipelineStage<ModelSelectionInput,
         }
 
         // Get default provider and model using the new configuration system
-        let defaultProvider: ProviderType = 'openai';
-        let defaultModelName = 'gpt-3.5-turbo';
-
         try {
             // Use the new configuration helpers - no string parsing!
-            defaultProvider = await getPreferredProvider();
-            defaultModelName = await getDefaultModelForProvider(defaultProvider);
+            const preferredProvider = await getPreferredProvider();
 
-            log.info(`Selected provider: ${defaultProvider}, model: ${defaultModelName}`);
-        } catch (error) {
-            // If any error occurs, use the fallback default
-            log.error(`Error determining default model: ${error}`);
-            defaultProvider = 'openai';
-            defaultModelName = 'gpt-3.5-turbo';
+            if (!preferredProvider) {
+                throw new Error('No AI providers are configured. Please check your AI settings.');
             }
+
+            const modelName = await getDefaultModelForProvider(preferredProvider);
+
+            if (!modelName) {
+                throw new Error(`No default model configured for provider ${preferredProvider}. Please set a default model in your AI settings.`);
+            }
+
+            log.info(`Selected provider: ${preferredProvider}, model: ${modelName}`);
 
             // Determine query complexity
             let queryComplexity = 'low';
             if (query) {
@@ -143,18 +143,22 @@ export class ModelSelectionStage extends BasePipelineStage<ModelSelectionInput,
             }
 
             // Set the model and add provider metadata
-            updatedOptions.model = defaultModelName;
-            this.addProviderMetadata(updatedOptions, defaultProvider as ServiceProviders, defaultModelName);
+            updatedOptions.model = modelName;
+            this.addProviderMetadata(updatedOptions, preferredProvider as ServiceProviders, modelName);
 
-            log.info(`Selected model: ${defaultModelName} from provider: ${defaultProvider} for query complexity: ${queryComplexity}`);
+            log.info(`Selected model: ${modelName} from provider: ${preferredProvider} for query complexity: ${queryComplexity}`);
             log.info(`[ModelSelectionStage] Final options: ${JSON.stringify({
                 model: updatedOptions.model,
                 stream: updatedOptions.stream,
-                provider: defaultProvider,
+                provider: preferredProvider,
                 enableTools: updatedOptions.enableTools
             })}`);
 
             return { options: updatedOptions };
+        } catch (error) {
+            log.error(`Error determining default model: ${error}`);
+            throw new Error(`Failed to determine AI model configuration: ${error}`);
+        }
     }
 
     /**
@@ -225,6 +229,10 @@ export class ModelSelectionStage extends BasePipelineStage<ModelSelectionInput,
             const defaultProvider = availableProviders[0];
             const defaultModel = await getDefaultModelForProvider(defaultProvider);
 
+            if (!defaultModel) {
+                throw new Error(`No default model configured for provider ${defaultProvider}. Please configure a default model in your AI settings.`);
+            }
+
             // Set provider metadata
             if (!input.options.providerMetadata) {
                 input.options.providerMetadata = {
@@ -237,8 +245,7 @@ export class ModelSelectionStage extends BasePipelineStage<ModelSelectionInput,
             return defaultModel;
         } catch (error) {
             log.error(`Error determining default model: ${error}`);
-            // Fallback to hardcoded default
-            return 'gpt-3.5-turbo';
+            throw error; // Don't provide fallback defaults, let the error propagate
         }
     }
 
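For reference, the new getValidModelConfig/getFirstValidModelConfig helpers added above are meant to replace hardcoded fallbacks at call sites. A rough usage sketch follows; the import path and the calling function are illustrative assumptions, not part of the commit.

// Illustrative caller; import path is hypothetical.
import { getFirstValidModelConfig } from './config/configuration_helpers.js';

async function chooseModelFromPrecedence(): Promise<{ model: string; provider: string }> {
    // Walks the configured provider precedence and returns the first provider
    // that is both properly configured and has a default model, or null otherwise.
    const config = await getFirstValidModelConfig();
    if (!config) {
        throw new Error('No valid AI model configuration found. Configure a provider and a default model in the AI settings.');
    }
    return config;
}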