Mirror of https://github.com/TriliumNext/Notes.git (synced 2025-07-27 10:02:59 +08:00)
feat(llm): update logic for using only a single provider instead of having provider "list"
This commit is contained in:
parent 5a25fb51d9
commit d5d5e18b5d
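The commit replaces the comma- or JSON-encoded precedence options (`aiProviderPrecedence`, `embeddingProviderPrecedence`) with single-valued `aiChatProvider` and `aiEmbeddingProvider` options and drops the fallback iteration over providers. A minimal sketch of the selection pattern the diff converges on (option names are taken from the diff; the `getOption` accessor and the error messages are simplified here for illustration):

// Sketch only: mirrors the single-provider selection used throughout this commit.
type ServiceProviders = 'openai' | 'anthropic' | 'ollama';

function resolveChatProvider(getOption: (name: string) => string | undefined): ServiceProviders {
    // A single value is read; there is no precedence list to parse any more.
    const selected = getOption('aiChatProvider');
    if (!selected) {
        throw new Error('No chat provider configured. Please set aiChatProvider option.');
    }
    const valid: ServiceProviders[] = ['openai', 'anthropic', 'ollama'];
    if (!valid.includes(selected as ServiceProviders)) {
        throw new Error(`Invalid chat provider '${selected}'. Valid providers are: ${valid.join(', ')}`);
    }
    // No fallback to another provider: misconfiguration surfaces as an error instead.
    return selected as ServiceProviders;
}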
@@ -16,51 +16,36 @@ export async function validateEmbeddingProviders(validationWarning: HTMLElement)
         return;
     }

-    // Get precedence list from options
-    const precedenceStr = options.get('aiProviderPrecedence') || 'openai,anthropic,ollama';
-    let precedenceList: string[] = [];
-    if (precedenceStr) {
-        if (precedenceStr.startsWith('[') && precedenceStr.endsWith(']')) {
-            try {
-                precedenceList = JSON.parse(precedenceStr);
-            } catch (e) {
-                console.error('Error parsing precedence list:', e);
-                precedenceList = ['openai']; // Default if parsing fails
-            }
-        } else if (precedenceStr.includes(',')) {
-            precedenceList = precedenceStr.split(',').map(p => p.trim());
-        } else {
-            precedenceList = [precedenceStr];
-        }
+    // Get selected chat provider
+    const selectedProvider = options.get('aiChatProvider');
+    if (!selectedProvider) {
+        // No provider configured, hide validation
+        validationWarning.style.display = 'none';
+        return;
     }

-    // Check for configuration issues with providers in the precedence list
+    // Check for configuration issues with the selected provider
     const configIssues: string[] = [];

-    // Check each provider in the precedence list for proper configuration
-    for (const provider of precedenceList) {
-        if (provider === 'openai') {
+    if (selectedProvider === 'openai') {
         // Check OpenAI configuration
         const apiKey = options.get('openaiApiKey');
         if (!apiKey) {
             configIssues.push(`OpenAI API key is missing`);
         }
-        } else if (provider === 'anthropic') {
+    } else if (selectedProvider === 'anthropic') {
         // Check Anthropic configuration
         const apiKey = options.get('anthropicApiKey');
         if (!apiKey) {
             configIssues.push(`Anthropic API key is missing`);
         }
-        } else if (provider === 'ollama') {
+    } else if (selectedProvider === 'ollama') {
         // Check Ollama configuration
         const baseUrl = options.get('ollamaBaseUrl');
         if (!baseUrl) {
             configIssues.push(`Ollama Base URL is missing`);
         }
-        }
-        // Add checks for other providers as needed
     }

     // Fetch embedding stats to check if there are any notes being processed
     const embeddingStats = await getEmbeddingStats() as {
@@ -36,12 +36,14 @@ export class AIServiceManager implements IAIServiceManager
         ollama: new OllamaService()
     };

-    private providerOrder: ServiceProviders[] = ['openai', 'anthropic', 'ollama']; // Default order
+    private currentChatProvider: ServiceProviders | null = null; // No default
+    private currentChatService: AIService | null = null; // Current active service
+    private currentEmbeddingProvider: string | null = null; // No default
     private initialized = false;

     constructor() {
-        // Initialize provider order immediately
-        this.updateProviderOrder();
+        // Initialize provider immediately
+        this.updateCurrentProvider();

         // Initialize tools immediately
         this.initializeTools().catch(error => {
@@ -71,68 +73,47 @@ export class AIServiceManager implements IAIServiceManager
     }

     /**
-     * Update the provider precedence order from saved options
+     * Update the current provider from saved options
      * Returns true if successful, false if options not available yet
      */
-    updateProviderOrder(): boolean {
+    updateCurrentProvider(): boolean {
         if (this.initialized) {
             return true;
         }

         try {
-            // Default precedence: openai, anthropic, ollama
-            const defaultOrder: ServiceProviders[] = ['openai', 'anthropic', 'ollama'];
-            // Get custom order from options
-            const customOrder = options.getOption('aiProviderPrecedence');
+            // Always get selected chat provider from options
+            const selectedChatProvider = options.getOption('aiChatProvider');
+            if (!selectedChatProvider) {
+                throw new Error('No chat provider configured. Please set aiChatProvider option.');

-            if (customOrder) {
-                try {
-                    // Try to parse as JSON first
-                    let parsed;

-                    // Handle both array in JSON format and simple string format
-                    if (customOrder.startsWith('[') && customOrder.endsWith(']')) {
-                        parsed = JSON.parse(customOrder);
-                    } else if (typeof customOrder === 'string') {
-                        // If it's a string with commas, split it
-                        if (customOrder.includes(',')) {
-                            parsed = customOrder.split(',').map(p => p.trim());
-                        } else {
-                            // If it's a simple string (like "ollama"), convert to single-item array
-                            parsed = [customOrder];
-                        }
-                    } else {
-                        // Fallback to default
-                        parsed = defaultOrder;
-                    }
             }

-            // Validate that all providers are valid
-            if (Array.isArray(parsed) &&
-                parsed.every(p => Object.keys(this.services).includes(p))) {
-                this.providerOrder = parsed as ServiceProviders[];
-            } else {
-                log.info('Invalid AI provider precedence format, using defaults');
-                this.providerOrder = defaultOrder;
+            if (!Object.keys(this.services).includes(selectedChatProvider)) {
+                throw new Error(`Invalid chat provider '${selectedChatProvider}'. Valid providers are: ${Object.keys(this.services).join(', ')}`);
             }
-            } catch (e) {
-                log.error(`Failed to parse AI provider precedence: ${e}`);
-                this.providerOrder = defaultOrder;
-            }
-            } else {
-                this.providerOrder = defaultOrder;
+            this.currentChatProvider = selectedChatProvider as ServiceProviders;
+            this.currentChatService = this.services[this.currentChatProvider];
+
+            // Always get selected embedding provider from options
+            const selectedEmbeddingProvider = options.getOption('aiEmbeddingProvider');
+            if (!selectedEmbeddingProvider) {
+                throw new Error('No embedding provider configured. Please set aiEmbeddingProvider option.');
             }

+            this.currentEmbeddingProvider = selectedEmbeddingProvider;

             this.initialized = true;
-            // Remove the validateEmbeddingProviders call since we now do validation on the client
-            // this.validateEmbeddingProviders();
+            log.info(`AI Service Manager initialized with chat provider: ${this.currentChatProvider}, embedding provider: ${this.currentEmbeddingProvider}`);

             return true;
         } catch (error) {
-            // If options table doesn't exist yet, use defaults
+            // If options table doesn't exist yet or providers not configured
             // This happens during initial database creation
-            this.providerOrder = ['openai', 'anthropic', 'ollama'];
+            log.error(`Failed to initialize AI providers: ${error}`);
+            this.currentChatProvider = null;
+            this.currentChatService = null;
+            this.currentEmbeddingProvider = null;
             return false;
         }
     }
@@ -152,52 +133,35 @@ export class AIServiceManager implements IAIServiceManager
             return null;
         }

-        // Get precedence list from options
-        let precedenceList: string[] = ['openai']; // Default to openai if not set
-        const precedenceOption = await options.getOption('aiProviderPrecedence');
-        if (precedenceOption) {
-            try {
-                if (precedenceOption.startsWith('[') && precedenceOption.endsWith(']')) {
-                    precedenceList = JSON.parse(precedenceOption);
-                } else if (typeof precedenceOption === 'string') {
-                    if (precedenceOption.includes(',')) {
-                        precedenceList = precedenceOption.split(',').map(p => p.trim());
-                    } else {
-                        precedenceList = [precedenceOption];
-                    }
-                }
-            } catch (e) {
-                log.error(`Error parsing precedence list: ${e}`);
-            }
+        // Get selected provider from options
+        const selectedProvider = await options.getOption('aiChatProvider');
+        if (!selectedProvider) {
+            throw new Error('No chat provider configured');
         }

-        // Check for configuration issues with providers in the precedence list
+        // Check for configuration issues with the selected provider
        const configIssues: string[] = [];

-        // Check each provider in the precedence list for proper configuration
-        for (const provider of precedenceList) {
-            if (provider === 'openai') {
+        // Check the selected provider for proper configuration
+        if (selectedProvider === 'openai') {
             // Check OpenAI configuration
             const apiKey = await options.getOption('openaiApiKey');
             if (!apiKey) {
                 configIssues.push(`OpenAI API key is missing`);
             }
-            } else if (provider === 'anthropic') {
+        } else if (selectedProvider === 'anthropic') {
             // Check Anthropic configuration
             const apiKey = await options.getOption('anthropicApiKey');
             if (!apiKey) {
                 configIssues.push(`Anthropic API key is missing`);
             }
-            } else if (provider === 'ollama') {
+        } else if (selectedProvider === 'ollama') {
             // Check Ollama configuration
             const baseUrl = await options.getOption('ollamaBaseUrl');
             if (!baseUrl) {
                 configIssues.push(`Ollama Base URL is missing`);
             }
-            }
         }
-            // Add checks for other providers as needed
-        }

         // Return warning message if there are configuration issues
         if (configIssues.length > 0) {
@@ -227,7 +191,7 @@ export class AIServiceManager implements IAIServiceManager
      */
     private ensureInitialized() {
         if (!this.initialized) {
-            this.updateProviderOrder();
+            this.updateCurrentProvider();
         }
     }

@@ -249,8 +213,7 @@ export class AIServiceManager implements IAIServiceManager
     }

     /**
-     * Generate a chat completion response using the first available AI service
-     * based on the configured precedence order
+     * Generate a chat completion response using the current AI service
      */
     async generateChatCompletion(messages: Message[], options: ChatCompletionOptions = {}): Promise<ChatResponse> {
         this.ensureInitialized();
@@ -266,51 +229,48 @@ export class AIServiceManager implements IAIServiceManager
             throw new Error('No messages provided for chat completion');
         }

-        // Try providers in order of preference
-        const availableProviders = this.getAvailableProviders();

-        if (availableProviders.length === 0) {
-            throw new Error('No AI providers are available. Please check your AI settings.');
-        }

-        // Sort available providers by precedence
-        const sortedProviders = this.providerOrder
-            .filter(provider => availableProviders.includes(provider));

-        // If a specific provider is requested and available, use it
+        // If a specific provider is requested via model prefix, use it temporarily
         if (options.model && options.model.includes(':')) {
             const [providerName, modelName] = options.model.split(':');

-            if (availableProviders.includes(providerName as ServiceProviders)) {
+            if (this.services[providerName as ServiceProviders]?.isAvailable()) {
                 try {
                     const modifiedOptions = { ...options, model: modelName };
                     log.info(`[AIServiceManager] Using provider ${providerName} from model prefix with modifiedOptions.stream: ${modifiedOptions.stream}`);
                     return await this.services[providerName as ServiceProviders].generateChatCompletion(messages, modifiedOptions);
                 } catch (error) {
                     log.error(`Error with specified provider ${providerName}: ${error}`);
-                    // If the specified provider fails, continue with the fallback providers
+                    throw new Error(`Provider ${providerName} failed: ${error}`);
                 }
+            } else {
+                throw new Error(`Requested provider ${providerName} is not available`);
             }
         }

-        // Try each provider in order until one succeeds
-        let lastError: Error | null = null;
+        // Ensure we have a configured service
+        if (!this.currentChatProvider || !this.currentChatService) {
+            // Try to initialize again in case options were updated
+            this.initialized = false;
+            this.updateCurrentProvider();

+            if (!this.currentChatProvider || !this.currentChatService) {
+                throw new Error('No chat provider configured. Please configure aiChatProvider in AI settings.');
+            }
+        }

+        if (!this.currentChatService.isAvailable()) {
+            throw new Error(`Configured chat provider '${this.currentChatProvider}' is not available. Please check your AI settings.`);
+        }

-        for (const provider of sortedProviders) {
         try {
-            log.info(`[AIServiceManager] Trying provider ${provider} with options.stream: ${options.stream}`);
-            return await this.services[provider].generateChatCompletion(messages, options);
+            log.info(`[AIServiceManager] Using current chat service (${this.currentChatProvider}) with options.stream: ${options.stream}`);
+            return await this.currentChatService.generateChatCompletion(messages, options);
         } catch (error) {
-            log.error(`Error with provider ${provider}: ${error}`);
-            lastError = error as Error;
-            // Continue to the next provider
+            log.error(`Error with provider ${this.currentChatProvider}: ${error}`);
+            throw new Error(`Chat provider ${this.currentChatProvider} failed: ${error}`);
         }
         }

-        // If we get here, all providers failed
-        throw new Error(`All AI providers failed: ${lastError?.message || 'Unknown error'}`);
-    }

     setupEventListeners() {
         // Setup event listeners for AI services
     }
@@ -406,21 +366,8 @@ export class AIServiceManager implements IAIServiceManager
             return;
         }

-        // Get provider precedence list
-        const precedenceOption = await options.getOption('embeddingProviderPrecedence');
-        let precedenceList: string[] = [];
+        // Get selected embedding provider
+        const selectedProvider = await options.getOption('aiEmbeddingProvider') || 'openai';

-        if (precedenceOption) {
-            if (precedenceOption.startsWith('[') && precedenceOption.endsWith(']')) {
-                precedenceList = JSON.parse(precedenceOption);
-            } else if (typeof precedenceOption === 'string') {
-                if (precedenceOption.includes(',')) {
-                    precedenceList = precedenceOption.split(',').map(p => p.trim());
-                } else {
-                    precedenceList = [precedenceOption];
-                }
-            }
-        }

         // Check if we have enabled providers
         const enabledProviders = await getEnabledEmbeddingProviders();
@@ -572,17 +519,13 @@ export class AIServiceManager implements IAIServiceManager
             return this.services[provider as ServiceProviders];
         }

-        // Otherwise, use the first available provider in the configured order
-        for (const providerName of this.providerOrder) {
-            const service = this.services[providerName];
-            if (service.isAvailable()) {
-                return service;
-            }
+        // Otherwise, use the current chat service
+        if (this.currentChatService && this.currentChatService.isAvailable()) {
+            return this.currentChatService;
         }

-        // If no provider is available, use first one anyway (it will throw an error)
-        // This allows us to show a proper error message rather than "provider not found"
-        return this.services[this.providerOrder[0]];
+        // If current service is not available, throw an error
+        throw new Error(`Configured chat provider '${this.currentChatProvider}' is not available`);
     }

     /**
@@ -590,16 +533,40 @@ export class AIServiceManager implements IAIServiceManager
      */
     getPreferredProvider(): string {
         this.ensureInitialized();
-        // Return the first available provider in the order
-        for (const providerName of this.providerOrder) {
-            if (this.services[providerName].isAvailable()) {
-                return providerName;
+        if (!this.currentChatProvider) {
+            throw new Error('No chat provider configured');
         }
+        return this.currentChatProvider;
     }

-        // Return the first provider as fallback
-        return this.providerOrder[0];
+    /**
+     * Get the current chat service
+     */
+    getCurrentChatService(): AIService | null {
+        this.ensureInitialized();
+        return this.currentChatService;
+    }
+
+    /**
+     * Get the current chat provider name
+     */
+    getCurrentChatProvider(): string {
+        this.ensureInitialized();
+        if (!this.currentChatProvider) {
+            throw new Error('No chat provider configured');
+        }
+        return this.currentChatProvider;
+    }
+
+    /**
+     * Get the current embedding provider name
+     */
+    getCurrentEmbeddingProvider(): string {
+        this.ensureInitialized();
+        if (!this.currentEmbeddingProvider) {
+            throw new Error('No embedding provider configured');
+        }
+        return this.currentEmbeddingProvider;
     }

     /**
@@ -609,6 +576,25 @@ export class AIServiceManager implements IAIServiceManager
         return this.services[provider as ServiceProviders]?.isAvailable() ?? false;
     }

+    /**
+     * Reinitialize the service manager when provider settings change
+     * This will update the current provider selection and service objects
+     */
+    async reinitialize(): Promise<void> {
+        log.info('Reinitializing AI Service Manager due to provider change');
+
+        // Reset initialization flag to force update
+        this.initialized = false;
+
+        // Update current provider and service objects from options
+        this.updateCurrentProvider();
+
+        // Re-validate providers if needed
+        await this.validateEmbeddingProviders();
+
+        log.info(`AI Service Manager reinitialized with chat provider: ${this.currentChatProvider}, embedding provider: ${this.currentEmbeddingProvider}`);
+    }
+
     /**
      * Get metadata about a provider
      */
@@ -723,6 +709,18 @@ export default {
     },
     getProviderMetadata(provider: string): ProviderMetadata | null {
         return getInstance().getProviderMetadata(provider);
+    },
+    async reinitialize(): Promise<void> {
+        return getInstance().reinitialize();
+    },
+    getCurrentChatService(): AIService | null {
+        return getInstance().getCurrentChatService();
+    },
+    getCurrentChatProvider(): string {
+        return getInstance().getCurrentChatProvider();
+    },
+    getCurrentEmbeddingProvider(): string {
+        return getInstance().getCurrentEmbeddingProvider();
     }
 };

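For consumers of the default export above, the new accessors make the active selection explicit. A hypothetical usage sketch (the import path is an assumption for illustration and is not part of this diff):

// Hypothetical caller of the new single-provider API.
import aiServiceManager from './ai_service_manager.js';

async function logActiveAiProviders(): Promise<void> {
    // Both getters throw if the corresponding option is not configured.
    const chatProvider = aiServiceManager.getCurrentChatProvider();
    const embeddingProvider = aiServiceManager.getCurrentEmbeddingProvider();
    console.log(`chat: ${chatProvider}, embeddings: ${embeddingProvider}`);

    // After the user changes provider settings, re-read the options explicitly.
    await aiServiceManager.reinitialize();
}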
@@ -8,53 +8,26 @@ import { getEmbeddingProvider, getEnabledEmbeddingProviders } from '../../provid
 export class ProviderManager {
     /**
      * Get the preferred embedding provider based on user settings
-     * Tries to use the most appropriate provider in this order:
-     * 1. User's configured default provider
-     * 2. OpenAI if API key is set
-     * 3. Anthropic if API key is set
-     * 4. Ollama if configured
-     * 5. Any available provider
-     * 6. Local provider as fallback
      *
      * @returns The preferred embedding provider or null if none available
      */
     async getPreferredEmbeddingProvider(): Promise<any> {
         try {
-            // Try to get providers based on precedence list
-            const precedenceOption = await options.getOption('embeddingProviderPrecedence');
-            let precedenceList: string[] = [];
-            if (precedenceOption) {
-                if (precedenceOption.startsWith('[') && precedenceOption.endsWith(']')) {
-                    precedenceList = JSON.parse(precedenceOption);
-                } else if (typeof precedenceOption === 'string') {
-                    if (precedenceOption.includes(',')) {
-                        precedenceList = precedenceOption.split(',').map(p => p.trim());
-                    } else {
-                        precedenceList = [precedenceOption];
-                    }
-                }
+            // Get the selected embedding provider
+            const selectedProvider = await options.getOption('aiEmbeddingProvider');
+            if (!selectedProvider) {
+                throw new Error('No embedding provider configured. Please set aiEmbeddingProvider option.');
             }

-            // Try each provider in the precedence list
-            for (const providerId of precedenceList) {
-                const provider = await getEmbeddingProvider(providerId);
+            // Try to get the selected provider
+            const provider = await getEmbeddingProvider(selectedProvider);
             if (provider) {
-                log.info(`Using embedding provider from precedence list: ${providerId}`);
+                log.info(`Using selected embedding provider: ${selectedProvider}`);
                 return provider;
             }
-            }

-            // If no provider from precedence list is available, try any enabled provider
-            const providers = await getEnabledEmbeddingProviders();
-            if (providers.length > 0) {
-                log.info(`Using available embedding provider: ${providers[0].name}`);
-                return providers[0];
-            }
+            // If selected provider is not available, throw error
+            throw new Error(`Selected embedding provider '${selectedProvider}' is not available. Please check your AI settings.`);

-            // Last resort is local provider
-            log.info('Using local embedding provider as fallback');
-            return await getEmbeddingProvider('local');
         } catch (error) {
             log.error(`Error getting preferred embedding provider: ${error}`);
             return null;
@@ -86,52 +86,44 @@ export class ModelSelectionStage extends BasePipelineStage<ModelSelectionInput,
             }
         }

-        // Get default provider and model based on precedence
-        let defaultProvider = 'openai';
-        let defaultModelName = 'gpt-3.5-turbo';
+        // Get current provider and model from AIServiceManager
+        let currentProvider: string;
+        let defaultModelName: string | null = null;

         try {
-            // Get provider precedence list
-            const providerPrecedence = await options.getOption('aiProviderPrecedence');
-            if (providerPrecedence) {
-                // Parse provider precedence list
-                let providers: string[] = [];
-                if (providerPrecedence.includes(',')) {
-                    providers = providerPrecedence.split(',').map(p => p.trim());
-                } else if (providerPrecedence.startsWith('[') && providerPrecedence.endsWith(']')) {
-                    providers = JSON.parse(providerPrecedence);
-                } else {
-                    providers = [providerPrecedence];
+            currentProvider = aiServiceManager.getCurrentChatProvider();
+        } catch (error) {
+            // Provider not configured, try to get from options
+            const provider = await options.getOption('aiChatProvider');
+            if (!provider) {
+                throw new Error('No chat provider configured. Please configure AI settings.');
+            }
+            currentProvider = provider;
         }

-            // Check for first available provider
-            if (providers.length > 0) {
-                const firstProvider = providers[0];
-                defaultProvider = firstProvider;
-                // Get provider-specific default model
-                if (firstProvider === 'openai') {
-                    const model = await options.getOption('openaiDefaultModel');
-                    if (model) defaultModelName = model;
-                } else if (firstProvider === 'anthropic') {
-                    const model = await options.getOption('anthropicDefaultModel');
-                    if (model) defaultModelName = model;
-                } else if (firstProvider === 'ollama') {
-                    const model = await options.getOption('ollamaDefaultModel');
-                    if (model) {
-                        defaultModelName = model;
+        // Get provider-specific default model from options
+        if (currentProvider === 'openai') {
+            defaultModelName = await options.getOption('openaiDefaultModel');
+            if (!defaultModelName) {
+                throw new Error('OpenAI default model not configured. Please set openaiDefaultModel option.');
+            }
+        } else if (currentProvider === 'anthropic') {
+            defaultModelName = await options.getOption('anthropicDefaultModel');
+            if (!defaultModelName) {
+                throw new Error('Anthropic default model not configured. Please set anthropicDefaultModel option.');
+            }
+        } else if (currentProvider === 'ollama') {
+            defaultModelName = await options.getOption('ollamaDefaultModel');
+            if (!defaultModelName) {
+                throw new Error('Ollama default model not configured. Please set ollamaDefaultModel option.');
+            }

             // Enable tools for all Ollama models
             // The Ollama API will handle models that don't support tool calling
-            log.info(`Using Ollama model ${model} with tool calling enabled`);
+            log.info(`Using Ollama model ${defaultModelName} with tool calling enabled`);
             updatedOptions.enableTools = true;
-                    }
-                }
-            }
-            }
-        } catch (error) {
-            // If any error occurs, use the fallback default
-            log.error(`Error determining default model: ${error}`);
+        } else {
+            throw new Error(`Unknown provider '${currentProvider}'. Cannot determine default model.`);
         }

         // Determine query complexity
|
|||||||
|
|
||||||
// Set the model and add provider metadata
|
// Set the model and add provider metadata
|
||||||
updatedOptions.model = defaultModelName;
|
updatedOptions.model = defaultModelName;
|
||||||
this.addProviderMetadata(updatedOptions, defaultProvider, defaultModelName);
|
this.addProviderMetadata(updatedOptions, currentProvider, defaultModelName);
|
||||||
|
|
||||||
log.info(`Selected model: ${defaultModelName} from provider: ${defaultProvider} for query complexity: ${queryComplexity}`);
|
log.info(`Selected model: ${defaultModelName} from provider: ${currentProvider} for query complexity: ${queryComplexity}`);
|
||||||
log.info(`[ModelSelectionStage] Final options: ${JSON.stringify({
|
log.info(`[ModelSelectionStage] Final options: ${JSON.stringify({
|
||||||
model: updatedOptions.model,
|
model: updatedOptions.model,
|
||||||
stream: updatedOptions.stream,
|
stream: updatedOptions.stream,
|
||||||
provider: defaultProvider,
|
provider: currentProvider,
|
||||||
enableTools: updatedOptions.enableTools
|
enableTools: updatedOptions.enableTools
|
||||||
})}`);
|
})}`);
|
||||||
|
|
||||||
@@ -207,19 +199,10 @@ export class ModelSelectionStage extends BasePipelineStage<ModelSelectionInput,
             return;
         }

-        // If no provider could be determined, try to use precedence
+        // If no provider could be determined, use the current provider
         let selectedProvider = provider;
         if (!selectedProvider) {
-            // List of providers in precedence order
-            const providerPrecedence = ['anthropic', 'openai', 'ollama'];
+            selectedProvider = aiServiceManager.getCurrentChatProvider();

-            // Find the first available provider
-            for (const p of providerPrecedence) {
-                if (aiServiceManager.isProviderAvailable(p)) {
-                    selectedProvider = p;
-                    break;
-                }
-            }
         }

         // Set the provider metadata in the options
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Determine model based on provider precedence
|
* Determine model based on current provider
|
||||||
*/
|
*/
|
||||||
private determineDefaultModel(input: ModelSelectionInput): string {
|
private async determineDefaultModel(input: ModelSelectionInput): Promise<string> {
|
||||||
const providerPrecedence = ['anthropic', 'openai', 'ollama'];
|
let currentProvider: string;
|
||||||
|
try {
|
||||||
// Use only providers that are available
|
currentProvider = aiServiceManager.getCurrentChatProvider();
|
||||||
const availableProviders = providerPrecedence.filter(provider =>
|
} catch (error) {
|
||||||
aiServiceManager.isProviderAvailable(provider));
|
// Provider not initialized, get from options
|
||||||
|
const provider = await options.getOption('aiChatProvider');
|
||||||
if (availableProviders.length === 0) {
|
if (!provider) {
|
||||||
throw new Error('No AI providers are available');
|
throw new Error('No chat provider configured');
|
||||||
|
}
|
||||||
|
currentProvider = provider;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get the first available provider and its default model
|
const service = aiServiceManager.getCurrentChatService();
|
||||||
const defaultProvider = availableProviders[0] as 'openai' | 'anthropic' | 'ollama' | 'local';
|
if (!service || !service.isAvailable()) {
|
||||||
let defaultModel = 'gpt-3.5-turbo'; // Use model from our constants
|
throw new Error(`Current AI provider '${currentProvider}' is not available`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get the default model from options based on provider
|
||||||
|
let defaultModel: string | null = null;
|
||||||
|
|
||||||
|
if (currentProvider === 'openai') {
|
||||||
|
defaultModel = await options.getOption('openaiDefaultModel');
|
||||||
|
} else if (currentProvider === 'anthropic') {
|
||||||
|
defaultModel = await options.getOption('anthropicDefaultModel');
|
||||||
|
} else if (currentProvider === 'ollama') {
|
||||||
|
defaultModel = await options.getOption('ollamaDefaultModel');
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!defaultModel) {
|
||||||
|
throw new Error(`No default model configured for provider '${currentProvider}'`);
|
||||||
|
}
|
||||||
|
|
||||||
// Set provider metadata
|
// Set provider metadata
|
||||||
if (!input.options.providerMetadata) {
|
if (!input.options.providerMetadata) {
|
||||||
input.options.providerMetadata = {
|
input.options.providerMetadata = {
|
||||||
provider: defaultProvider,
|
provider: currentProvider as 'openai' | 'anthropic' | 'ollama' | 'local',
|
||||||
modelId: defaultModel
|
modelId: defaultModel
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
log.info(`Selected default model ${defaultModel} from provider ${defaultProvider}`);
|
log.info(`Selected default model ${defaultModel} from provider ${currentProvider}`);
|
||||||
return defaultModel;
|
return defaultModel;
|
||||||
}
|
}
|
||||||
|
|
||||||