feat(llm): redo chat storage, part 3

perf3ct 2025-06-02 15:12:08 +00:00
parent f6af617f6b
commit dcab4caee3
3 changed files with 33 additions and 21 deletions


@@ -155,7 +155,7 @@ export class AIServiceManager implements IAIServiceManager {
// Get precedence list from options
let precedenceList: string[] = ['openai']; // Default to openai if not set
const precedenceOption = await options.getOption('aiProviderPrecedence');
if (precedenceOption) {
try {
if (precedenceOption.startsWith('[') && precedenceOption.endsWith(']')) {
@@ -171,10 +171,10 @@ export class AIServiceManager implements IAIServiceManager {
log.error(`Error parsing precedence list: ${e}`);
}
}
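
For context, the parsing these two hunks touch can be sketched as a standalone function. This is an illustrative sketch only: the hunks show the default value, the JSON-array check, and the error log, so the comma-separated and single-value branches below are assumptions.

// Illustrative sketch, not the code in this commit.
function parsePrecedenceOption(precedenceOption: string | null): string[] {
    let precedenceList: string[] = ['openai']; // default to openai if not set
    if (precedenceOption) {
        try {
            if (precedenceOption.startsWith('[') && precedenceOption.endsWith(']')) {
                // e.g. '["openai","anthropic","ollama"]'
                precedenceList = JSON.parse(precedenceOption);
            } else if (precedenceOption.includes(',')) {
                // assumed fallback: comma-separated list, e.g. 'openai,ollama'
                precedenceList = precedenceOption.split(',').map(p => p.trim());
            } else {
                // assumed fallback: a single provider name
                precedenceList = [precedenceOption];
            }
        } catch (e) {
            console.error(`Error parsing precedence list: ${e}`);
        }
    }
    return precedenceList;
}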
// Check for configuration issues with providers in the precedence list
const configIssues: string[] = [];
// Check each provider in the precedence list for proper configuration
for (const provider of precedenceList) {
if (provider === 'openai') {
@@ -198,20 +198,20 @@ export class AIServiceManager implements IAIServiceManager {
}
// Add checks for other providers as needed
}
// Return warning message if there are configuration issues
if (configIssues.length > 0) {
let message = 'There are issues with your AI provider configuration:';
for (const issue of configIssues) {
message += `\n• ${issue}`;
}
message += '\n\nPlease check your AI settings.';
// Log warning to console
log.error('AI Provider Configuration Warning: ' + message);
return message;
}
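
As a worked example of the message assembly above, with hypothetical configuration issues:

// Hypothetical issue strings, only to show the shape of the returned warning.
const configIssues = ['OpenAI API key is not set', 'Ollama base URL is not set'];
let message = 'There are issues with your AI provider configuration:';
for (const issue of configIssues) {
    message += `\n• ${issue}`;
}
message += '\n\nPlease check your AI settings.';
// Resulting message:
// There are issues with your AI provider configuration:
// • OpenAI API key is not set
// • Ollama base URL is not set
//
// Please check your AI settings.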
@@ -279,9 +279,19 @@ export class AIServiceManager implements IAIServiceManager {
// If a specific provider is requested and available, use it
if (options.model && options.model.includes(':')) {
const [providerName, modelName] = options.model.split(':');
// Check if this is a provider prefix (e.g., "ollama:qwen3:30b")
// vs a model name with version (e.g., "qwen3:30b")
const parts = options.model.split(':');
// Only treat as provider:model if the first part is a known provider
const knownProviders = ['openai', 'anthropic', 'ollama', 'local'];
const potentialProvider = parts[0];
if (knownProviders.includes(potentialProvider) && availableProviders.includes(potentialProvider as ServiceProviders)) {
// This is a provider:model format
const providerName = potentialProvider;
const modelName = parts.slice(1).join(':'); // Rejoin the rest as model name
if (availableProviders.includes(providerName as ServiceProviders)) {
try {
const modifiedOptions = { ...options, model: modelName };
log.info(`[AIServiceManager] Using provider ${providerName} from model prefix with modifiedOptions.stream: ${modifiedOptions.stream}`);
@@ -291,6 +301,7 @@ export class AIServiceManager implements IAIServiceManager {
// If the specified provider fails, continue with the fallback providers
}
}
// If not a provider prefix, treat the entire string as a model name and continue with normal provider selection
}
// Try each provider in order until one succeeds
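
The prefix handling added in the two hunks above can be summarised as a small standalone sketch; parseProviderPrefix is a hypothetical helper name used here for illustration, not part of the codebase.

// Only treat "x:y" as provider:model when x is a known provider.
const knownProviders = ['openai', 'anthropic', 'ollama', 'local'];

function parseProviderPrefix(model: string): { provider?: string; model: string } {
    const parts = model.split(':');
    if (parts.length > 1 && knownProviders.includes(parts[0])) {
        // "ollama:qwen3:30b" -> provider "ollama", model "qwen3:30b"
        return { provider: parts[0], model: parts.slice(1).join(':') };
    }
    // "qwen3:30b" -> no known provider prefix, the whole string is the model name
    return { model };
}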


@@ -20,44 +20,44 @@ export class MessagePreparationStage extends BasePipelineStage<MessagePreparatio
*/
protected async process(input: MessagePreparationInput): Promise<{ messages: Message[] }> {
const { messages, context, systemPrompt, options } = input;
// Determine provider from model string if available (format: "provider:model")
let provider = 'default';
if (options?.model && options.model.includes(':')) {
const [providerName] = options.model.split(':');
provider = providerName;
}
// Check if tools are enabled
const toolsEnabled = options?.enableTools === true;
log.info(`Preparing messages for provider: ${provider}, context: ${!!context}, system prompt: ${!!systemPrompt}, tools: ${toolsEnabled}`);
// Get appropriate formatter for this provider
const formatter = MessageFormatterFactory.getFormatter(provider);
// Determine the system prompt to use
let finalSystemPrompt = systemPrompt || SYSTEM_PROMPTS.DEFAULT_SYSTEM_PROMPT;
// If tools are enabled, enhance system prompt with tools guidance
if (toolsEnabled) {
const toolCount = toolRegistry.getAllTools().length;
const toolsPrompt = `You have access to ${toolCount} tools to help you respond. When you need information that might be in the user's notes, use the search_notes tool to find relevant content or the read_note tool to read a specific note by ID. Use tools when specific information is required rather than making assumptions.`;
// Add tools guidance to system prompt
finalSystemPrompt = finalSystemPrompt + '\n\n' + toolsPrompt;
log.info(`Enhanced system prompt with tools guidance: ${toolCount} tools available`);
}
// Format messages using provider-specific approach
const formattedMessages = formatter.formatMessages(
messages,
finalSystemPrompt,
context
);
log.info(`Formatted ${messages.length} messages into ${formattedMessages.length} messages for provider: ${provider}`);
return { messages: formattedMessages };
}
}
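
For reference, the two decisions this stage makes, deriving the provider from a "provider:model" prefix and appending tools guidance to the system prompt, can be sketched in isolation. The helper name and sample values below are hypothetical.

// Provider comes from the model prefix when present, otherwise 'default'.
const model = 'anthropic:claude-3-sonnet'; // hypothetical value
const provider = model.includes(':') ? model.split(':')[0] : 'default';

// When tools are enabled, guidance is appended to the base system prompt.
function withToolsGuidance(basePrompt: string, toolCount: number, toolsEnabled: boolean): string {
    if (!toolsEnabled) {
        return basePrompt;
    }
    return `${basePrompt}\n\nYou have access to ${toolCount} tools to help you respond.`;
}

console.log(provider);                                        // "anthropic"
console.log(withToolsGuidance('You are a helpful assistant.', 3, true));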


@@ -234,7 +234,8 @@ export class ModelSelectionStage extends BasePipelineStage<ModelSelectionInput,
// For backward compatibility, ensure model name is set without prefix
if (options.model && options.model.includes(':')) {
options.model = modelName || options.model.split(':')[1];
const parsed = this.parseModelIdentifier(options.model);
options.model = modelName || parsed.model;
}
log.info(`Set provider metadata: provider=${selectedProvider}, model=${modelName}`);
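
The reason for routing through parseModelIdentifier rather than split(':')[1] is easiest to see with a model name that itself contains a colon; the exact return shape of parseModelIdentifier is assumed here.

// Why the naive split loses information once model names can contain colons:
const raw = 'ollama:qwen3:30b';
const naive = raw.split(':')[1];                      // "qwen3"      (drops ":30b")
const preserved = raw.split(':').slice(1).join(':');  // "qwen3:30b"  (what the parsed identifier keeps)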