import BasicWidget from "./basic_widget.js";
import toastService from "../services/toast.js";
import server from "../services/server.js";
import appContext from "../components/app_context.js";
import utils from "../services/utils.js";
import { t } from "../services/i18n.js";
import libraryLoader from "../services/library_loader.js";
import { applySyntaxHighlight } from "../services/syntax_highlight.js";
import options from "../services/options.js";

// Import the LLM Chat CSS
(async function() {
    await libraryLoader.requireCss('stylesheets/llm_chat.css');
})();

// NOTE: the original template markup was lost during extraction. This is a minimal
// reconstruction containing only the elements and class names that the widget code
// below queries; the original layout and styling were more elaborate.
const TPL = `
<div class="note-context-chat h-100 d-flex flex-column">
    <div class="provider-validation-warning alert alert-warning m-2" style="display: none;"></div>

    <div class="note-context-chat-container flex-grow-1 overflow-auto p-2">
        <div class="note-context-chat-messages"></div>

        <div class="loading-indicator" style="display: none;">Loading…</div>

        <div class="sources-container mt-2" style="display: none;">
            Sources (<span class="sources-count">0</span>):
            <div class="sources-list"></div>
        </div>
    </div>

    <form class="note-context-chat-form d-flex flex-column border-top p-2">
        <div class="d-flex align-items-center mb-1">
            Options:
            <label class="ms-2">
                <input type="checkbox" class="use-advanced-context-checkbox form-check-input">
                Use advanced context
            </label>
            <label class="ms-3">
                <input type="checkbox" class="show-thinking-checkbox form-check-input">
                Show thinking
            </label>
        </div>
        <div class="d-flex">
            <textarea class="note-context-chat-input form-control" rows="1"></textarea>
            <button type="submit" class="note-context-chat-send-button btn btn-primary ms-2">Send</button>
        </div>
    </form>
</div>`;
interface ChatResponse {
    id: string;
    messages: Array<{role: string; content: string}>;
    sources?: Array<{noteId: string; title: string}>;
}

interface SessionResponse {
    id: string;
    title: string;
}

export default class LlmChatPanel extends BasicWidget {
    private noteContextChatMessages!: HTMLElement;
    private noteContextChatForm!: HTMLFormElement;
    private noteContextChatInput!: HTMLTextAreaElement;
    private noteContextChatSendButton!: HTMLButtonElement;
    private chatContainer!: HTMLElement;
    private loadingIndicator!: HTMLElement;
    private sourcesList!: HTMLElement;
    private useAdvancedContextCheckbox!: HTMLInputElement;
    private showThinkingCheckbox!: HTMLInputElement;
    private validationWarning!: HTMLElement;
    private sessionId: string | null = null;
    private currentNoteId: string | null = null;

    doRender() {
        this.$widget = $(TPL);
        const element = this.$widget[0];

        this.noteContextChatMessages = element.querySelector('.note-context-chat-messages') as HTMLElement;
        this.noteContextChatForm = element.querySelector('.note-context-chat-form') as HTMLFormElement;
        this.noteContextChatInput = element.querySelector('.note-context-chat-input') as HTMLTextAreaElement;
        this.noteContextChatSendButton = element.querySelector('.note-context-chat-send-button') as HTMLButtonElement;
        this.chatContainer = element.querySelector('.note-context-chat-container') as HTMLElement;
        this.loadingIndicator = element.querySelector('.loading-indicator') as HTMLElement;
        this.sourcesList = element.querySelector('.sources-list') as HTMLElement;
        this.useAdvancedContextCheckbox = element.querySelector('.use-advanced-context-checkbox') as HTMLInputElement;
        this.showThinkingCheckbox = element.querySelector('.show-thinking-checkbox') as HTMLInputElement;
        this.validationWarning = element.querySelector('.provider-validation-warning') as HTMLElement;

        // Set up event delegation for the settings link
        this.validationWarning.addEventListener('click', (e) => {
            const target = e.target as HTMLElement;
            if (target.classList.contains('settings-link') || target.closest('.settings-link')) {
                console.log('Settings link clicked, navigating to AI settings URL');
                window.location.href = '#root/_hidden/_options/_optionsAi';
            }
        });

        this.initializeEventListeners();

        // Create a session when first loaded
        this.createChatSession();

        return this.$widget;
    }

    async refresh() {
        if (!this.isVisible()) {
            return;
        }

        // Check for any provider validation issues when refreshing
        await this.validateEmbeddingProviders();

        // Get the current note context if needed
        this.currentNoteId = appContext.tabManager.getActiveContext()?.note?.noteId || null;

        if (!this.sessionId) {
            // Create a new chat session
            await this.createChatSession();
        }
    }

    private async createChatSession() {
        // Check for validation issues first
        await this.validateEmbeddingProviders();

        try {
            const resp = await server.post<SessionResponse>('llm/sessions', {
                title: 'Note Chat'
            });

            if (resp && resp.id) {
                this.sessionId = resp.id;
            }
        } catch (error) {
            console.error('Failed to create chat session:', error);
            toastService.showError('Failed to create chat session');
        }
    }
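    // Backend contract assumed by createChatSession() above and sendMessage() below,
    // inferred from the calls themselves (the endpoint shapes are assumptions drawn from
    // this file, not documented API):
    //
    //   POST llm/sessions                        -> { id, title }            (SessionResponse)
    //   POST llm/sessions/{sessionId}/messages   -> { content?, sources? }   (direct, non-streaming reply)
    //   GET  ./api/llm/sessions/{sessionId}/messages?format=stream&...       (Server-Sent Events stream)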
    private async sendMessage(content: string) {
        if (!content.trim() || !this.sessionId) {
            return;
        }

        // Check for provider validation issues before sending
        await this.validateEmbeddingProviders();

        // Add the user message to the chat
        this.addMessageToChat('user', content);
        this.noteContextChatInput.value = '';
        this.showLoadingIndicator();
        this.hideSources();

        try {
            const useAdvancedContext = this.useAdvancedContextCheckbox.checked;
            const showThinking = this.showThinkingCheckbox.checked;

            // Add logging to verify parameters
            console.log(`Sending message with: useAdvancedContext=${useAdvancedContext}, showThinking=${showThinking}, noteId=${this.currentNoteId}`);

            // Create the message parameters
            const messageParams = {
                content,
                contextNoteId: this.currentNoteId,
                useAdvancedContext,
                showThinking
            };

            // First, send the message via POST request
            const postResponse = await server.post<any>(`llm/sessions/${this.sessionId}/messages`, messageParams);

            // If the POST request returned content directly, display it
            if (postResponse && postResponse.content) {
                this.addMessageToChat('assistant', postResponse.content);

                // If there are sources, show them
                if (postResponse.sources && postResponse.sources.length > 0) {
                    this.showSources(postResponse.sources);
                }

                this.hideLoadingIndicator();
                return;
            }

            // Then set up streaming via EventSource
            const streamUrl = `./api/llm/sessions/${this.sessionId}/messages?format=stream&useAdvancedContext=${useAdvancedContext}&showThinking=${showThinking}`;
            const source = new EventSource(streamUrl);

            let assistantResponse = '';
            let receivedAnyContent = false;
            let timeoutId: number | null = null;

            // Set a timeout to handle the case where streaming doesn't work properly
            timeoutId = window.setTimeout(() => {
                if (!receivedAnyContent) {
                    // If we haven't received any content after a reasonable timeout (10 seconds),
                    // add a fallback message and close the stream
                    this.hideLoadingIndicator();
                    this.addMessageToChat('assistant', 'I\'m having trouble generating a response right now. Please try again later.');
                    source.close();
                }
            }, 10000);

            // Handle the streaming response
            source.onmessage = (event) => {
                if (event.data === '[DONE]') {
                    // Stream completed
                    source.close();
                    this.hideLoadingIndicator();

                    // Clear the timeout since we're done
                    if (timeoutId !== null) {
                        window.clearTimeout(timeoutId);
                    }

                    // If we didn't receive any content but the stream completed normally,
                    // display a message to the user
                    if (!receivedAnyContent) {
                        this.addMessageToChat('assistant', 'I processed your request, but I don\'t have any specific information to share at the moment.');
                    }

                    return;
                }

                try {
                    const data = JSON.parse(event.data);
                    console.log("Received streaming data:", data); // Debug log

                    // Handle both content and error cases
                    if (data.content) {
                        receivedAnyContent = true;
                        assistantResponse += data.content;

                        // Update the UI with the accumulated response
                        const assistantElement = this.noteContextChatMessages.querySelector('.assistant-message:last-child .message-content');

                        if (assistantElement) {
                            assistantElement.innerHTML = this.formatMarkdown(assistantResponse);

                            // Apply syntax highlighting to any code blocks in the updated content
                            applySyntaxHighlight($(assistantElement as HTMLElement));
                        } else {
                            this.addMessageToChat('assistant', assistantResponse);
                        }
                    } else if (data.error) {
                        // Handle an error message
                        this.hideLoadingIndicator();
                        this.addMessageToChat('assistant', `Error: ${data.error}`);
                        receivedAnyContent = true;
                        source.close();

                        if (timeoutId !== null) {
                            window.clearTimeout(timeoutId);
                        }
                    }

                    // Scroll to the bottom
                    this.chatContainer.scrollTop = this.chatContainer.scrollHeight;
                } catch (e) {
                    console.error('Error parsing SSE message:', e, 'Raw data:', event.data);
                }
            };

            source.onerror = () => {
                source.close();
                this.hideLoadingIndicator();

                // Clear the timeout if there was an error
                if (timeoutId !== null) {
                    window.clearTimeout(timeoutId);
                }

                // Only show an error message if we haven't received any content yet
                if (!receivedAnyContent) {
                    this.addMessageToChat('assistant', 'Error connecting to the LLM service. Please try again.');
                }
            };
        } catch (error) {
            this.hideLoadingIndicator();
            toastService.showError('Error sending message: ' + (error as Error).message);
        }
    }

    private addMessageToChat(role: 'user' | 'assistant', content: string) {
        const messageElement = document.createElement('div');
        messageElement.className = `chat-message ${role}-message mb-3 d-flex`;

        const avatarElement = document.createElement('div');
        avatarElement.className = 'message-avatar d-flex align-items-center justify-content-center me-2';

        if (role === 'user') {
            // The original avatar icon markup was lost during extraction and is left empty here
            avatarElement.innerHTML = '';
            avatarElement.classList.add('user-avatar');
        } else {
            avatarElement.innerHTML = '';
            avatarElement.classList.add('assistant-avatar');
        }

        const contentElement = document.createElement('div');
        contentElement.className = 'message-content p-3 rounded flex-grow-1';

        if (role === 'user') {
            contentElement.classList.add('user-content', 'bg-light');
        } else {
            contentElement.classList.add('assistant-content');
        }

        // Format the content with markdown
        contentElement.innerHTML = this.formatMarkdown(content);

        messageElement.appendChild(avatarElement);
        messageElement.appendChild(contentElement);
        this.noteContextChatMessages.appendChild(messageElement);

        // Apply syntax highlighting to any code blocks in the message
        applySyntaxHighlight($(contentElement));

        // Scroll to the bottom
        this.chatContainer.scrollTop = this.chatContainer.scrollHeight;
    }

    private showSources(sources: Array<{noteId: string, title: string}>) {
        this.sourcesList.innerHTML = '';

        // Update the sources count
        const sourcesCount = this.$widget[0].querySelector('.sources-count') as HTMLElement;
        if (sourcesCount) {
            sourcesCount.textContent = sources.length.toString();
        }

        sources.forEach(source => {
            const sourceElement = document.createElement('div');
            sourceElement.className = 'source-item p-2 mb-1 border rounded d-flex align-items-center';

            // Create the direct link to the note.
            // The original link markup was lost; this is a minimal reconstruction that keeps
            // the .source-link class relied upon by the click handler below.
            sourceElement.innerHTML = `<a class="source-link" href="#root/${source.noteId}">${source.title}</a>`;

            // Add a click handler for a better user experience
            sourceElement.querySelector('.source-link')?.addEventListener('click', (e) => {
                e.preventDefault();
                e.stopPropagation();

                // Open the note in a new tab but don't switch to it
                appContext.tabManager.openTabWithNoteWithHoisting(source.noteId, { activate: false });

                return false; // Additional measure to prevent the event from bubbling up
            });

            this.sourcesList.appendChild(sourceElement);
        });

        const sourcesContainer = this.$widget[0].querySelector('.sources-container') as HTMLElement;
        if (sourcesContainer) {
            sourcesContainer.style.display = 'block';
        }
    }

    private hideSources() {
        const sourcesContainer = this.$widget[0].querySelector('.sources-container') as HTMLElement;
        if (sourcesContainer) {
            sourcesContainer.style.display = 'none';
        }
    }

    private showLoadingIndicator() {
        this.loadingIndicator.style.display = 'flex';
    }

    private hideLoadingIndicator() {
        this.loadingIndicator.style.display = 'none';
    }

    private initializeEventListeners() {
        this.noteContextChatForm.addEventListener('submit', (e) => {
            e.preventDefault();
            const content = this.noteContextChatInput.value;
            this.sendMessage(content);
        });

        // Add auto-resize behaviour to the textarea
        this.noteContextChatInput.addEventListener('input', () => {
            this.noteContextChatInput.style.height = 'auto';
            this.noteContextChatInput.style.height = `${this.noteContextChatInput.scrollHeight}px`;
        });

        // Handle the Enter key (send on Enter, new line on Shift+Enter)
        this.noteContextChatInput.addEventListener('keydown', (e) => {
            if (e.key === 'Enter' && !e.shiftKey) {
                e.preventDefault();
                this.noteContextChatForm.dispatchEvent(new Event('submit'));
            }
        });
    }
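    // Example of the streaming protocol handled in sendMessage() above, inferred from its
    // parsing logic (any payload fields beyond `content` and `error` are assumptions):
    //
    //   data: {"content": "Here is the first part of the answer"}
    //   data: {"content": " ...and some more."}
    //   data: [DONE]
    //
    // An {"error": "..."} payload at any point is rendered as an error message and the
    // EventSource is closed.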
    /**
     * Format markdown content for display
     */
    private formatMarkdown(content: string): string {
        if (!content) return '';

        // Check if the content contains HTML sections for thinking visualization.
        // NOTE: the exact marker strings were lost during extraction; the class name used
        // here is an assumption and must match whatever markup the backend actually emits.
        if (content.includes('<div class="thinking-process"') || content.includes('class="thinking-process"')) {
            console.log('Detected thinking process visualization in response');
            // For content with HTML thinking visualizations, we need to protect them
        }

        // First, extract the HTML thinking visualization to protect it from replacements
        // (the regex below is a reconstruction based on the assumed markup above)
        const thinkingBlocks: string[] = [];
        let processedContent = content.replace(/<div class="thinking-process"[\s\S]*?<\/div>/g, (match) => {
            const placeholder = `__THINKING_BLOCK_${thinkingBlocks.length}__`;
            thinkingBlocks.push(match);
            return placeholder;
        });

        // Then extract code blocks to protect them from other replacements
        const codeBlocks: string[] = [];
        processedContent = processedContent.replace(/```(\w+)?\n([\s\S]+?)\n```/gs, (match, language, code) => {
            const placeholder = `__CODE_BLOCK_${codeBlocks.length}__`;
            const languageClass = language ? ` language-${language}` : '';
            // The original wrapper markup was lost; a <pre><code> wrapper with an assumed
            // "hljs" base class plus the language class is used here
            codeBlocks.push(`<pre><code class="hljs${languageClass}">${code}</code></pre>`);
            return placeholder;
        });

        // Apply other markdown formatting
        // (the replacement tags were stripped from the extracted source; standard
        //  markdown-to-HTML equivalents are restored here)
        processedContent = processedContent
            .replace(/\*\*(.*?)\*\*/g, '<strong>$1</strong>')
            .replace(/\*(.*?)\*/g, '<em>$1</em>')
            .replace(/`([^`]+)`/g, '<code>$1</code>')
            .replace(/\n/g, '<br>');

        // Restore the code blocks
        codeBlocks.forEach((block, index) => {
            processedContent = processedContent.replace(`__CODE_BLOCK_${index}__`, block);
        });

        // Restore the thinking visualization blocks
        thinkingBlocks.forEach((block, index) => {
            processedContent = processedContent.replace(`__THINKING_BLOCK_${index}__`, block);
        });

        return processedContent;
    }
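    // Worked example for formatMarkdown() above (the output shape assumes the restored
    // HTML tags noted in the comments there):
    //
    //   formatMarkdown('Use **bold**, `code` and a fence:\n```js\nlet x = 1;\n```')
    //   // -> 'Use <strong>bold</strong>, <code>code</code> and a fence:<br>' +
    //   //    '<pre><code class="hljs language-js">let x = 1;</code></pre>'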
    /**
     * Validate embedding providers configuration.
     * Check if there are issues with the embedding providers that might affect LLM functionality.
     */
    async validateEmbeddingProviders() {
        try {
            // Check if AI is enabled
            const aiEnabled = options.is('aiEnabled');

            if (!aiEnabled) {
                this.validationWarning.style.display = 'none';
                return;
            }

            // Get the default embedding provider
            const defaultProvider = options.get('embeddingsDefaultProvider') || 'openai';

            // Get the provider precedence
            const precedenceStr = options.get('aiProviderPrecedence') || 'openai,anthropic,ollama';
            let precedenceList: string[] = [];

            if (precedenceStr) {
                if (precedenceStr.startsWith('[') && precedenceStr.endsWith(']')) {
                    precedenceList = JSON.parse(precedenceStr);
                } else if (precedenceStr.includes(',')) {
                    precedenceList = precedenceStr.split(',').map(p => p.trim());
                } else {
                    precedenceList = [precedenceStr];
                }
            }

            // Get the enabled providers. This is a simplification since we don't have direct
            // DB access; enabled status is determined from the presence of keys or settings.
            const enabledProviders: string[] = [];

            // OpenAI is enabled if an API key is set
            const openaiKey = options.get('openaiApiKey');
            if (openaiKey) {
                enabledProviders.push('openai');
            }

            // Anthropic is enabled if an API key is set
            const anthropicKey = options.get('anthropicApiKey');
            if (anthropicKey) {
                enabledProviders.push('anthropic');
            }

            // Ollama is enabled if the setting is true
            const ollamaEnabled = options.is('ollamaEnabled');
            if (ollamaEnabled) {
                enabledProviders.push('ollama');
            }

            // Local is always available
            enabledProviders.push('local');

            // Perform the validation checks
            const defaultInPrecedence = precedenceList.includes(defaultProvider);
            const defaultIsEnabled = enabledProviders.includes(defaultProvider);
            const allPrecedenceEnabled = precedenceList.every((p: string) => enabledProviders.includes(p));

            // Get the embedding queue status
            const embeddingStats = await server.get('embeddings/stats') as {
                success: boolean,
                stats: {
                    totalNotesCount: number;
                    embeddedNotesCount: number;
                    queuedNotesCount: number;
                    failedNotesCount: number;
                    lastProcessedDate: string | null;
                    percentComplete: number;
                }
            };
            const queuedNotes = embeddingStats?.stats?.queuedNotesCount || 0;
            const hasEmbeddingsInQueue = queuedNotes > 0;

            // Show a warning if there are issues
            if (!defaultInPrecedence || !defaultIsEnabled || !allPrecedenceEnabled || hasEmbeddingsInQueue) {
                // The original warning markup was partially lost during extraction; the list
                // structure and the .settings-link class (relied upon by the click handler in
                // doRender()) are reconstructed minimally here.
                let message = 'AI Provider Configuration Issues';
                message += '<ul>';

                if (!defaultInPrecedence) {
                    message += `<li>The default embedding provider "${defaultProvider}" is not in your provider precedence list.</li>`;
                }

                if (!defaultIsEnabled) {
                    message += `<li>The default embedding provider "${defaultProvider}" is not enabled.</li>`;
                }

                if (!allPrecedenceEnabled) {
                    const disabledProviders = precedenceList.filter((p: string) => !enabledProviders.includes(p));
                    message += `<li>The following providers in your precedence list are not enabled: ${disabledProviders.join(', ')}.</li>`;
                }

                if (hasEmbeddingsInQueue) {
                    message += `<li>Currently processing embeddings for ${queuedNotes} notes. Some AI features may produce incomplete results until processing completes.</li>`;
                }

                message += '</ul>';
                message += '<a href="#" class="settings-link">Open AI settings</a>';

                // Update the HTML content - no need to attach event listeners here anymore
                this.validationWarning.innerHTML = message;
                this.validationWarning.style.display = 'block';
            } else {
                this.validationWarning.style.display = 'none';
            }
        } catch (error) {
            console.error('Error validating embedding providers:', error);
            this.validationWarning.style.display = 'none';
        }
    }
}
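// Usage sketch (assumption: BasicWidget exposes render()/refresh() as other Trilium frontend
// widgets do; in practice this panel is wired up by the application layout rather than manually):
//
//   const chatPanel = new LlmChatPanel();
//   const $el = chatPanel.render();   // builds the DOM from TPL and creates a chat session
//   $('#some-container').append($el);
//   await chatPanel.refresh();        // re-validates providers and picks up the active note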