import BasicWidget from "./basic_widget.js";
import toastService from "../services/toast.js";
import server from "../services/server.js";
import appContext from "../components/app_context.js";
import utils from "../services/utils.js";
import { t } from "../services/i18n.js";
import libraryLoader from "../services/library_loader.js";
import { applySyntaxHighlight } from "../services/syntax_highlight.js";
import options from "../services/options.js";
import { marked } from "marked";

// Import the LLM Chat CSS
(async function() {
    await libraryLoader.requireCss('stylesheets/llm_chat.css');
})();

const TPL = `
Options:
`;

interface ChatResponse {
    id: string;
    messages: Array<{role: string; content: string}>;
    sources?: Array<{noteId: string; title: string}>;
}

interface SessionResponse {
    id: string;
    title: string;
}

export default class LlmChatPanel extends BasicWidget {
    private noteContextChatMessages!: HTMLElement;
    private noteContextChatForm!: HTMLFormElement;
    private noteContextChatInput!: HTMLTextAreaElement;
    private noteContextChatSendButton!: HTMLButtonElement;
    private chatContainer!: HTMLElement;
    private loadingIndicator!: HTMLElement;
    private sourcesList!: HTMLElement;
    private useAdvancedContextCheckbox!: HTMLInputElement;
    private showThinkingCheckbox!: HTMLInputElement;
    private validationWarning!: HTMLElement;
    private sessionId: string | null = null;
    private currentNoteId: string | null = null;

    // Callbacks for data persistence
    private onSaveData: ((data: any) => Promise<void>) | null = null;
    private onGetData: (() => Promise<any>) | null = null;

    private messages: Array<{role: string; content: string; timestamp?: Date}> = [];

    // Public getters and setters for private properties
    public getCurrentNoteId(): string | null {
        return this.currentNoteId;
    }

    public setCurrentNoteId(noteId: string | null): void {
        this.currentNoteId = noteId;
    }

    public getMessages(): Array<{role: string; content: string; timestamp?: Date}> {
        return this.messages;
    }

    public setMessages(messages: Array<{role: string; content: string; timestamp?: Date}>): void {
        this.messages = messages;
    }

    public getSessionId(): string | null {
        return this.sessionId;
    }

    public setSessionId(sessionId: string | null): void {
        this.sessionId = sessionId;
    }

    public getNoteContextChatMessages(): HTMLElement {
        return this.noteContextChatMessages;
    }

    public clearNoteContextChatMessages(): void {
        this.noteContextChatMessages.innerHTML = '';
    }

    doRender() {
        this.$widget = $(TPL);

        const element = this.$widget[0];
        this.noteContextChatMessages = element.querySelector('.note-context-chat-messages') as HTMLElement;
        this.noteContextChatForm = element.querySelector('.note-context-chat-form') as HTMLFormElement;
        this.noteContextChatInput = element.querySelector('.note-context-chat-input') as HTMLTextAreaElement;
        this.noteContextChatSendButton = element.querySelector('.note-context-chat-send-button') as HTMLButtonElement;
        this.chatContainer = element.querySelector('.note-context-chat-container') as HTMLElement;
        this.loadingIndicator = element.querySelector('.loading-indicator') as HTMLElement;
        this.sourcesList = element.querySelector('.sources-list') as HTMLElement;
        this.useAdvancedContextCheckbox = element.querySelector('.use-advanced-context-checkbox') as HTMLInputElement;
        this.showThinkingCheckbox = element.querySelector('.show-thinking-checkbox') as HTMLInputElement;
        this.validationWarning = element.querySelector('.provider-validation-warning') as HTMLElement;

        // Set up event delegation for the settings link
        this.validationWarning.addEventListener('click', (e) => {
            const target = e.target as HTMLElement;
            if (target.classList.contains('settings-link') || target.closest('.settings-link')) {
                console.log('Settings link clicked, navigating to AI settings URL');
                window.location.href = '#root/_hidden/_options/_optionsAi';
            }
        });

        this.initializeEventListeners();

        // Don't create a session here - wait for refresh
        // This prevents the wrong session from being created for the wrong note
        return this.$widget;
    }

    /**
     * Set the callbacks for data persistence
     */
    setDataCallbacks(
        saveDataCallback: (data: any) => Promise<void>,
        getDataCallback: () => Promise<any>
    ) {
        this.onSaveData = saveDataCallback;
        this.onGetData = getDataCallback;
    }
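    // Illustrative sketch (not part of the widget): how an owning component might wire the
    // persistence callbacks. The owner object and its saveNoteData/getNoteData helpers are
    // assumptions; the payload shape matches what saveCurrentData()/loadSavedData() below
    // write and read.
    //
    //     const panel = new LlmChatPanel();
    //     panel.setDataCallbacks(
    //         async (data) => { await owner.saveNoteData(noteId, data); },
    //         async () => owner.getNoteData(noteId)
    //     );
    //     // Saved payload: { messages: [{role, content, timestamp?}], lastUpdated: Date, noteId: string }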
    /**
     * Load saved chat data from the note
     */
    async loadSavedData() {
        if (!this.onGetData) {
            console.log("No getData callback available");
            return;
        }

        try {
            const data = await this.onGetData();
            console.log(`Loading chat data for noteId: ${this.currentNoteId}`, data);

            // Make sure we're loading data for the correct note
            if (data && data.noteId && data.noteId !== this.currentNoteId) {
                console.warn(`Data noteId ${data.noteId} doesn't match current noteId ${this.currentNoteId}`);
            }

            if (data && data.messages && Array.isArray(data.messages)) {
                // Clear existing messages in the UI
                this.noteContextChatMessages.innerHTML = '';
                this.messages = [];

                // Add each message to the UI
                data.messages.forEach((message: {role: string; content: string}) => {
                    if (message.role === 'user' || message.role === 'assistant') {
                        this.addMessageToChat(message.role, message.content);
                        // Track messages in our local array too
                        this.messages.push(message);
                    }
                });

                // Scroll to bottom
                this.chatContainer.scrollTop = this.chatContainer.scrollHeight;

                console.log(`Successfully loaded ${data.messages.length} messages for noteId: ${this.currentNoteId}`);
                return true;
            }
        } catch (e) {
            console.error(`Error loading saved chat data for noteId: ${this.currentNoteId}:`, e);
        }

        return false;
    }

    /**
     * Save the current chat data to the note
     */
    async saveCurrentData() {
        if (!this.onSaveData) {
            console.log("No saveData callback available");
            return;
        }

        try {
            // Include the current note ID for tracking purposes
            await this.onSaveData({
                messages: this.messages,
                lastUpdated: new Date(),
                noteId: this.currentNoteId // Include the note ID to help with debugging
            });

            console.log(`Saved chat data for noteId: ${this.currentNoteId} with ${this.messages.length} messages`);
            return true;
        } catch (e) {
            console.error(`Error saving chat data for noteId: ${this.currentNoteId}:`, e);
            return false;
        }
    }

    async refresh() {
        if (!this.isVisible()) {
            return;
        }

        // Check for any provider validation issues when refreshing
        await this.validateEmbeddingProviders();

        // Get current note context if needed
        const currentActiveNoteId = appContext.tabManager.getActiveContext()?.note?.noteId || null;

        // If we're switching to a different note, we need to reset
        if (this.currentNoteId !== currentActiveNoteId) {
            console.log(`Note ID changed from ${this.currentNoteId} to ${currentActiveNoteId}, resetting chat panel`);

            // Reset the UI and data
            this.noteContextChatMessages.innerHTML = '';
            this.messages = [];
            this.sessionId = null;
            this.hideSources(); // Hide any sources from previous note

            // Update our current noteId
            this.currentNoteId = currentActiveNoteId;
        }

        // Always try to load saved data for the current note
        const hasSavedData = await this.loadSavedData();

        // Only create a new session if we don't have a session or saved data
        if (!this.sessionId || !hasSavedData) {
            // Create a new chat session
            await this.createChatSession();
        }
    }

    private async createChatSession() {
        // Check for validation issues first
        await this.validateEmbeddingProviders();

        try {
            const resp = await server.post<SessionResponse>('llm/sessions', {
                title: 'Note Chat'
            });

            if (resp && resp.id) {
                this.sessionId = resp.id;
            }
        } catch (error) {
            console.error('Failed to create chat session:', error);
            toastService.showError('Failed to create chat session');
        }
    }
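    // Sketch of the round-trip createChatSession() performs, for orientation. The endpoint and
    // response fields come from the code above (SessionResponse); the concrete id value is invented.
    //
    //     POST llm/sessions   body:     { "title": "Note Chat" }
    //                         response: { "id": "abc123", "title": "Note Chat" }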
    /**
     * Handle sending a user message to the LLM service
     */
    private async sendMessage(content: string) {
        if (!content.trim() || !this.sessionId) {
            return;
        }

        // Check for provider validation issues before sending
        await this.validateEmbeddingProviders();

        // Process the user message
        await this.processUserMessage(content);

        // Clear input and show loading state
        this.noteContextChatInput.value = '';
        this.showLoadingIndicator();
        this.hideSources();

        try {
            const useAdvancedContext = this.useAdvancedContextCheckbox.checked;
            const showThinking = this.showThinkingCheckbox.checked;

            // Add logging to verify parameters
            console.log(`Sending message with: useAdvancedContext=${useAdvancedContext}, showThinking=${showThinking}, noteId=${this.currentNoteId}`);

            // Create the message parameters
            const messageParams = {
                content,
                contextNoteId: this.currentNoteId,
                useAdvancedContext,
                showThinking
            };

            // First try to get a direct response
            const handled = await this.handleDirectResponse(messageParams);
            if (handled) return;

            // If no direct response, set up streaming
            await this.setupStreamingResponse(messageParams);
        } catch (error) {
            this.handleError(error as Error);
        }
    }

    /**
     * Process a new user message - add to UI and save
     */
    private async processUserMessage(content: string) {
        // Add user message to the chat UI
        this.addMessageToChat('user', content);

        // Add to our local message array too
        this.messages.push({
            role: 'user',
            content,
            timestamp: new Date()
        });

        // Save to note
        this.saveCurrentData().catch(err => {
            console.error("Failed to save user message to note:", err);
        });
    }

    /**
     * Try to get a direct response from the server
     * @returns true if response was handled, false if streaming should be used
     */
    private async handleDirectResponse(messageParams: any): Promise<boolean> {
        // Send the message via POST request
        const postResponse = await server.post(`llm/sessions/${this.sessionId}/messages`, messageParams);

        // If the POST request returned content directly, display it
        if (postResponse && postResponse.content) {
            this.processAssistantResponse(postResponse.content);

            // If there are sources, show them
            if (postResponse.sources && postResponse.sources.length > 0) {
                this.showSources(postResponse.sources);
            }

            this.hideLoadingIndicator();
            return true;
        }

        return false;
    }

    /**
     * Process an assistant response - add to UI and save
     */
    private async processAssistantResponse(content: string) {
        // Add the response to the chat UI
        this.addMessageToChat('assistant', content);

        // Add to our local message array too
        this.messages.push({
            role: 'assistant',
            content,
            timestamp: new Date()
        });

        // Save to note
        this.saveCurrentData().catch(err => {
            console.error("Failed to save assistant response to note:", err);
        });
    }

    /**
     * Set up streaming response from the server
     */
    private async setupStreamingResponse(messageParams: any) {
        const useAdvancedContext = messageParams.useAdvancedContext;
        const showThinking = messageParams.showThinking;

        // Set up streaming via EventSource
        const streamUrl = `./api/llm/sessions/${this.sessionId}/messages?format=stream&useAdvancedContext=${useAdvancedContext}&showThinking=${showThinking}`;
        const source = new EventSource(streamUrl);

        let assistantResponse = '';
        let receivedAnyContent = false;
        let timeoutId: number | null = null;

        // Set up timeout for streaming response
        timeoutId = this.setupStreamingTimeout(source);

        // Handle streaming response
        source.onmessage = (event) => this.handleStreamingMessage(
            event,
            source,
            timeoutId,
            assistantResponse,
            receivedAnyContent
        );

        // Handle streaming errors
        source.onerror = () => this.handleStreamingError(
            source,
            timeoutId,
            receivedAnyContent
        );
    }
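    // Orientation sketch of the event stream the handlers below expect. The concrete payloads
    // are invented, but the shape (JSON chunks carrying either `content` or `error`, terminated
    // by a literal "[DONE]" event) is exactly what handleStreamingMessage() parses.
    //
    //     data: {"content": "Here is "}
    //     data: {"content": "the answer."}
    //     data: [DONE]
    //
    //     // or, on failure:
    //     data: {"error": "model unavailable"}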
    /**
     * Set up timeout for streaming response
     * @returns Timeout ID for the created timeout
     */
    private setupStreamingTimeout(source: EventSource): number {
        // Set a timeout to handle the case where streaming doesn't work properly
        return window.setTimeout(() => {
            // If we haven't received any content after a reasonable timeout (10 seconds),
            // add a fallback message and close the stream
            this.hideLoadingIndicator();
            const errorMessage = 'I\'m having trouble generating a response right now. Please try again later.';
            this.processAssistantResponse(errorMessage);
            source.close();
        }, 10000);
    }

    /**
     * Handle messages from the streaming response
     */
    private handleStreamingMessage(
        event: MessageEvent,
        source: EventSource,
        timeoutId: number | null,
        assistantResponse: string,
        receivedAnyContent: boolean
    ) {
        if (event.data === '[DONE]') {
            this.handleStreamingComplete(source, timeoutId, receivedAnyContent, assistantResponse);
            return;
        }

        try {
            const data = JSON.parse(event.data);
            console.log("Received streaming data:", data); // Debug log

            // Handle both content and error cases
            if (data.content) {
                receivedAnyContent = true;
                assistantResponse += data.content;

                // Update the UI with the accumulated response
                this.updateStreamingUI(assistantResponse);
            } else if (data.error) {
                // Handle error message
                this.hideLoadingIndicator();
                this.addMessageToChat('assistant', `Error: ${data.error}`);
                receivedAnyContent = true;
                source.close();

                if (timeoutId !== null) {
                    window.clearTimeout(timeoutId);
                }
            }

            // Scroll to the bottom
            this.chatContainer.scrollTop = this.chatContainer.scrollHeight;
        } catch (e) {
            console.error('Error parsing SSE message:', e, 'Raw data:', event.data);
        }
    }

    /**
     * Update the UI with streaming content as it arrives
     */
    private updateStreamingUI(assistantResponse: string) {
        const assistantElement = this.noteContextChatMessages.querySelector('.assistant-message:last-child .message-content');

        if (assistantElement) {
            assistantElement.innerHTML = this.formatMarkdown(assistantResponse);

            // Apply syntax highlighting to any code blocks in the updated content
            applySyntaxHighlight($(assistantElement as HTMLElement));
        } else {
            this.addMessageToChat('assistant', assistantResponse);
        }
    }

    /**
     * Handle completion of streaming response
     */
    private handleStreamingComplete(
        source: EventSource,
        timeoutId: number | null,
        receivedAnyContent: boolean,
        assistantResponse: string
    ) {
        // Stream completed
        source.close();
        this.hideLoadingIndicator();

        // Clear the timeout since we're done
        if (timeoutId !== null) {
            window.clearTimeout(timeoutId);
        }

        // If we didn't receive any content but the stream completed normally,
        // display a message to the user
        if (!receivedAnyContent) {
            const defaultMessage = 'I processed your request, but I don\'t have any specific information to share at the moment.';
            this.processAssistantResponse(defaultMessage);
        } else if (assistantResponse) {
            // Save the completed streaming response to the message array
            this.messages.push({
                role: 'assistant',
                content: assistantResponse,
                timestamp: new Date()
            });

            // Save to note
            this.saveCurrentData().catch(err => {
                console.error("Failed to save assistant response to note:", err);
            });
        }
    }
    /**
     * Handle errors during streaming response
     */
    private handleStreamingError(
        source: EventSource,
        timeoutId: number | null,
        receivedAnyContent: boolean
    ) {
        source.close();
        this.hideLoadingIndicator();

        // Clear the timeout if there was an error
        if (timeoutId !== null) {
            window.clearTimeout(timeoutId);
        }

        // Only show error message if we haven't received any content yet
        if (!receivedAnyContent) {
            const connectionError = 'Error connecting to the LLM service. Please try again.';
            this.processAssistantResponse(connectionError);
        }
    }

    /**
     * Handle general errors in the send message flow
     */
    private handleError(error: Error) {
        this.hideLoadingIndicator();
        toastService.showError('Error sending message: ' + error.message);
    }

    private addMessageToChat(role: 'user' | 'assistant', content: string) {
        const messageElement = document.createElement('div');
        messageElement.className = `chat-message ${role}-message mb-3 d-flex`;

        const avatarElement = document.createElement('div');
        avatarElement.className = 'message-avatar d-flex align-items-center justify-content-center me-2';

        if (role === 'user') {
            avatarElement.innerHTML = '';
            avatarElement.classList.add('user-avatar');
        } else {
            avatarElement.innerHTML = '';
            avatarElement.classList.add('assistant-avatar');
        }

        const contentElement = document.createElement('div');
        contentElement.className = 'message-content p-3 rounded flex-grow-1';

        if (role === 'user') {
            contentElement.classList.add('user-content', 'bg-light');
        } else {
            contentElement.classList.add('assistant-content');
        }

        // Format the content with markdown
        contentElement.innerHTML = this.formatMarkdown(content);

        messageElement.appendChild(avatarElement);
        messageElement.appendChild(contentElement);

        this.noteContextChatMessages.appendChild(messageElement);

        // Apply syntax highlighting to any code blocks in the message
        applySyntaxHighlight($(contentElement));

        // Scroll to bottom
        this.chatContainer.scrollTop = this.chatContainer.scrollHeight;
    }

    private showSources(sources: Array<{noteId: string, title: string}>) {
        this.sourcesList.innerHTML = '';

        // Update the sources count
        const sourcesCount = this.$widget[0].querySelector('.sources-count') as HTMLElement;
        if (sourcesCount) {
            sourcesCount.textContent = sources.length.toString();
        }

        sources.forEach(source => {
            const sourceElement = document.createElement('div');
            sourceElement.className = 'source-item p-2 mb-1 border rounded d-flex align-items-center';

            // Create the direct link to the note
            sourceElement.innerHTML = ` `;

            // Add click handler for better user experience
            sourceElement.querySelector('.source-link')?.addEventListener('click', (e) => {
                e.preventDefault();
                e.stopPropagation();

                // Open the note in a new tab but don't switch to it
                appContext.tabManager.openTabWithNoteWithHoisting(source.noteId, { activate: false });

                return false; // Additional measure to prevent the event from bubbling up
            });

            this.sourcesList.appendChild(sourceElement);
        });

        const sourcesContainer = this.$widget[0].querySelector('.sources-container') as HTMLElement;
        if (sourcesContainer) {
            sourcesContainer.style.display = 'block';
        }
    }

    private hideSources() {
        const sourcesContainer = this.$widget[0].querySelector('.sources-container') as HTMLElement;
        if (sourcesContainer) {
            sourcesContainer.style.display = 'none';
        }
    }
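    // For orientation: the sources rendered by showSources() above arrive as an array of note
    // references, e.g. [{ noteId: "abc123", title: "Project plan" }] (values invented, shape taken
    // from the ChatResponse interface). Each entry becomes one .source-item element whose
    // .source-link opens the note in a background tab without activating it.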
    private showLoadingIndicator() {
        this.loadingIndicator.style.display = 'flex';
    }

    private hideLoadingIndicator() {
        this.loadingIndicator.style.display = 'none';
    }

    private initializeEventListeners() {
        this.noteContextChatForm.addEventListener('submit', (e) => {
            e.preventDefault();
            const content = this.noteContextChatInput.value;
            this.sendMessage(content);
        });

        // Add auto-resize functionality to the textarea
        this.noteContextChatInput.addEventListener('input', () => {
            this.noteContextChatInput.style.height = 'auto';
            this.noteContextChatInput.style.height = `${this.noteContextChatInput.scrollHeight}px`;
        });

        // Handle Enter key (send on Enter, new line on Shift+Enter)
        this.noteContextChatInput.addEventListener('keydown', (e) => {
            if (e.key === 'Enter' && !e.shiftKey) {
                e.preventDefault();
                this.noteContextChatForm.dispatchEvent(new Event('submit'));
            }
        });
    }

    /**
     * Format markdown content for display
     */
    private formatMarkdown(content: string): string {
        if (!content) return '';

        // First, extract HTML thinking visualization to protect it from replacements.
        // The pattern below assumes the thinking visualization is wrapped in a
        // <div class="thinking-process">...</div> block.
        const thinkingBlocks: string[] = [];
        let processedContent = content.replace(/<div class="thinking-process"[\s\S]*?<\/div>/g, (match) => {
            const placeholder = `__THINKING_BLOCK_${thinkingBlocks.length}__`;
            thinkingBlocks.push(match);
            return placeholder;
        });

        // Use marked library to parse the markdown
        const markedContent = marked(processedContent, {
            breaks: true,   // Convert line breaks to <br>
            gfm: true,      // Enable GitHub Flavored Markdown
            silent: true    // Ignore errors
        });

        // Handle potential promise (though it shouldn't be with our options)
        if (typeof markedContent === 'string') {
            processedContent = markedContent;
        } else {
            console.warn('Marked returned a promise unexpectedly');
            // Use the original content as fallback
            processedContent = content;
        }

        // Restore thinking visualization blocks
        thinkingBlocks.forEach((block, index) => {
            processedContent = processedContent.replace(`__THINKING_BLOCK_${index}__`, block);
        });

        return processedContent;
    }
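    // Illustrative walkthrough of the protect-and-restore step above (input invented):
    //
    //     formatMarkdown('Before <div class="thinking-process">...</div> **after**')
    //     // 1. the thinking block is swapped for "__THINKING_BLOCK_0__" so marked() cannot rewrite it
    //     // 2. marked() renders the remaining markdown ("**after**" -> "<strong>after</strong>")
    //     // 3. the placeholder is swapped back, preserving the original thinking markup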
    /**
     * Validate embedding providers configuration
     * Check if there are issues with the embedding providers that might affect LLM functionality
     */
    async validateEmbeddingProviders() {
        try {
            // Check if AI is enabled
            const aiEnabled = options.is('aiEnabled');

            if (!aiEnabled) {
                this.validationWarning.style.display = 'none';
                return;
            }

            // Get provider precedence
            const precedenceStr = options.get('aiProviderPrecedence') || 'openai,anthropic,ollama';
            let precedenceList: string[] = [];

            if (precedenceStr) {
                if (precedenceStr.startsWith('[') && precedenceStr.endsWith(']')) {
                    precedenceList = JSON.parse(precedenceStr);
                } else if (precedenceStr.includes(',')) {
                    precedenceList = precedenceStr.split(',').map(p => p.trim());
                } else {
                    precedenceList = [precedenceStr];
                }
            }

            // Get enabled providers - this is a simplification since we don't have direct DB access
            // We'll determine enabled status based on the presence of keys or settings
            const enabledProviders: string[] = [];

            // OpenAI is enabled if API key is set
            const openaiKey = options.get('openaiApiKey');
            if (openaiKey) {
                enabledProviders.push('openai');
            }

            // Anthropic is enabled if API key is set
            const anthropicKey = options.get('anthropicApiKey');
            if (anthropicKey) {
                enabledProviders.push('anthropic');
            }

            // Ollama is enabled if base URL is set
            const ollamaBaseUrl = options.get('ollamaBaseUrl');
            if (ollamaBaseUrl) {
                enabledProviders.push('ollama');
            }

            // Local is always available
            enabledProviders.push('local');

            // Perform validation checks
            const allPrecedenceEnabled = precedenceList.every((p: string) => enabledProviders.includes(p));

            // Get embedding queue status
            const embeddingStats = await server.get('llm/embeddings/stats') as {
                success: boolean,
                stats: {
                    totalNotesCount: number;
                    embeddedNotesCount: number;
                    queuedNotesCount: number;
                    failedNotesCount: number;
                    lastProcessedDate: string | null;
                    percentComplete: number;
                }
            };

            const queuedNotes = embeddingStats?.stats?.queuedNotesCount || 0;
            const hasEmbeddingsInQueue = queuedNotes > 0;

            // Show warning if there are issues
            if (!allPrecedenceEnabled || hasEmbeddingsInQueue) {
                // Build the warning as simple HTML. Plain <ul>/<li> tags are assumed here; only
                // the .settings-link class below matters, since the click handler registered in
                // doRender() looks it up.
                let message = 'AI Provider Configuration Issues';
                message += '<ul>';

                if (!allPrecedenceEnabled) {
                    const disabledProviders = precedenceList.filter((p: string) => !enabledProviders.includes(p));
                    message += `<li>The following providers in your precedence list are not enabled: ${disabledProviders.join(', ')}.</li>`;
                }

                if (hasEmbeddingsInQueue) {
                    message += `<li>Currently processing embeddings for ${queuedNotes} notes. Some AI features may produce incomplete results until processing completes.</li>`;
                }

                message += '</ul>';
                message += '<a class="settings-link" href="#">Open AI settings</a>';

                // Update HTML content - no need to attach event listeners here anymore
                this.validationWarning.innerHTML = message;
                this.validationWarning.style.display = 'block';
            } else {
                this.validationWarning.style.display = 'none';
            }
        } catch (error) {
            console.error('Error validating embedding providers:', error);
            this.validationWarning.style.display = 'none';
        }
    }
}
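// Typical lifecycle of this panel, sketched for orientation. The owning container, the saveCb/getCb
// callbacks, and the noteId variable are assumptions; the method names are the ones defined above.
//
//     const panel = new LlmChatPanel();
//     panel.doRender();                       // builds the DOM from TPL and wires listeners
//     panel.setDataCallbacks(saveCb, getCb);  // persistence hooks provided by the owner
//     panel.setCurrentNoteId(noteId);
//     await panel.refresh();                  // loads saved messages or creates a chat session
//     // user input is then handled internally via the form's submit handler -> sendMessage()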