add embedding recreation button back

perf3ct 2025-03-30 19:32:38 +00:00
parent d8d41a14cf
commit 9dd76873ac
No known key found for this signature in database
GPG Key ID: 569C4EEC436F5232
4 changed files with 296 additions and 188 deletions

View File

@@ -93,7 +93,7 @@ export default class LlmChatPanel extends BasicWidget {
     private validationWarning!: HTMLElement;
     private sessionId: string | null = null;
     private currentNoteId: string | null = null;

     // Callbacks for data persistence
     private onSaveData: ((data: any) => Promise<void>) | null = null;
     private onGetData: (() => Promise<any>) | null = null;
@@ -127,7 +127,7 @@ export default class LlmChatPanel extends BasicWidget {
         // Don't create a session here - wait for refresh
         // This prevents the wrong session from being created for the wrong note

         return this.$widget;
     }
@@ -141,7 +141,7 @@ export default class LlmChatPanel extends BasicWidget {
         this.onSaveData = saveDataCallback;
         this.onGetData = getDataCallback;
     }

     /**
      * Load saved chat data from the note
      */
@@ -150,21 +150,21 @@ export default class LlmChatPanel extends BasicWidget {
             console.log("No getData callback available");
             return;
         }

         try {
             const data = await this.onGetData();
             console.log(`Loading chat data for noteId: ${this.currentNoteId}`, data);

             // Make sure we're loading data for the correct note
             if (data && data.noteId && data.noteId !== this.currentNoteId) {
                 console.warn(`Data noteId ${data.noteId} doesn't match current noteId ${this.currentNoteId}`);
             }

             if (data && data.messages && Array.isArray(data.messages)) {
                 // Clear existing messages in the UI
                 this.noteContextChatMessages.innerHTML = '';
                 this.messages = [];

                 // Add each message to the UI
                 data.messages.forEach((message: {role: string; content: string}) => {
                     if (message.role === 'user' || message.role === 'assistant') {
@@ -173,20 +173,20 @@ export default class LlmChatPanel extends BasicWidget {
                         this.messages.push(message);
                     }
                 });

                 // Scroll to bottom
                 this.chatContainer.scrollTop = this.chatContainer.scrollHeight;

                 console.log(`Successfully loaded ${data.messages.length} messages for noteId: ${this.currentNoteId}`);
                 return true;
             }
         } catch (e) {
             console.error(`Error loading saved chat data for noteId: ${this.currentNoteId}:`, e);
         }

         return false;
     }

     /**
      * Save the current chat data to the note
      */
@@ -195,7 +195,7 @@ export default class LlmChatPanel extends BasicWidget {
             console.log("No saveData callback available");
             return;
         }

         try {
             // Include the current note ID for tracking purposes
             await this.onSaveData({
@@ -221,24 +221,24 @@ export default class LlmChatPanel extends BasicWidget {
         // Get current note context if needed
         const currentActiveNoteId = appContext.tabManager.getActiveContext()?.note?.noteId || null;

         // If we're switching to a different note, we need to reset
         if (this.currentNoteId !== currentActiveNoteId) {
             console.log(`Note ID changed from ${this.currentNoteId} to ${currentActiveNoteId}, resetting chat panel`);

             // Reset the UI and data
             this.noteContextChatMessages.innerHTML = '';
             this.messages = [];
             this.sessionId = null;
             this.hideSources(); // Hide any sources from previous note

             // Update our current noteId
             this.currentNoteId = currentActiveNoteId;
         }

         // Always try to load saved data for the current note
         const hasSavedData = await this.loadSavedData();

         // Only create a new session if we don't have a session or saved data
         if (!this.sessionId || !hasSavedData) {
             // Create a new chat session
@ -264,6 +264,9 @@ export default class LlmChatPanel extends BasicWidget {
} }
} }
/**
* Handle sending a user message to the LLM service
*/
private async sendMessage(content: string) { private async sendMessage(content: string) {
if (!content.trim() || !this.sessionId) { if (!content.trim() || !this.sessionId) {
return; return;
@@ -272,21 +275,10 @@ export default class LlmChatPanel extends BasicWidget {
         // Check for provider validation issues before sending
         await this.validateEmbeddingProviders();

-        // Add user message to the chat
-        this.addMessageToChat('user', content);
-
-        // Add to our local message array too
-        this.messages.push({
-            role: 'user',
-            content,
-            timestamp: new Date()
-        });
-
-        // Save to note
-        this.saveCurrentData().catch(err => {
-            console.error("Failed to save user message to note:", err);
-        });
-
+        // Process the user message
+        await this.processUserMessage(content);
+
+        // Clear input and show loading state
         this.noteContextChatInput.value = '';
         this.showLoadingIndicator();
         this.hideSources();
@@ -306,183 +298,260 @@ export default class LlmChatPanel extends BasicWidget {
                 showThinking
             };

-            // First, send the message via POST request
-            const postResponse = await server.post<any>(`llm/sessions/${this.sessionId}/messages`, messageParams);
-
-            // If the POST request returned content directly, display it
-            if (postResponse && postResponse.content) {
-                this.addMessageToChat('assistant', postResponse.content);
-
-                // Add to our local message array too
-                this.messages.push({
-                    role: 'assistant',
-                    content: postResponse.content,
-                    timestamp: new Date()
-                });
-
-                // Save to note
-                this.saveCurrentData().catch(err => {
-                    console.error("Failed to save assistant response to note:", err);
-                });
-
-                // If there are sources, show them
-                if (postResponse.sources && postResponse.sources.length > 0) {
-                    this.showSources(postResponse.sources);
-                }
-
-                this.hideLoadingIndicator();
-                return;
-            }
-
-            // Then set up streaming via EventSource
-            const streamUrl = `./api/llm/sessions/${this.sessionId}/messages?format=stream&useAdvancedContext=${useAdvancedContext}&showThinking=${showThinking}`;
-            const source = new EventSource(streamUrl);
-
-            let assistantResponse = '';
-            let receivedAnyContent = false;
-            let timeoutId: number | null = null;
-
-            // Set a timeout to handle case where streaming doesn't work properly
-            timeoutId = window.setTimeout(() => {
-                if (!receivedAnyContent) {
-                    // If we haven't received any content after a reasonable timeout (10 seconds),
-                    // add a fallback message and close the stream
-                    this.hideLoadingIndicator();
-                    const errorMessage = 'I\'m having trouble generating a response right now. Please try again later.';
-                    this.addMessageToChat('assistant', errorMessage);
-
-                    // Add to our local message array too
-                    this.messages.push({
-                        role: 'assistant',
-                        content: errorMessage,
-                        timestamp: new Date()
-                    });
-
-                    // Save to note
-                    this.saveCurrentData().catch(err => {
-                        console.error("Failed to save assistant error response to note:", err);
-                    });
-
-                    source.close();
-                }
-            }, 10000);
-
-            // Handle streaming response
-            source.onmessage = (event) => {
-                if (event.data === '[DONE]') {
-                    // Stream completed
-                    source.close();
-                    this.hideLoadingIndicator();
-
-                    // Clear the timeout since we're done
-                    if (timeoutId !== null) {
-                        window.clearTimeout(timeoutId);
-                    }
-
-                    // If we didn't receive any content but the stream completed normally,
-                    // display a message to the user
-                    if (!receivedAnyContent) {
-                        const defaultMessage = 'I processed your request, but I don\'t have any specific information to share at the moment.';
-                        this.addMessageToChat('assistant', defaultMessage);
-
-                        // Add to our local message array too
-                        this.messages.push({
-                            role: 'assistant',
-                            content: defaultMessage,
-                            timestamp: new Date()
-                        });
-
-                        // Save to note
-                        this.saveCurrentData().catch(err => {
-                            console.error("Failed to save assistant response to note:", err);
-                        });
-                    } else if (assistantResponse) {
-                        // Save the completed streaming response to the message array
-                        this.messages.push({
-                            role: 'assistant',
-                            content: assistantResponse,
-                            timestamp: new Date()
-                        });
-
-                        // Save to note
-                        this.saveCurrentData().catch(err => {
-                            console.error("Failed to save assistant response to note:", err);
-                        });
-                    }
-
-                    return;
-                }
-
-                try {
-                    const data = JSON.parse(event.data);
-                    console.log("Received streaming data:", data); // Debug log
-
-                    // Handle both content and error cases
-                    if (data.content) {
-                        receivedAnyContent = true;
-                        assistantResponse += data.content;
-
-                        // Update the UI with the accumulated response
-                        const assistantElement = this.noteContextChatMessages.querySelector('.assistant-message:last-child .message-content');
-                        if (assistantElement) {
-                            assistantElement.innerHTML = this.formatMarkdown(assistantResponse);
-                            // Apply syntax highlighting to any code blocks in the updated content
-                            applySyntaxHighlight($(assistantElement as HTMLElement));
-                        } else {
-                            this.addMessageToChat('assistant', assistantResponse);
-                        }
-                    } else if (data.error) {
-                        // Handle error message
-                        this.hideLoadingIndicator();
-                        this.addMessageToChat('assistant', `Error: ${data.error}`);
-                        receivedAnyContent = true;
-                        source.close();
-
-                        if (timeoutId !== null) {
-                            window.clearTimeout(timeoutId);
-                        }
-                    }
-
-                    // Scroll to the bottom
-                    this.chatContainer.scrollTop = this.chatContainer.scrollHeight;
-                } catch (e) {
-                    console.error('Error parsing SSE message:', e, 'Raw data:', event.data);
-                }
-            };
-
-            source.onerror = () => {
-                source.close();
-                this.hideLoadingIndicator();
-
-                // Clear the timeout if there was an error
-                if (timeoutId !== null) {
-                    window.clearTimeout(timeoutId);
-                }
-
-                // Only show error message if we haven't received any content yet
-                if (!receivedAnyContent) {
-                    const connectionError = 'Error connecting to the LLM service. Please try again.';
-                    this.addMessageToChat('assistant', connectionError);
-
-                    // Add to our local message array too
-                    this.messages.push({
-                        role: 'assistant',
-                        content: connectionError,
-                        timestamp: new Date()
-                    });
-
-                    // Save to note
-                    this.saveCurrentData().catch(err => {
-                        console.error("Failed to save connection error to note:", err);
-                    });
-                }
-            };
-        } catch (error) {
-            this.hideLoadingIndicator();
-            toastService.showError('Error sending message: ' + (error as Error).message);
-        }
-    }
+            // First try to get a direct response
+            const handled = await this.handleDirectResponse(messageParams);
+            if (handled) return;
+
+            // If no direct response, set up streaming
+            await this.setupStreamingResponse(messageParams);
+        } catch (error) {
+            this.handleError(error as Error);
+        }
+    }
+
+    /**
+     * Process a new user message - add to UI and save
+     */
+    private async processUserMessage(content: string) {
+        // Add user message to the chat UI
+        this.addMessageToChat('user', content);
+
+        // Add to our local message array too
+        this.messages.push({
+            role: 'user',
+            content,
+            timestamp: new Date()
+        });
+
+        // Save to note
+        this.saveCurrentData().catch(err => {
+            console.error("Failed to save user message to note:", err);
+        });
+    }
+
+    /**
+     * Try to get a direct response from the server
+     * @returns true if response was handled, false if streaming should be used
+     */
+    private async handleDirectResponse(messageParams: any): Promise<boolean> {
+        // Send the message via POST request
+        const postResponse = await server.post<any>(`llm/sessions/${this.sessionId}/messages`, messageParams);
+
+        // If the POST request returned content directly, display it
+        if (postResponse && postResponse.content) {
+            this.processAssistantResponse(postResponse.content);
+
+            // If there are sources, show them
+            if (postResponse.sources && postResponse.sources.length > 0) {
+                this.showSources(postResponse.sources);
+            }
+
+            this.hideLoadingIndicator();
+            return true;
+        }
+
+        return false;
+    }
+
+    /**
+     * Process an assistant response - add to UI and save
+     */
+    private async processAssistantResponse(content: string) {
+        // Add the response to the chat UI
+        this.addMessageToChat('assistant', content);
+
+        // Add to our local message array too
+        this.messages.push({
+            role: 'assistant',
+            content,
+            timestamp: new Date()
+        });
+
+        // Save to note
+        this.saveCurrentData().catch(err => {
+            console.error("Failed to save assistant response to note:", err);
+        });
+    }
+
+    /**
+     * Set up streaming response from the server
+     */
+    private async setupStreamingResponse(messageParams: any) {
+        const useAdvancedContext = messageParams.useAdvancedContext;
+        const showThinking = messageParams.showThinking;
+
+        // Set up streaming via EventSource
+        const streamUrl = `./api/llm/sessions/${this.sessionId}/messages?format=stream&useAdvancedContext=${useAdvancedContext}&showThinking=${showThinking}`;
+        const source = new EventSource(streamUrl);
+
+        let assistantResponse = '';
+        let receivedAnyContent = false;
+        let timeoutId: number | null = null;
+
+        // Set up timeout for streaming response
+        timeoutId = this.setupStreamingTimeout(source);
+
+        // Handle streaming response
+        source.onmessage = (event) => this.handleStreamingMessage(
+            event,
+            source,
+            timeoutId,
+            assistantResponse,
+            receivedAnyContent
+        );
+
+        // Handle streaming errors
+        source.onerror = () => this.handleStreamingError(
+            source,
+            timeoutId,
+            receivedAnyContent
+        );
+    }
+
+    /**
+     * Set up timeout for streaming response
+     * @returns Timeout ID for the created timeout
+     */
+    private setupStreamingTimeout(source: EventSource): number {
+        // Set a timeout to handle case where streaming doesn't work properly
+        return window.setTimeout(() => {
+            // If we haven't received any content after a reasonable timeout (10 seconds),
+            // add a fallback message and close the stream
+            this.hideLoadingIndicator();
+            const errorMessage = 'I\'m having trouble generating a response right now. Please try again later.';
+            this.processAssistantResponse(errorMessage);
+            source.close();
+        }, 10000);
+    }
+
+    /**
+     * Handle messages from the streaming response
+     */
+    private handleStreamingMessage(
+        event: MessageEvent,
+        source: EventSource,
+        timeoutId: number | null,
+        assistantResponse: string,
+        receivedAnyContent: boolean
+    ) {
+        if (event.data === '[DONE]') {
+            this.handleStreamingComplete(source, timeoutId, receivedAnyContent, assistantResponse);
+            return;
+        }
+
+        try {
+            const data = JSON.parse(event.data);
+            console.log("Received streaming data:", data); // Debug log
+
+            // Handle both content and error cases
+            if (data.content) {
+                receivedAnyContent = true;
+                assistantResponse += data.content;
+
+                // Update the UI with the accumulated response
+                this.updateStreamingUI(assistantResponse);
+            } else if (data.error) {
+                // Handle error message
+                this.hideLoadingIndicator();
+                this.addMessageToChat('assistant', `Error: ${data.error}`);
+                receivedAnyContent = true;
+                source.close();
+
+                if (timeoutId !== null) {
+                    window.clearTimeout(timeoutId);
+                }
+            }
+
+            // Scroll to the bottom
+            this.chatContainer.scrollTop = this.chatContainer.scrollHeight;
+        } catch (e) {
+            console.error('Error parsing SSE message:', e, 'Raw data:', event.data);
+        }
+    }
+
+    /**
+     * Update the UI with streaming content as it arrives
+     */
+    private updateStreamingUI(assistantResponse: string) {
+        const assistantElement = this.noteContextChatMessages.querySelector('.assistant-message:last-child .message-content');
+        if (assistantElement) {
+            assistantElement.innerHTML = this.formatMarkdown(assistantResponse);
+            // Apply syntax highlighting to any code blocks in the updated content
+            applySyntaxHighlight($(assistantElement as HTMLElement));
+        } else {
+            this.addMessageToChat('assistant', assistantResponse);
+        }
+    }
+
+    /**
+     * Handle completion of streaming response
+     */
+    private handleStreamingComplete(
+        source: EventSource,
+        timeoutId: number | null,
+        receivedAnyContent: boolean,
+        assistantResponse: string
+    ) {
+        // Stream completed
+        source.close();
+        this.hideLoadingIndicator();
+
+        // Clear the timeout since we're done
+        if (timeoutId !== null) {
+            window.clearTimeout(timeoutId);
+        }
+
+        // If we didn't receive any content but the stream completed normally,
+        // display a message to the user
+        if (!receivedAnyContent) {
+            const defaultMessage = 'I processed your request, but I don\'t have any specific information to share at the moment.';
+            this.processAssistantResponse(defaultMessage);
+        } else if (assistantResponse) {
+            // Save the completed streaming response to the message array
+            this.messages.push({
+                role: 'assistant',
+                content: assistantResponse,
+                timestamp: new Date()
+            });
+
+            // Save to note
+            this.saveCurrentData().catch(err => {
+                console.error("Failed to save assistant response to note:", err);
+            });
+        }
+    }
+
+    /**
+     * Handle errors during streaming response
+     */
+    private handleStreamingError(
+        source: EventSource,
+        timeoutId: number | null,
+        receivedAnyContent: boolean
+    ) {
+        source.close();
+        this.hideLoadingIndicator();
+
+        // Clear the timeout if there was an error
+        if (timeoutId !== null) {
+            window.clearTimeout(timeoutId);
+        }
+
+        // Only show error message if we haven't received any content yet
+        if (!receivedAnyContent) {
+            const connectionError = 'Error connecting to the LLM service. Please try again.';
+            this.processAssistantResponse(connectionError);
+        }
+    }
+
+    /**
+     * Handle general errors in the send message flow
+     */
+    private handleError(error: Error) {
+        this.hideLoadingIndicator();
+        toastService.showError('Error sending message: ' + error.message);
+    }

     private addMessageToChat(role: 'user' | 'assistant', content: string) {
         const messageElement = document.createElement('div');
         messageElement.className = `chat-message ${role}-message mb-3 d-flex`;
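
For orientation, the refactored client consumes a server-sent-events stream whose wire format can be inferred from `handleStreamingMessage` above: JSON chunks carrying either a `content` fragment or an `error`, terminated by a literal `[DONE]`. A minimal standalone sketch of that contract (the endpoint path is taken from the diff; the session id and everything else here are illustrative assumptions, not part of the commit):

```ts
// Minimal SSE consumer sketch for the stream contract implied above.
// Assumed payload shape: {content?: string; error?: string}, plus a
// literal "[DONE]" sentinel, as inferred from handleStreamingMessage.
const sessionId = 'some-session-id'; // hypothetical
const source = new EventSource(`./api/llm/sessions/${sessionId}/messages?format=stream`);
let text = '';

source.onmessage = (event) => {
    if (event.data === '[DONE]') {   // server signals end of stream
        source.close();
        console.log('final response:', text);
        return;
    }
    const data = JSON.parse(event.data);
    if (data.content) {
        text += data.content;        // accumulate partial tokens
    } else if (data.error) {
        console.error('stream error:', data.error);
        source.close();
    }
};

source.onerror = () => source.close(); // give up on transport errors
```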

View File

@@ -145,11 +145,28 @@ export default class AiSettingsWidget extends OptionsWidget {
             await this.fetchFailedEmbeddingNotes();
         });

+        // Recreate embeddings button
+        const $recreateEmbeddings = this.$widget.find('.recreate-embeddings');
+        $recreateEmbeddings.on('click', async () => {
+            if (confirm(t("ai_llm.recreate_embeddings_confirm") || "Are you sure you want to recreate all embeddings? This may take a long time.")) {
+                try {
+                    await server.post('embeddings/reprocess');
+                    toastService.showMessage(t("ai_llm.recreate_embeddings_started"));
+
+                    // Start progress polling
+                    this.pollIndexRebuildProgress();
+                } catch (e) {
+                    console.error('Error starting embeddings regeneration:', e);
+                    toastService.showError(t("ai_llm.recreate_embeddings_error"));
+                }
+            }
+        });
+
         // Rebuild index button
         const $rebuildIndex = this.$widget.find('.rebuild-embeddings-index');
         $rebuildIndex.on('click', async () => {
             try {
-                await server.post('embeddings/rebuild');
+                await server.post('embeddings/rebuild-index');
                 toastService.showMessage(t("ai_llm.rebuild_index_started"));

                 // Start progress polling
@@ -340,7 +357,7 @@ export default class AiSettingsWidget extends OptionsWidget {
         if (!this.$widget) return;

         try {
-            const response = await server.get<FailedEmbeddingNotes>('embeddings/failed-notes');
+            const response = await server.get<FailedEmbeddingNotes>('embeddings/failed');

             if (response && response.success) {
                 const failedNotes = response.failedNotes || [];
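
Taken together, the settings widget now talks to three distinct embeddings endpoints. A compact sketch of the calls as they appear in this commit (the `server` helper and `FailedEmbeddingNotes` type come from the widget code above; the server-side handlers themselves are not shown in this diff, so treat this as an illustrative summary only):

```ts
// Illustrative tour of the embeddings endpoints touched by this commit.
async function embeddingsEndpointTour() {
    // New: regenerate every note embedding from scratch (slow)
    await server.post('embeddings/reprocess');

    // Renamed from 'embeddings/rebuild': rebuild only the search index (fast)
    await server.post('embeddings/rebuild-index');

    // Renamed from 'embeddings/failed-notes': list notes whose embedding failed
    const response = await server.get<FailedEmbeddingNotes>('embeddings/failed');
    return response?.failedNotes ?? [];
}
```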

View File

@@ -269,7 +269,15 @@ export const TPL = `
                 <div class="form-text">${t("ai_llm.embedding_auto_update_enabled_description")}</div>
             </div>

-            <!-- Rebuild index button with counter -->
+            <!-- Recreate embeddings button -->
+            <div class="form-group mt-3">
+                <button class="btn btn-outline-primary recreate-embeddings">
+                    ${t("ai_llm.recreate_embeddings")}
+                </button>
+                <div class="form-text">${t("ai_llm.recreate_embeddings_description")}</div>
+            </div>
+
+            <!-- Rebuild index button -->
             <div class="form-group mt-3">
                 <button class="btn btn-outline-primary rebuild-embeddings-index">
                     ${t("ai_llm.rebuild_index")}

View File

@@ -1204,8 +1204,22 @@
     "enable_automatic_indexing_description": "Automatically generate embeddings for new and updated notes",
     "embedding_auto_update_enabled": "Auto-update Embeddings",
     "embedding_auto_update_enabled_description": "Automatically update embeddings when notes are modified",
+    "recreate_embeddings": "Recreate All Embeddings",
+    "recreate_embeddings_description": "Regenerate all note embeddings from scratch (may take a long time for large note collections)",
+    "recreate_embeddings_started": "Embeddings regeneration started. This may take a long time for large note collections.",
+    "recreate_embeddings_error": "Error starting embeddings regeneration. Check logs for details.",
+    "recreate_embeddings_confirm": "Are you sure you want to recreate all embeddings? This may take a long time for large note collections.",
     "rebuild_index": "Rebuild Index",
-    "rebuild_index_description": "Regenerate all note embeddings (may take some time for large note collections)",
+    "rebuild_index_description": "Rebuild the vector search index for better performance (much faster than recreating embeddings)",
+    "rebuild_index_started": "Embedding index rebuild started. This may take several minutes.",
+    "rebuild_index_error": "Error starting index rebuild. Check logs for details.",
+    "note_title": "Note Title",
+    "error": "Error",
+    "last_attempt": "Last Attempt",
+    "actions": "Actions",
+    "retry": "Retry",
+    "retry_queued": "Note queued for retry",
+    "retry_failed": "Failed to queue note for retry",
     "embedding_provider_precedence_description": "Comma-separated list of providers in order of precedence for embeddings search (e.g., 'openai,ollama,anthropic')",
     "embedding_dimension_strategy": "Embedding Dimension Strategy",
     "embedding_dimension_auto": "Auto (Recommended)",