diff --git a/src/public/app/widgets/type_widgets/options/ai_settings.ts b/src/public/app/widgets/type_widgets/options/ai_settings.ts
index badc45721..f6128f656 100644
--- a/src/public/app/widgets/type_widgets/options/ai_settings.ts
+++ b/src/public/app/widgets/type_widgets/options/ai_settings.ts
@@ -17,6 +17,19 @@ interface OllamaModelResponse {
     }>;
 }
 
+// Interface for embedding statistics
+interface EmbeddingStats {
+    success: boolean;
+    stats: {
+        totalNotesCount: number;
+        embeddedNotesCount: number;
+        queuedNotesCount: number;
+        failedNotesCount: number;
+        lastProcessedDate: string | null;
+        percentComplete: number;
+    }
+}
+
 export default class AiSettingsWidget extends OptionsWidget {
     doRender() {
         this.$widget = $(`
@@ -175,6 +188,26 @@ export default class AiSettingsWidget extends OptionsWidget {
                 <div class="form-text">${t("ai_llm.reprocess_all_embeddings_description")}</div>
             </div>
 
+            <div class="form-group">
+                <h5>${t("ai_llm.embedding_statistics")}</h5>
+                <div class="embedding-stats-container">
+                    <div class="embedding-stats">
+                        <div>${t("ai_llm.total_notes")}: <span class="embedding-total-notes">-</span></div>
+                        <div>${t("ai_llm.processed_notes")}: <span class="embedding-processed-notes">-</span></div>
+                        <div>${t("ai_llm.queued_notes")}: <span class="embedding-queued-notes">-</span></div>
+                        <div>${t("ai_llm.failed_notes")}: <span class="embedding-failed-notes">-</span></div>
+                        <div>${t("ai_llm.last_processed")}: <span class="embedding-last-processed">-</span></div>
+                    </div>
+                    <div class="progress mt-2">
+                        <div class="progress-bar embedding-progress" role="progressbar" style="width: 0%"
+                            aria-valuenow="0" aria-valuemin="0" aria-valuemax="100">0%</div>
+                    </div>
+                    <button class="embedding-refresh-stats btn btn-sm btn-outline-secondary mt-2">
+                        ${t("ai_llm.refresh_stats")}
+                    </button>
+                </div>
+            </div>
+
`); @@ -253,44 +286,44 @@ export default class AiSettingsWidget extends OptionsWidget { $refreshModels.on('click', async () => { $refreshModels.prop('disabled', true); $refreshModels.text(t("ai_llm.refreshing_models")); - + try { const ollamaBaseUrl = this.$widget.find('.ollama-base-url').val() as string; const response = await server.post('ollama/list-models', { baseUrl: ollamaBaseUrl }); - + if (response && response.success && response.models && response.models.length > 0) { const $embedModelSelect = this.$widget.find('.ollama-embedding-model'); const currentValue = $embedModelSelect.val(); - + // Clear existing options $embedModelSelect.empty(); - + // Add embedding-specific models first - const embeddingModels = response.models.filter(model => + const embeddingModels = response.models.filter(model => model.name.includes('embed') || model.name.includes('bert')); - + embeddingModels.forEach(model => { $embedModelSelect.append(``); }); - + // Add separator if we have both types if (embeddingModels.length > 0) { $embedModelSelect.append(``); } - + // Add other models (LLMs can also generate embeddings) - const otherModels = response.models.filter(model => + const otherModels = response.models.filter(model => !model.name.includes('embed') && !model.name.includes('bert')); - + otherModels.forEach(model => { $embedModelSelect.append(``); }); - + // Restore previous selection if possible if (currentValue) { $embedModelSelect.val(currentValue); } - + toastService.showMessage("Models refreshed successfully"); } else { toastService.showError("No models found from Ollama server"); @@ -333,6 +366,8 @@ export default class AiSettingsWidget extends OptionsWidget { try { await server.post('embeddings/reprocess'); toastService.showMessage(t("ai_llm.reprocess_started")); + // Refresh stats after reprocessing starts + await this.refreshEmbeddingStats(); } catch (error) { console.error("Error reprocessing embeddings:", error); toastService.showError(t("ai_llm.reprocess_error")); @@ -342,9 +377,57 @@ export default class AiSettingsWidget extends OptionsWidget { } }); + const $embeddingRefreshStats = this.$widget.find('.embedding-refresh-stats'); + $embeddingRefreshStats.on('click', async () => { + await this.refreshEmbeddingStats(); + }); + + // Initial fetch of embedding stats + setTimeout(async () => { + await this.refreshEmbeddingStats(); + }, 500); + return this.$widget; } + async refreshEmbeddingStats() { + if (!this.$widget) return; + + try { + const $refreshButton = this.$widget.find('.embedding-refresh-stats'); + $refreshButton.prop('disabled', true); + $refreshButton.text(t("ai_llm.refreshing")); + + const response = await server.get('embeddings/stats'); + + if (response && response.success) { + const stats = response.stats; + + this.$widget.find('.embedding-total-notes').text(stats.totalNotesCount); + this.$widget.find('.embedding-processed-notes').text(stats.embeddedNotesCount); + this.$widget.find('.embedding-queued-notes').text(stats.queuedNotesCount); + this.$widget.find('.embedding-failed-notes').text(stats.failedNotesCount); + + const lastProcessed = stats.lastProcessedDate + ? 
new Date(stats.lastProcessedDate).toLocaleString() + : t("ai_llm.never"); + this.$widget.find('.embedding-last-processed').text(lastProcessed); + + const $progressBar = this.$widget.find('.embedding-progress'); + $progressBar.css('width', `${stats.percentComplete}%`); + $progressBar.attr('aria-valuenow', stats.percentComplete.toString()); + $progressBar.text(`${stats.percentComplete}%`); + } + } catch (error) { + console.error("Error fetching embedding stats:", error); + toastService.showError(t("ai_llm.stats_error")); + } finally { + const $refreshButton = this.$widget.find('.embedding-refresh-stats'); + $refreshButton.prop('disabled', false); + $refreshButton.text(t("ai_llm.refresh_stats")); + } + } + updateAiSectionVisibility() { if (!this.$widget) return; diff --git a/src/public/translations/en/translation.json b/src/public/translations/en/translation.json index d277e12c8..ed42e1917 100644 --- a/src/public/translations/en/translation.json +++ b/src/public/translations/en/translation.json @@ -1161,11 +1161,22 @@ "embedding_update_interval_description": "Time between processing batches of embeddings (in milliseconds)", "embedding_default_dimension": "Default Dimension", "embedding_default_dimension_description": "Default embedding vector dimension when creating new embeddings", - "reprocess_all_embeddings": "Reprocess All Notes", - "reprocess_all_embeddings_description": "Queue all notes for embedding generation or update", - "reprocessing_embeddings": "Processing...", - "reprocess_started": "All notes have been queued for embedding processing", - "reprocess_error": "Error starting embedding reprocessing" + "reprocess_all_embeddings": "Reprocess All Embeddings", + "reprocess_all_embeddings_description": "Queue all notes for embedding processing. This may take some time depending on your number of notes.", + "reprocessing_embeddings": "Reprocessing...", + "reprocess_started": "Embedding reprocessing started in the background", + "reprocess_error": "Error starting embedding reprocessing", + + "embedding_statistics": "Embedding Statistics", + "total_notes": "Total Notes", + "processed_notes": "Processed Notes", + "queued_notes": "Queued Notes", + "failed_notes": "Failed Notes", + "last_processed": "Last Processed", + "never": "Never", + "refresh_stats": "Refresh Stats", + "refreshing": "Refreshing...", + "stats_error": "Error fetching embedding statistics" }, "zoom_factor": { "title": "Zoom Factor (desktop build only)", diff --git a/src/routes/api/embeddings.ts b/src/routes/api/embeddings.ts index e1a42e0e5..ca1758df0 100644 --- a/src/routes/api/embeddings.ts +++ b/src/routes/api/embeddings.ts @@ -191,11 +191,24 @@ async function getQueueStatus(req: Request, res: Response) { }; } +/** + * Get embedding statistics + */ +async function getEmbeddingStats(req: Request, res: Response) { + const stats = await vectorStore.getEmbeddingStats(); + + return { + success: true, + stats + }; +} + export default { findSimilarNotes, searchByText, getProviders, updateProvider, reprocessAllNotes, - getQueueStatus + getQueueStatus, + getEmbeddingStats }; diff --git a/src/routes/routes.ts b/src/routes/routes.ts index dafe73d0e..55754f4d1 100644 --- a/src/routes/routes.ts +++ b/src/routes/routes.ts @@ -378,6 +378,7 @@ function register(app: express.Application) { apiRoute(PATCH, "/api/embeddings/providers/:providerId", embeddingsRoute.updateProvider); apiRoute(PST, "/api/embeddings/reprocess", embeddingsRoute.reprocessAllNotes); apiRoute(GET, "/api/embeddings/queue-status", embeddingsRoute.getQueueStatus); + 
+    apiRoute(GET, "/api/embeddings/stats", embeddingsRoute.getEmbeddingStats);
 
     // Ollama API endpoints
     route(PST, "/api/ollama/list-models", [auth.checkApiAuth, csrfMiddleware], ollamaRoute.listModels, apiResultHandler);
diff --git a/src/services/llm/embeddings/vector_store.ts b/src/services/llm/embeddings/vector_store.ts
index f02434194..568fedbfa 100644
--- a/src/services/llm/embeddings/vector_store.ts
+++ b/src/services/llm/embeddings/vector_store.ts
@@ -471,6 +471,41 @@ export async function reprocessAllNotes() {
     }
 }
 
+/**
+ * Get current embedding statistics
+ */
+export async function getEmbeddingStats() {
+    const totalNotesCount = await sql.getValue(
+        "SELECT COUNT(*) FROM notes WHERE isDeleted = 0"
+    ) as number;
+
+    const embeddedNotesCount = await sql.getValue(
+        "SELECT COUNT(DISTINCT noteId) FROM note_embeddings"
+    ) as number;
+
+    const queuedNotesCount = await sql.getValue(
+        "SELECT COUNT(*) FROM embedding_queue"
+    ) as number;
+
+    const failedNotesCount = await sql.getValue(
+        "SELECT COUNT(*) FROM embedding_queue WHERE attempts > 0"
+    ) as number;
+
+    // Get the last processing time by checking the most recent embedding
+    const lastProcessedDate = await sql.getValue(
+        "SELECT utcDateCreated FROM note_embeddings ORDER BY utcDateCreated DESC LIMIT 1"
+    ) as string | null || null;
+
+    return {
+        totalNotesCount,
+        embeddedNotesCount,
+        queuedNotesCount,
+        failedNotesCount,
+        lastProcessedDate,
+        percentComplete: totalNotesCount > 0 ? Math.round((embeddedNotesCount / totalNotesCount) * 100) : 0
+    };
+}
+
 export default {
     cosineSimilarity,
     embeddingToBuffer,
@@ -485,5 +520,6 @@ export default {
     setupEmbeddingEventListeners,
     setupEmbeddingBackgroundProcessing,
     initEmbeddings,
-    reprocessAllNotes
+    reprocessAllNotes,
+    getEmbeddingStats
 };