diff --git a/src/services/llm/embeddings/chunking.ts b/src/services/llm/embeddings/chunking/chunking.ts
similarity index 96%
rename from src/services/llm/embeddings/chunking.ts
rename to src/services/llm/embeddings/chunking/chunking.ts
index a0540f0dc..6dd1ab5dc 100644
--- a/src/services/llm/embeddings/chunking.ts
+++ b/src/services/llm/embeddings/chunking/chunking.ts
@@ -1,8 +1,8 @@
-import log from "../../../services/log.js";
-import dateUtils from "../../../services/date_utils.js";
-import sql from "../../../services/sql.js";
-import becca from "../../../becca/becca.js";
-import type { NoteEmbeddingContext } from "./types.js";
+import log from "../../../log.js";
+import dateUtils from "../../../date_utils.js";
+import sql from "../../../sql.js";
+import becca from "../../../../becca/becca.js";
+import type { NoteEmbeddingContext } from "../types.js";
 
 // Remove static imports that cause circular dependencies
 // import { storeNoteEmbedding, deleteNoteEmbeddings } from "./storage.js";
@@ -101,7 +101,7 @@ export async function processNoteWithChunking(
 
     try {
         // Get the context extractor dynamically to avoid circular dependencies
-        const { ContextExtractor } = await import('../context/index.js');
+        const { ContextExtractor } = await import('../../context/index.js');
         const contextExtractor = new ContextExtractor();
 
         // Get note from becca
@@ -118,8 +118,8 @@ export async function processNoteWithChunking(
            {
                // Adjust chunk size based on provider using constants
                maxChunkSize: provider.name === 'ollama' ?
-                    (await import('../../../routes/api/llm.js')).LLM_CONSTANTS.CHUNKING.OLLAMA_SIZE :
-                    (await import('../../../routes/api/llm.js')).LLM_CONSTANTS.CHUNKING.DEFAULT_SIZE,
+                    (await import('../../../../routes/api/llm.js')).LLM_CONSTANTS.CHUNKING.OLLAMA_SIZE :
+                    (await import('../../../../routes/api/llm.js')).LLM_CONSTANTS.CHUNKING.DEFAULT_SIZE,
                respectBoundaries: true
            }
        );
@@ -130,7 +130,7 @@ export async function processNoteWithChunking(
            const config = provider.getConfig();
 
            // Use dynamic import instead of static import
-            const storage = await import('./storage.js');
+            const storage = await import('../storage.js');
            await storage.storeNoteEmbedding(noteId, provider.name, config.model, embedding);
 
            log.info(`Generated single embedding for note ${noteId} (${note.title}) since chunking failed`);
@@ -142,7 +142,7 @@ export async function processNoteWithChunking(
 
        // Delete existing embeddings first to avoid duplicates
        // Use dynamic import
-        const storage = await import('./storage.js');
+        const storage = await import('../storage.js');
        await storage.deleteNoteEmbeddings(noteId, provider.name, config.model);
 
        // Track successful and failed chunks in memory during this processing run
diff --git a/src/services/llm/embeddings/chunking_interface.ts b/src/services/llm/embeddings/chunking/chunking_interface.ts
similarity index 91%
rename from src/services/llm/embeddings/chunking_interface.ts
rename to src/services/llm/embeddings/chunking/chunking_interface.ts
index 1a429e121..301bdec05 100644
--- a/src/services/llm/embeddings/chunking_interface.ts
+++ b/src/services/llm/embeddings/chunking/chunking_interface.ts
@@ -1,4 +1,4 @@
-import type { NoteEmbeddingContext } from "./types.js";
+import type { NoteEmbeddingContext } from "../types.js";
 
 /**
  * Interface for chunking operations
diff --git a/src/services/llm/embeddings/index.ts b/src/services/llm/embeddings/index.ts
index 549ce7f63..10a857b99 100644
--- a/src/services/llm/embeddings/index.ts
+++ b/src/services/llm/embeddings/index.ts
@@ -8,7 +8,7 @@ import * as queue from './queue.js';
 import * as events from './events.js';
 import * as stats from './stats.js';
 import * as indexOperations from './index_operations.js';
-import { getChunkingOperations } from './chunking_interface.js';
+import { getChunkingOperations } from './chunking/chunking_interface.js';
 import type { NoteEmbeddingContext } from './types.js';
 
 // Export types
diff --git a/src/services/llm/embeddings/queue.ts b/src/services/llm/embeddings/queue.ts
index ff7ffcfff..76ae6ad22 100644
--- a/src/services/llm/embeddings/queue.ts
+++ b/src/services/llm/embeddings/queue.ts
@@ -7,7 +7,7 @@ import { getEnabledEmbeddingProviders } from "./providers.js";
 import { getNoteEmbeddingContext } from "./content_processing.js";
 import { deleteNoteEmbeddings } from "./storage.js";
 import type { QueueItem } from "./types.js";
-import { getChunkingOperations } from "./chunking_interface.js";
+import { getChunkingOperations } from "./chunking/chunking_interface.js";
 import indexService from '../index_service.js';
 
 /**
@@ -289,10 +289,10 @@ export async function processEmbeddingQueue() {
                // This allows manual retries later
                if (noteData.attempts + 1 >= 3) {
                    log.error(`Marked note ${noteData.noteId} as permanently failed after multiple embedding attempts`);
-
+
                    // Update the attempts to a very high number to indicate permanent failure
                    await sql.execute(`
-                        UPDATE embedding_queue
+                        UPDATE embedding_queue
                        SET attempts = 999
                        WHERE noteId = ?
                    `, [noteData.noteId]);
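For context on the pattern these hunks preserve: the chunking module avoids a circular dependency with storage.ts by deferring its import to call time, and the rename into the chunking/ subdirectory is why every relative path gains one extra "../". Below is a minimal illustrative sketch of that lazy dynamic-import pattern, not code from the patch; the wrapper function name storeChunkEmbedding and the Float32Array parameter type are assumptions made for the example, while the '../storage.js' path and the storeNoteEmbedding call mirror the diff.

// Sketch only: storage.ts pulls in the chunking module at load time, so the
// chunking side must not statically import storage back. Resolving the module
// with await import() at call time breaks the cycle, because by then both
// modules have finished initializing.
async function storeChunkEmbedding(
    noteId: string,
    providerName: string,
    model: string,
    embedding: Float32Array // assumed type; the patch does not show it
): Promise<void> {
    // '../storage.js' resolves from the new chunking/ directory, one level
    // deeper than before the rename — hence the extra '../' in the hunks above.
    const storage = await import('../storage.js');
    await storage.storeNoteEmbedding(noteId, providerName, model, embedding);
}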