Mirror of https://github.com/TriliumNext/Notes.git, synced 2025-07-29 11:02:28 +08:00
move chunking to its own folder
commit 73445d97e7
parent 46a6533e57
@@ -1,8 +1,8 @@
-import log from "../../../services/log.js";
-import dateUtils from "../../../services/date_utils.js";
-import sql from "../../../services/sql.js";
-import becca from "../../../becca/becca.js";
-import type { NoteEmbeddingContext } from "./types.js";
+import log from "../../../log.js";
+import dateUtils from "../../../date_utils.js";
+import sql from "../../../sql.js";
+import becca from "../../../../becca/becca.js";
+import type { NoteEmbeddingContext } from "../types.js";
 // Remove static imports that cause circular dependencies
 // import { storeNoteEmbedding, deleteNoteEmbeddings } from "./storage.js";

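The import rewrites above follow the chunking module's move into a deeper directory: each relative specifier is adjusted so it still resolves to the same file from the new location. A minimal Node.js sketch of that resolution, using a hypothetical directory layout (the real file paths are not shown in this diff):

import { pathToFileURL } from "node:url";

// Hypothetical old and new locations of the chunking module (illustrative only).
const oldImporter = pathToFileURL("/repo/src/services/llm/embeddings/chunking.ts");
const newImporter = pathToFileURL("/repo/src/services/llm/embeddings/chunking/processor.ts");

// The same target needs one extra "../" once the importing file sits one level deeper.
console.log(new URL("./types.js", oldImporter).pathname);  // .../embeddings/types.js
console.log(new URL("../types.js", newImporter).pathname); // .../embeddings/types.js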
@@ -101,7 +101,7 @@ export async function processNoteWithChunking(

 try {
 // Get the context extractor dynamically to avoid circular dependencies
-const { ContextExtractor } = await import('../context/index.js');
+const { ContextExtractor } = await import('../../context/index.js');
 const contextExtractor = new ContextExtractor();

 // Get note from becca
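The dynamic import of ContextExtractor is the circular-dependency workaround referenced by the comments in the first hunk: the context module is loaded at call time rather than at module-initialization time. A stripped-down sketch of the pattern (the helper name is invented; only the import path and class name come from the diff):

// Deferring the import to call time keeps the context module out of this
// module's static import graph, so the two modules can depend on each other safely.
async function getContextExtractor() {
    // A top-level `import { ContextExtractor } from '../../context/index.js'`
    // could run while the context module is still mid-initialization.
    const { ContextExtractor } = await import('../../context/index.js');
    return new ContextExtractor();
}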
@@ -118,8 +118,8 @@ export async function processNoteWithChunking(
 {
 // Adjust chunk size based on provider using constants
 maxChunkSize: provider.name === 'ollama' ?
-(await import('../../../routes/api/llm.js')).LLM_CONSTANTS.CHUNKING.OLLAMA_SIZE :
-(await import('../../../routes/api/llm.js')).LLM_CONSTANTS.CHUNKING.DEFAULT_SIZE,
+(await import('../../../../routes/api/llm.js')).LLM_CONSTANTS.CHUNKING.OLLAMA_SIZE :
+(await import('../../../../routes/api/llm.js')).LLM_CONSTANTS.CHUNKING.DEFAULT_SIZE,
 respectBoundaries: true
 }
 );
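The maxChunkSize expression above picks a provider-specific limit from LLM_CONSTANTS, loaded lazily from the LLM route module. Roughly the same logic as a standalone helper (the helper name is invented; the constant names and import path are from the diff):

async function resolveMaxChunkSize(providerName: string): Promise<number> {
    // Lazy import mirrors the diff; the constants live on the LLM API route module.
    const { LLM_CONSTANTS } = await import('../../../../routes/api/llm.js');
    return providerName === 'ollama'
        ? LLM_CONSTANTS.CHUNKING.OLLAMA_SIZE
        : LLM_CONSTANTS.CHUNKING.DEFAULT_SIZE;
}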
@@ -130,7 +130,7 @@ export async function processNoteWithChunking(
 const config = provider.getConfig();

 // Use dynamic import instead of static import
-const storage = await import('./storage.js');
+const storage = await import('../storage.js');
 await storage.storeNoteEmbedding(noteId, provider.name, config.model, embedding);

 log.info(`Generated single embedding for note ${noteId} (${note.title}) since chunking failed`);
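This hunk is the fallback branch: when chunking fails, the note is embedded once as a whole so it still gets a vector. A hedged sketch of that path (the wrapper function and the provider's embedding method are assumptions; the storage call and config access are from the diff):

async function storeWholeNoteFallback(noteId: string, provider: any, content: string) {
    // Provider shape is assumed: name, getConfig(), and an embedding method.
    const embedding = await provider.generateEmbeddings(content); // assumed provider API
    const config = provider.getConfig();
    // Storage is imported dynamically for the same circular-dependency reason as above.
    const storage = await import('../storage.js');
    await storage.storeNoteEmbedding(noteId, provider.name, config.model, embedding);
}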
@@ -142,7 +142,7 @@ export async function processNoteWithChunking(

 // Delete existing embeddings first to avoid duplicates
 // Use dynamic import
-const storage = await import('./storage.js');
+const storage = await import('../storage.js');
 await storage.deleteNoteEmbeddings(noteId, provider.name, config.model);

 // Track successful and failed chunks in memory during this processing run
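Before the per-chunk embeddings are written, the existing rows for this note, provider, and model are deleted so reprocessing never accumulates duplicates. A simplified sketch of that delete-then-store loop (the chunk shape and the provider's embedding method are assumptions; the storage functions are the ones named in the diff):

async function reembedNoteChunks(noteId: string, provider: any, chunks: { content: string }[]) {
    const config = provider.getConfig();
    const storage = await import('../storage.js');
    // Clear previous embeddings for this note/provider/model to avoid duplicates.
    await storage.deleteNoteEmbeddings(noteId, provider.name, config.model);
    for (const chunk of chunks) {
        const embedding = await provider.generateEmbeddings(chunk.content); // assumed provider API
        await storage.storeNoteEmbedding(noteId, provider.name, config.model, embedding);
    }
}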
@@ -1,4 +1,4 @@
-import type { NoteEmbeddingContext } from "./types.js";
+import type { NoteEmbeddingContext } from "../types.js";

 /**
  * Interface for chunking operations
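This file declares the chunking interface that the rest of the embeddings code reaches through getChunkingOperations, so callers never import the chunking implementation statically. A hedged sketch of what that indirection can look like (the interface members and the implementation module name are assumptions; processNoteWithChunking and getChunkingOperations appear elsewhere in this diff):

import type { NoteEmbeddingContext } from "../types.js";

export interface ChunkingOperations {
    // Signature is assumed; only the function name appears in the diff's hunk headers.
    processNoteWithChunking(noteId: string, provider: unknown, context: NoteEmbeddingContext): Promise<void>;
}

export async function getChunkingOperations(): Promise<ChunkingOperations> {
    // Lazy import keeps the chunking implementation out of the static import graph.
    const impl = await import('./chunking_processor.js'); // hypothetical module name
    return impl as unknown as ChunkingOperations;
}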
@@ -8,7 +8,7 @@ import * as queue from './queue.js';
 import * as events from './events.js';
 import * as stats from './stats.js';
 import * as indexOperations from './index_operations.js';
-import { getChunkingOperations } from './chunking_interface.js';
+import { getChunkingOperations } from './chunking/chunking_interface.js';
 import type { NoteEmbeddingContext } from './types.js';

 // Export types
@@ -7,7 +7,7 @@ import { getEnabledEmbeddingProviders } from "./providers.js";
 import { getNoteEmbeddingContext } from "./content_processing.js";
 import { deleteNoteEmbeddings } from "./storage.js";
 import type { QueueItem } from "./types.js";
-import { getChunkingOperations } from "./chunking_interface.js";
+import { getChunkingOperations } from "./chunking/chunking_interface.js";
 import indexService from '../index_service.js';

 /**
@@ -289,10 +289,10 @@ export async function processEmbeddingQueue() {
 // This allows manual retries later
 if (noteData.attempts + 1 >= 3) {
 log.error(`Marked note ${noteData.noteId} as permanently failed after multiple embedding attempts`);

 // Update the attempts to a very high number to indicate permanent failure
 await sql.execute(`
-UPDATE embedding_queue
+UPDATE embedding_queue
 SET attempts = 999
 WHERE noteId = ?
 `, [noteData.noteId]);
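The retry cap above keeps a permanently failing note in the queue table but stamps it with a sentinel attempt count so it is skipped until someone retries it manually. The same step as a standalone helper (the wrapper function and import path are illustrative; the SQL and the sql.execute call match the diff):

import sql from "../../../services/sql.js"; // path is illustrative

async function markNotePermanentlyFailed(noteId: string) {
    // attempts = 999 acts as the "permanently failed" sentinel; the row stays
    // in embedding_queue so a manual retry can reset it later.
    await sql.execute(`
        UPDATE embedding_queue
        SET attempts = 999
        WHERE noteId = ?
    `, [noteId]);
}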