Mirror of https://github.com/TriliumNext/Notes.git (synced 2025-07-29 02:52:27 +08:00)
well, at least query decomposition is working... for now
This commit is contained in:
parent 5e50a2918d
commit 7062e51f2d
@@ -24,306 +24,324 @@ import type { LLMServiceInterface } from '../../interfaces/agent_tool_interfaces';

// Options for context processing
export interface ContextOptions {
    // Content options
    summarizeContent?: boolean;
    maxResults?: number;
    contextNoteId?: string | null;

    // Processing options
    useQueryEnhancement?: boolean;
    useQueryDecomposition?: boolean;

    // Debugging options
    showThinking?: boolean;
}

export class ContextService {
    private initialized = false;
    private initPromise: Promise<void> | null = null;
    private contextExtractor: ContextExtractor;

    constructor() {
        this.contextExtractor = new ContextExtractor();
    }

    /**
     * Initialize the service
     */
    async initialize(): Promise<void> {
        if (this.initialized) return;

        // Use a promise to prevent multiple simultaneous initializations
        if (this.initPromise) return this.initPromise;

        this.initPromise = (async () => {
            try {
                // Initialize provider
                const provider = await providerManager.getPreferredEmbeddingProvider();
                if (!provider) {
                    throw new Error(`No embedding provider available. Could not initialize context service.`);
                }

                // Agent tools are already initialized in the AIServiceManager constructor
                // No need to initialize them again

                this.initialized = true;
                log.info(`Context service initialized with provider: ${provider.name}`);
            } catch (error: unknown) {
                const errorMessage = error instanceof Error ? error.message : String(error);
                log.error(`Failed to initialize context service: ${errorMessage}`);
                throw error;
            } finally {
                this.initPromise = null;
            }
        })();

        return this.initPromise;
    }

    /**
     * Process a user query to find relevant context in Trilium notes
     *
     * @param userQuestion - The user's query
     * @param llmService - The LLM service to use
     * @param options - Context processing options
     * @returns Context information and relevant notes
     */
    async processQuery(
        userQuestion: string,
        llmService: LLMServiceInterface,
        options: ContextOptions = {}
    ): Promise<{
        context: string;
        sources: NoteSearchResult[];
        thinking?: string;
        decomposedQuery?: any;
    }> {
        // Set default options
        const {
            summarizeContent = false,
            maxResults = 10,
            contextNoteId = null,
            useQueryEnhancement = true,
            useQueryDecomposition = false,
            showThinking = false
        } = options;

        log.info(`Processing query: "${userQuestion.substring(0, 50)}..."`);
        log.info(`Options: summarize=${summarizeContent}, maxResults=${maxResults}, contextNoteId=${contextNoteId || 'global'}`);
        log.info(`Processing: enhancement=${useQueryEnhancement}, decomposition=${useQueryDecomposition}, showThinking=${showThinking}`);

        if (!this.initialized) {
            try {
                await this.initialize();
            } catch (error) {
                log.error(`Failed to initialize ContextService: ${error}`);
                // Return a fallback response if initialization fails
                return {
                    context: CONTEXT_PROMPTS.NO_NOTES_CONTEXT,
                    sources: [],
                    thinking: undefined
                };
            }
        }

        try {
            let decomposedQuery;
            let searchQueries: string[] = [userQuestion];
            let relevantNotes: NoteSearchResult[] = [];

            // Step 1: Decompose query if requested
            if (useQueryDecomposition) {
                log.info(`Decomposing query for better understanding`);
                try {
                    // Use the async version with the LLM service
                    decomposedQuery = await queryProcessor.decomposeQuery(userQuestion, undefined, llmService);
                    log.info(`Successfully decomposed query complexity: ${decomposedQuery.complexity}/10 with ${decomposedQuery.subQueries.length} sub-queries`);
                } catch (error) {
                    log.error(`Error in query decomposition, using fallback: ${error}`);
                    // Fallback to simpler decomposition
                    decomposedQuery = {
                        originalQuery: userQuestion,
                        subQueries: [{
                            id: `sq_fallback_${Date.now()}`,
                            text: userQuestion,
                            reason: "Fallback to original query due to decomposition error",
                            isAnswered: false
                        }],
                        status: 'pending',
                        complexity: 1
                    };
                }

                // Extract sub-queries to use for search
                if (decomposedQuery.subQueries.length > 0) {
                    searchQueries = decomposedQuery.subQueries
                        .map(sq => sq.text)
                        .filter(text => text !== userQuestion); // Remove the original query to avoid duplication

                    // Always include the original query
                    searchQueries.unshift(userQuestion);

                    log.info(`Query decomposed into ${searchQueries.length} search queries`);
                }
            }
            // Step 2: Or enhance query if requested
            else if (useQueryEnhancement) {
                try {
                    log.info(`Enhancing query for better semantic matching`);
                    searchQueries = await queryProcessor.generateSearchQueries(userQuestion, llmService);
                    log.info(`Generated ${searchQueries.length} enhanced search queries`);
                } catch (error) {
                    log.error(`Error generating search queries, using fallback: ${error}`);
                    searchQueries = [userQuestion]; // Fallback to using the original question
                }
            }

            // Step 3: Find relevant notes using vector search
            const allResults = new Map<string, NoteSearchResult>();

            for (const query of searchQueries) {
                try {
                    log.info(`Searching for: "${query.substring(0, 50)}..."`);

                    // Use the unified vector search service
                    const results = await vectorSearchService.findRelevantNotes(
                        query,
                        contextNoteId,
                        {
                            maxResults: maxResults,
                            summarizeContent: summarizeContent,
                            llmService: summarizeContent ? llmService : null
                        }
                    );

                    log.info(`Found ${results.length} results for query "${query.substring(0, 30)}..."`);

                    // Combine results, avoiding duplicates
                    for (const result of results) {
                        if (!allResults.has(result.noteId)) {
                            allResults.set(result.noteId, result);
                        } else {
                            // If note already exists, update similarity to max of both values
                            const existing = allResults.get(result.noteId);
                            if (existing && result.similarity > existing.similarity) {
                                existing.similarity = result.similarity;
                                allResults.set(result.noteId, existing);
                            }
                        }
                    }
                } catch (error) {
                    log.error(`Error searching for query "${query}": ${error}`);
                }
            }

            // Convert to array and sort by similarity
            relevantNotes = Array.from(allResults.values())
                .sort((a, b) => b.similarity - a.similarity)
                .slice(0, maxResults);

            log.info(`Final combined results: ${relevantNotes.length} relevant notes`);

            // Step 4: Build context from the notes
            const provider = await providerManager.getPreferredEmbeddingProvider();
            const providerId = provider?.name || 'default';

            const context = await contextFormatter.buildContextFromNotes(
                relevantNotes,
                userQuestion,
                providerId
            );

            // Step 5: Add agent tools context if requested
            let enhancedContext = context;
            let thinkingProcess: string | undefined = undefined;

            if (showThinking) {
                thinkingProcess = this.generateThinkingProcess(
                    userQuestion,
                    searchQueries,
                    relevantNotes,
                    decomposedQuery
                );
            }

            return {
                context: enhancedContext,
                sources: relevantNotes,
                thinking: thinkingProcess,
                decomposedQuery
            };
        } catch (error) {
            log.error(`Error processing query: ${error}`);
            return {
                context: CONTEXT_PROMPTS.NO_NOTES_CONTEXT,
                sources: [],
                thinking: undefined
            };
        }
    }

    /**
     * Generate a thinking process for debugging and transparency
     */
    private generateThinkingProcess(
        originalQuery: string,
        searchQueries: string[],
        relevantNotes: NoteSearchResult[],
        decomposedQuery?: any
    ): string {
        let thinking = `## Query Processing\n\n`;
        thinking += `Original query: "${originalQuery}"\n\n`;

        // Add decomposition analysis if available
        if (decomposedQuery) {
            thinking += `Query complexity: ${decomposedQuery.complexity}/10\n\n`;
            thinking += `### Decomposed into ${decomposedQuery.subQueries.length} sub-queries:\n`;

            decomposedQuery.subQueries.forEach((sq: any, i: number) => {
                thinking += `${i + 1}. ${sq.text}\n   Reason: ${sq.reason}\n\n`;
            });
        }

        // Add search queries
        thinking += `### Search Queries Used:\n`;
        searchQueries.forEach((q, i) => {
            thinking += `${i + 1}. "${q}"\n`;
        });

        // Add found sources
        thinking += `\n## Sources Retrieved (${relevantNotes.length})\n\n`;

        relevantNotes.slice(0, 5).forEach((note, i) => {
            thinking += `${i + 1}. "${note.title}" (Score: ${Math.round(note.similarity * 100)}%)\n`;
            thinking += `   ID: ${note.noteId}\n`;

            // Check if parentPath exists before using it
            if ('parentPath' in note && note.parentPath) {
                thinking += `   Path: ${note.parentPath}\n`;
            }

            if (note.content) {
                const contentPreview = note.content.length > 100
                    ? note.content.substring(0, 100) + '...'
                    : note.content;
                thinking += `   Preview: ${contentPreview}\n`;
            }

            thinking += '\n';
        });

        if (relevantNotes.length > 5) {
            thinking += `... and ${relevantNotes.length - 5} more sources\n`;
        }

        return thinking;
    }

    /**
     * Find notes semantically related to a query
     * (Shorthand method that directly uses vectorSearchService)
     */
    async findRelevantNotes(
        query: string,
        contextNoteId: string | null = null,
        options: {
            maxResults?: number,
            summarize?: boolean,
            llmService?: LLMServiceInterface | null
        } = {}
    ): Promise<NoteSearchResult[]> {
        return vectorSearchService.findRelevantNotes(
            query,
            contextNoteId,
            {
                maxResults: options.maxResults,
                summarizeContent: options.summarize,
                llmService: options.llmService
            }
        );
    }
}

// Export a singleton instance
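For orientation, a minimal caller sketch of the new processQuery flow follows. The import paths and the contextService singleton export name are assumptions for illustration only; they are not confirmed by this diff.

// Hypothetical usage sketch (paths and singleton export are assumed).
import type { LLMServiceInterface } from '../../interfaces/agent_tool_interfaces.js';
import contextService from './context_service.js';

async function answerWithNoteContext(question: string, llmService: LLMServiceInterface) {
    const { context, sources, thinking, decomposedQuery } = await contextService.processQuery(
        question,
        llmService,
        {
            useQueryDecomposition: true, // exercise the new async decomposition path with its fallback
            showThinking: true,          // capture the trace built by generateThinkingProcess()
            maxResults: 5
        }
    );

    console.log(`Built context from ${sources.length} notes (complexity ${decomposedQuery?.complexity ?? 'n/a'}/10)`);
    if (thinking) console.log(thinking);
    return context;
}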
File diff suppressed because it is too large
@@ -12,83 +12,129 @@ import type { SubQuery, DecomposedQuery } from '../context/services/query_processor.js';

export type { SubQuery, DecomposedQuery };

export class QueryDecompositionTool {
    /**
     * Break down a complex query into smaller, more manageable sub-queries
     *
     * @param query The original user query
     * @param context Optional context about the current note being viewed
     * @returns A decomposed query object with sub-queries
     */
    decomposeQuery(query: string, context?: string): DecomposedQuery {
        log.info('Using compatibility layer for QueryDecompositionTool.decomposeQuery');

        // Since the main implementation is now async but we need to maintain a sync interface,
        // we'll use a simpler approach that doesn't require LLM

        // Get the complexity to determine approach
        const complexity = queryProcessor.assessQueryComplexity(query);

        if (!query || query.trim().length === 0) {
            return {
                originalQuery: query,
                subQueries: [],
                status: 'pending',
                complexity: 0
            };
        }

        // Create a baseline decomposed query
        let subQueries = [];

        // For compatibility, we'll use the basic SubQuery generation
        // This avoids the async LLM call which would break the sync interface
        const mainSubQuery = {
            id: `sq_${Date.now()}_sync_0`,
            text: query,
            reason: "Main question (for direct matching)",
            isAnswered: false
        };

        subQueries.push(mainSubQuery);

        // Add a generic exploration query for context
        const genericQuery = {
            id: `sq_${Date.now()}_sync_1`,
            text: `What information is related to ${query}?`,
            reason: "General exploration to find related content",
            isAnswered: false
        };

        subQueries.push(genericQuery);

        // Simplified implementation that doesn't require async/LLM calls
        return {
            originalQuery: query,
            subQueries: subQueries,
            status: 'pending',
            complexity
        };
    }

    /**
     * Update a sub-query with its answer
     *
     * @param decomposedQuery The decomposed query object
     * @param subQueryId The ID of the sub-query to update
     * @param answer The answer to the sub-query
     * @returns The updated decomposed query
     */
    updateSubQueryAnswer(
        decomposedQuery: DecomposedQuery,
        subQueryId: string,
        answer: string
    ): DecomposedQuery {
        log.info('Using compatibility layer for QueryDecompositionTool.updateSubQueryAnswer');
        return queryProcessor.updateSubQueryAnswer(decomposedQuery, subQueryId, answer);
    }

    /**
     * Synthesize all sub-query answers into a comprehensive response
     *
     * @param decomposedQuery The decomposed query with all sub-queries answered
     * @returns A synthesized answer to the original query
     */
    synthesizeAnswer(decomposedQuery: DecomposedQuery): string {
        log.info('Using compatibility layer for QueryDecompositionTool.synthesizeAnswer');
        return queryProcessor.synthesizeAnswer(decomposedQuery);
    }

    /**
     * Generate a status report on the progress of answering a complex query
     *
     * @param decomposedQuery The decomposed query
     * @returns A status report string
     */
    getQueryStatus(decomposedQuery: DecomposedQuery): string {
        log.info('Using compatibility layer for QueryDecompositionTool.getQueryStatus');
        // This method doesn't exist directly in the new implementation
        // We'll implement a simple fallback

        const answeredCount = decomposedQuery.subQueries.filter(sq => sq.isAnswered).length;
        const totalCount = decomposedQuery.subQueries.length;

        let status = `Progress: ${answeredCount}/${totalCount} sub-queries answered\n\n`;

        for (const sq of decomposedQuery.subQueries) {
            status += `${sq.isAnswered ? '✓' : '○'} ${sq.text}\n`;
            if (sq.isAnswered && sq.answer) {
                status += `Answer: ${sq.answer.substring(0, 100)}${sq.answer.length > 100 ? '...' : ''}\n`;
            }
            status += '\n';
        }

        return status;
    }

    /**
     * Assess the complexity of a query on a scale of 1-10
     *
     * @param query The query to assess
     * @returns A complexity score from 1-10
     */
    assessQueryComplexity(query: string): number {
        log.info('Using compatibility layer for QueryDecompositionTool.assessQueryComplexity');
        return queryProcessor.assessQueryComplexity(query);
    }
}

// Export default instance for compatibility
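To make the synchronous compatibility path concrete, here is a self-contained sketch of roughly what decomposeQuery above returns. The local interfaces and the literal values are illustrative assumptions; the real SubQuery and DecomposedQuery types are re-exported from query_processor.

// Illustrative shapes only; the real types come from query_processor.
interface SubQueryLike {
    id: string;
    text: string;
    reason: string;
    isAnswered: boolean;
}

interface DecomposedQueryLike {
    originalQuery: string;
    subQueries: SubQueryLike[];
    status: string;
    complexity: number;
}

// For a query like "How does note syncing work?", the sync fallback yields roughly:
const example: DecomposedQueryLike = {
    originalQuery: 'How does note syncing work?',
    subQueries: [
        {
            id: 'sq_1753721547000_sync_0', // timestamp-based id; value illustrative
            text: 'How does note syncing work?',
            reason: 'Main question (for direct matching)',
            isAnswered: false
        },
        {
            id: 'sq_1753721547000_sync_1',
            text: 'What information is related to How does note syncing work??',
            reason: 'General exploration to find related content',
            isAnswered: false
        }
    ],
    status: 'pending',
    complexity: 3 // whatever assessQueryComplexity(query) returns
};

console.log(example.subQueries.map(sq => sq.text));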
@@ -19,13 +19,6 @@ export interface LLMServiceInterface {
         stream?: boolean;
         systemPrompt?: string;
     }): Promise<ChatResponse>;
-
-    /**
-     * Generate search queries by decomposing a complex query into simpler ones
-     * @param query The original user query to decompose
-     * @returns An array of decomposed search queries
-     */
-    generateSearchQueries?(query: string): Promise<string[]>;
 }

 /**
@@ -168,16 +168,28 @@ export class ChatPipeline {
         log.info(`========== STAGE 2: QUERY DECOMPOSITION ==========`);
         log.info('Performing query decomposition to generate effective search queries');
         const llmService = await this.getLLMService();
-        let searchQueries = [userQuery]; // Default to original query
+        let searchQueries = [userQuery];

-        if (llmService && llmService.generateSearchQueries) {
+        if (llmService) {
             try {
-                const decompositionResult = await llmService.generateSearchQueries(userQuery);
-                if (decompositionResult && decompositionResult.length > 0) {
-                    searchQueries = decompositionResult;
-                    log.info(`Generated ${searchQueries.length} search queries: ${JSON.stringify(searchQueries)}`);
+                // Import the query processor and use its decomposeQuery method
+                const queryProcessor = (await import('../context/services/query_processor.js')).default;
+
+                // Use the enhanced query processor with the LLM service
+                const decomposedQuery = await queryProcessor.decomposeQuery(userQuery, undefined, llmService);
+
+                if (decomposedQuery && decomposedQuery.subQueries && decomposedQuery.subQueries.length > 0) {
+                    // Extract search queries from the decomposed query
+                    searchQueries = decomposedQuery.subQueries.map(sq => sq.text);
+
+                    // Always include the original query if it's not already included
+                    if (!searchQueries.includes(userQuery)) {
+                        searchQueries.unshift(userQuery);
+                    }
+
+                    log.info(`Query decomposed with complexity ${decomposedQuery.complexity}/10 into ${searchQueries.length} search queries`);
                 } else {
-                    log.info('Query decomposition returned no results, using original query');
+                    log.info('Query decomposition returned no sub-queries, using original query');
                 }
             } catch (error: any) {
                 log.error(`Error in query decomposition: ${error.message || String(error)}`);
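A standalone sketch of the merge rule used in the pipeline stage above, illustrative only: sub-query texts become the search queries, and the original query is prepended only when decomposition did not already include it.

// Self-contained illustration of the search-query merge logic shown above.
function mergeSearchQueries(userQuery: string, subQueryTexts: string[]): string[] {
    const searchQueries = [...subQueryTexts];

    // Always include the original query if it's not already included
    if (!searchQueries.includes(userQuery)) {
        searchQueries.unshift(userQuery);
    }
    return searchQueries;
}

// Example: the original query is already among the sub-queries, so it is not duplicated.
console.log(mergeSearchQueries('compare sync options', [
    'compare sync options',
    'what sync mechanisms does Trilium support?'
]));
// -> ['compare sync options', 'what sync mechanisms does Trilium support?']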