diff --git a/apps/client/src/services/glob.ts b/apps/client/src/services/glob.ts
index 75ed6fd55..6e261b4c8 100644
--- a/apps/client/src/services/glob.ts
+++ b/apps/client/src/services/glob.ts
@@ -26,12 +26,18 @@ function setupGlobs() {
window.onerror = function (msg, url, lineNo, columnNo, error) {
const string = String(msg).toLowerCase();
+ let errorObjectString = "";
+ try {
+ errorObjectString = JSON.stringify(error);
+ } catch (e: any) {
+ errorObjectString = e.toString();
+ }
let message = "Uncaught error: ";
if (string.includes("script error")) {
message += "No details available";
} else {
- message += [`Message: ${msg}`, `URL: ${url}`, `Line: ${lineNo}`, `Column: ${columnNo}`, `Error object: ${JSON.stringify(error)}`, `Stack: ${error && error.stack}`].join(", ");
+ message += [`Message: ${msg}`, `URL: ${url}`, `Line: ${lineNo}`, `Column: ${columnNo}`, `Error object: ${errorObjectString}`, `Stack: ${error && error.stack}`].join(", ");
}
ws.logError(message);
diff --git a/apps/client/src/translations/en/translation.json b/apps/client/src/translations/en/translation.json
index 5d2e03ec5..44b50445b 100644
--- a/apps/client/src/translations/en/translation.json
+++ b/apps/client/src/translations/en/translation.json
@@ -1260,6 +1260,7 @@
},
"create_new_ai_chat": "Create new AI Chat",
"configuration_warnings": "There are some issues with your AI configuration. Please check your settings.",
+ "experimental_warning": "The LLM feature is currently experimental - you have been warned.",
"selected_provider": "Selected Provider",
"selected_provider_description": "Choose the AI provider for chat and completion features",
"select_model": "Select model...",
diff --git a/apps/client/src/widgets/llm_chat/llm_chat_panel.ts b/apps/client/src/widgets/llm_chat/llm_chat_panel.ts
index be401031f..3187018cf 100644
--- a/apps/client/src/widgets/llm_chat/llm_chat_panel.ts
+++ b/apps/client/src/widgets/llm_chat/llm_chat_panel.ts
@@ -350,6 +350,115 @@ export default class LlmChatPanel extends BasicWidget {
}
}
+ /**
+ * Save current chat data to a specific note ID
+ */
+ async saveCurrentDataToSpecificNote(targetNoteId: string | null) {
+ if (!this.onSaveData || !targetNoteId) {
+ console.warn('Cannot save chat data: no saveData callback or no targetNoteId available');
+ return;
+ }
+
+ try {
+ // Extract current tool execution steps if any exist
+ const toolSteps = extractInChatToolSteps(this.noteContextChatMessages);
+
+ // Get tool executions from both UI and any cached executions in metadata
+ let toolExecutions: Array<{
+ id: string;
+ name: string;
+ arguments: any;
+ result: any;
+ error?: string;
+ timestamp: string;
+ }> = [];
+
+ // First include any tool executions already in metadata (from streaming events)
+ if (this.metadata?.toolExecutions && Array.isArray(this.metadata.toolExecutions)) {
+ toolExecutions = [...this.metadata.toolExecutions];
+ console.log(`Including ${toolExecutions.length} tool executions from metadata`);
+ }
+
+ // Also extract any visible tool steps from the UI
+ const extractedExecutions = toolSteps.map(step => {
+ // Parse tool execution information
+ if (step.type === 'tool-execution') {
+ try {
+ const content = JSON.parse(step.content);
+ return {
+ id: content.toolCallId || `tool-${Date.now()}-${Math.random().toString(36).substring(2, 7)}`,
+ name: content.tool || 'unknown',
+ arguments: content.args || {},
+ result: content.result || {},
+ error: content.error,
+ timestamp: new Date().toISOString()
+ };
+ } catch (e) {
+ // If we can't parse it, create a basic record
+ return {
+ id: `tool-${Date.now()}-${Math.random().toString(36).substring(2, 7)}`,
+ name: 'unknown',
+ arguments: {},
+ result: step.content,
+ timestamp: new Date().toISOString()
+ };
+ }
+ } else if (step.type === 'result' && step.name) {
+ // Handle result steps with a name
+ return {
+ id: `tool-${Date.now()}-${Math.random().toString(36).substring(2, 7)}`,
+ name: step.name,
+ arguments: {},
+ result: step.content,
+ timestamp: new Date().toISOString()
+ };
+ }
+ return {
+ id: `tool-${Date.now()}-${Math.random().toString(36).substring(2, 7)}`,
+ name: 'unknown',
+ arguments: {},
+ result: 'Unrecognized tool step',
+ timestamp: new Date().toISOString()
+ };
+ });
+
+ // Merge the tool executions, keeping only unique IDs
+ const existingIds = new Set(toolExecutions.map((t: {id: string}) => t.id));
+ for (const exec of extractedExecutions) {
+ if (!existingIds.has(exec.id)) {
+ toolExecutions.push(exec);
+ existingIds.add(exec.id);
+ }
+ }
+
+ const dataToSave = {
+ messages: this.messages,
+ noteId: targetNoteId,
+ chatNoteId: targetNoteId, // For backward compatibility
+ toolSteps: toolSteps,
+ // Add sources if we have them
+ sources: this.sources || [],
+ // Add metadata
+ metadata: {
+ model: this.metadata?.model || undefined,
+ provider: this.metadata?.provider || undefined,
+ temperature: this.metadata?.temperature || 0.7,
+ lastUpdated: new Date().toISOString(),
+ // Add tool executions
+ toolExecutions: toolExecutions
+ }
+ };
+
+ console.log(`Saving chat data to specific note ${targetNoteId}, ${toolSteps.length} tool steps, ${this.sources?.length || 0} sources, ${toolExecutions.length} tool executions`);
+
+ // Save the data to the note attribute via the callback
+ // This is the ONLY place we should save data, letting the container widget handle persistence
+ await this.onSaveData(dataToSave);
+ } catch (error) {
+ console.error('Error saving chat data to specific note:', error);
+ }
+ }
+
/**
* Load saved chat data from the note attribute
*/
@@ -867,8 +976,8 @@ export default class LlmChatPanel extends BasicWidget {
this.showSources(postResponse.sources);
}
- // Process the assistant response
- this.processAssistantResponse(postResponse.content, postResponse);
+ // Process the assistant response with original chat note ID
+ this.processAssistantResponse(postResponse.content, postResponse, this.noteId);
hideLoadingIndicator(this.loadingIndicator);
return true;
@@ -884,7 +993,7 @@ export default class LlmChatPanel extends BasicWidget {
/**
* Process an assistant response - add to UI and save
*/
- private async processAssistantResponse(content: string, fullResponse?: any) {
+ private async processAssistantResponse(content: string, fullResponse?: any, originalChatNoteId?: string | null) {
// Add the response to the chat UI
this.addMessageToChat('assistant', content);
@@ -910,8 +1019,8 @@ export default class LlmChatPanel extends BasicWidget {
];
}
- // Save to note
- this.saveCurrentData().catch(err => {
+ // Save to note - use original chat note ID if provided
+ this.saveCurrentDataToSpecificNote(originalChatNoteId || this.noteId).catch(err => {
console.error("Failed to save assistant response to note:", err);
});
}
@@ -936,12 +1045,15 @@ export default class LlmChatPanel extends BasicWidget {
timestamp: string;
}> = [];
+ // Store the original chat note ID to ensure we save to the correct note even if user switches
+ const originalChatNoteId = this.noteId;
+
return setupStreamingResponse(
this.noteId,
messageParams,
// Content update handler
(content: string, isDone: boolean = false) => {
- this.updateStreamingUI(content, isDone);
+ this.updateStreamingUI(content, isDone, originalChatNoteId);
// Update session data with additional metadata when streaming is complete
if (isDone) {
@@ -1067,13 +1179,13 @@ export default class LlmChatPanel extends BasicWidget {
/**
* Update the UI with streaming content
*/
- private updateStreamingUI(assistantResponse: string, isDone: boolean = false) {
+ private updateStreamingUI(assistantResponse: string, isDone: boolean = false, originalChatNoteId?: string | null) {
// Track if we have a streaming message in progress
const hasStreamingMessage = !!this.noteContextChatMessages.querySelector('.assistant-message.streaming');
-
+
// Create a new message element or use the existing streaming one
let assistantMessageEl: HTMLElement;
-
+
if (hasStreamingMessage) {
// Use the existing streaming message
assistantMessageEl = this.noteContextChatMessages.querySelector('.assistant-message.streaming')!;
@@ -1103,7 +1215,7 @@ export default class LlmChatPanel extends BasicWidget {
if (isDone) {
// Remove the streaming class to mark this message as complete
assistantMessageEl.classList.remove('streaming');
-
+
// Apply syntax highlighting
formatCodeBlocks($(assistantMessageEl as HTMLElement));
@@ -1118,8 +1230,8 @@ export default class LlmChatPanel extends BasicWidget {
timestamp: new Date()
});
- // Save the updated message list
- this.saveCurrentData();
+ // Save the updated message list to the original chat note
+ this.saveCurrentDataToSpecificNote(originalChatNoteId || this.noteId);
}
// Scroll to bottom
diff --git a/apps/client/src/widgets/llm_chat/validation.ts b/apps/client/src/widgets/llm_chat/validation.ts
index 60aac7003..67a3bbcdc 100644
--- a/apps/client/src/widgets/llm_chat/validation.ts
+++ b/apps/client/src/widgets/llm_chat/validation.ts
@@ -2,6 +2,7 @@
* Validation functions for LLM Chat
*/
import options from "../../services/options.js";
+import { t } from "../../services/i18n.js";
/**
* Validate providers configuration
@@ -37,6 +38,9 @@ export async function validateProviders(validationWarning: HTMLElement): Promise
// Check for configuration issues with providers in the precedence list
const configIssues: string[] = [];
+ // Always add experimental warning as the first item
+ configIssues.push(t("ai_llm.experimental_warning"));
+
// Check each provider in the precedence list for proper configuration
for (const provider of precedenceList) {
if (provider === 'openai') {
diff --git a/apps/client/src/widgets/type_widgets/ai_chat.ts b/apps/client/src/widgets/type_widgets/ai_chat.ts
index f733b499b..7f015d334 100644
--- a/apps/client/src/widgets/type_widgets/ai_chat.ts
+++ b/apps/client/src/widgets/type_widgets/ai_chat.ts
@@ -182,17 +182,30 @@ export default class AiChatTypeWidget extends TypeWidget {
// Save chat data to the note
async saveData(data: any) {
- if (!this.note) {
+ // If we have a noteId in the data, that's the AI Chat note we should save to
+ // This happens when the chat panel is saving its conversation
+ const targetNoteId = data.noteId;
+
+ // If no noteId in data, use the current note (for new chats)
+ const noteIdToUse = targetNoteId || this.note?.noteId;
+
+ if (!noteIdToUse) {
+ console.warn("Cannot save AI Chat data: no note ID available");
return;
}
try {
- console.log(`AiChatTypeWidget: Saving data for note ${this.note.noteId}`);
+ console.log(`AiChatTypeWidget: Saving data for note ${noteIdToUse} (current note: ${this.note?.noteId}, data.noteId: ${data.noteId})`);
+
+ // Safety check: if we have both IDs and they don't match, warn about it
+ if (targetNoteId && this.note?.noteId && targetNoteId !== this.note.noteId) {
+ console.warn(`Note ID mismatch: saving to ${targetNoteId} but current note is ${this.note.noteId}`);
+ }
// Format the data properly - this is the canonical format of the data
const formattedData = {
messages: data.messages || [],
- noteId: this.note.noteId, // Always use the note's own ID
+ noteId: noteIdToUse, // Always preserve the correct note ID
toolSteps: data.toolSteps || [],
sources: data.sources || [],
metadata: {
@@ -201,8 +214,8 @@ export default class AiChatTypeWidget extends TypeWidget {
}
};
- // Save the data to the note
- await server.put(`notes/${this.note.noteId}/data`, {
+ // Save the data to the correct note
+ await server.put(`notes/${noteIdToUse}/data`, {
content: JSON.stringify(formattedData, null, 2)
});
} catch (e) {
diff --git a/apps/client/src/widgets/type_widgets/editable_text.ts b/apps/client/src/widgets/type_widgets/editable_text.ts
index cf9ecb47e..2c62eb08e 100644
--- a/apps/client/src/widgets/type_widgets/editable_text.ts
+++ b/apps/client/src/widgets/type_widgets/editable_text.ts
@@ -166,7 +166,7 @@ export default class EditableTextTypeWidget extends AbstractTextTypeWidget {
// is shorter than minimumNonErrorTimePeriod, the watchdog changes
// its state to crashedPermanently, and it stops restarting the editor.
// This prevents an infinite restart loop.
- crashNumberLimit: 3,
+ crashNumberLimit: 10,
// A minimum number of milliseconds between saving the editor data internally (defaults to 5000).
// Note that for large documents, this might impact the editor performance.
saveInterval: 5000
@@ -181,8 +181,7 @@ export default class EditableTextTypeWidget extends AbstractTextTypeWidget {
return;
}
- logInfo(`CKEditor crash logs: ${JSON.stringify(this.watchdog.crashes)}`);
- this.watchdog.crashes.forEach((crashInfo) => console.log(crashInfo));
+ logError(`CKEditor crash logs: ${JSON.stringify(this.watchdog.crashes, null, 4)}`);
if (currentState === "crashedPermanently") {
dialogService.info(`Editing component keeps crashing. Please try restarting Trilium. If problem persists, consider creating a bug report.`);
@@ -191,7 +190,7 @@ export default class EditableTextTypeWidget extends AbstractTextTypeWidget {
}
});
- this.watchdog.setCreator(async (elementOrData, editorConfig) => {
+ this.watchdog.setCreator(async (_, editorConfig) => {
logInfo("Creating new CKEditor");
const finalConfig = {
@@ -221,7 +220,7 @@ export default class EditableTextTypeWidget extends AbstractTextTypeWidget {
}
//@ts-ignore
- const editor = await editorClass.create(elementOrData, finalConfig);
+ const editor = await editorClass.create(this.$editor[0], finalConfig);
const notificationsPlugin = editor.plugins.get("Notification");
notificationsPlugin.on("show:warning", (evt, data) => {
@@ -337,6 +336,11 @@ export default class EditableTextTypeWidget extends AbstractTextTypeWidget {
}
getData() {
+ if (!this.watchdog.editor) {
+ // There is nothing to save, most likely a result of the editor crashing and reinitializing.
+ return;
+ }
+
const content = this.watchdog.editor?.getData() ?? "";
// if content is only tags/whitespace (typically
), then just make it empty,
@@ -375,7 +379,7 @@ export default class EditableTextTypeWidget extends AbstractTextTypeWidget {
}
}
- insertDateTimeToTextCommand() {
+ insertDateTimeToTextCommand() {
const date = new Date();
const customDateTimeFormat = options.get("customDateTimeFormat");
const dateString = utils.formatDateTime(date, customDateTimeFormat);
diff --git a/apps/client/src/widgets/type_widgets/options/ai_settings/ai_settings_widget.ts b/apps/client/src/widgets/type_widgets/options/ai_settings/ai_settings_widget.ts
index 583f24f9c..ce16fb64d 100644
--- a/apps/client/src/widgets/type_widgets/options/ai_settings/ai_settings_widget.ts
+++ b/apps/client/src/widgets/type_widgets/options/ai_settings/ai_settings_widget.ts
@@ -48,7 +48,7 @@ export default class AiSettingsWidget extends OptionsWidget {
if (optionName === 'aiEnabled') {
try {
const isEnabled = value === 'true';
-
+
if (isEnabled) {
toastService.showMessage(t("ai_llm.ai_enabled") || "AI features enabled");
} else {
@@ -203,6 +203,11 @@ export default class AiSettingsWidget extends OptionsWidget {
// Get selected provider
const selectedProvider = this.$widget.find('.ai-selected-provider').val() as string;
+ // Start with experimental warning
+ const allWarnings = [
+ t("ai_llm.experimental_warning")
+ ];
+
// Check for selected provider configuration
const providerWarnings: string[] = [];
if (selectedProvider === 'openai') {
@@ -222,10 +227,8 @@ export default class AiSettingsWidget extends OptionsWidget {
}
}
- // Combine all warnings
- const allWarnings = [
- ...providerWarnings
- ];
+ // Add provider warnings to all warnings
+ allWarnings.push(...providerWarnings);
// Show or hide warnings
if (allWarnings.length > 0) {
diff --git a/apps/server/.serve-nodir.env b/apps/server/.serve-nodir.env
new file mode 100644
index 000000000..3612d5d9b
--- /dev/null
+++ b/apps/server/.serve-nodir.env
@@ -0,0 +1,3 @@
+TRILIUM_ENV=dev
+TRILIUM_RESOURCE_DIR=./apps/server/dist
+TRILIUM_PUBLIC_SERVER=http://localhost:4200
\ No newline at end of file
diff --git a/apps/server/package.json b/apps/server/package.json
index 9e662763c..d15b42976 100644
--- a/apps/server/package.json
+++ b/apps/server/package.json
@@ -11,25 +11,25 @@
"@types/archiver": "6.0.3",
"@types/better-sqlite3": "7.6.13",
"@types/cls-hooked": "4.3.9",
- "@types/compression": "1.8.0",
- "@types/cookie-parser": "1.4.8",
+ "@types/compression": "1.8.1",
+ "@types/cookie-parser": "1.4.9",
"@types/debounce": "1.2.4",
"@types/ejs": "3.1.5",
"@types/escape-html": "1.0.4",
"@types/express-http-proxy": "1.6.6",
- "@types/express-session": "1.18.1",
+ "@types/express-session": "1.18.2",
"@types/fs-extra": "11.0.4",
"@types/html": "1.0.4",
"@types/ini": "4.1.1",
"@types/js-yaml": "4.0.9",
"@types/jsdom": "21.1.7",
- "@types/mime-types": "3.0.0",
- "@types/multer": "1.4.12",
+ "@types/mime-types": "3.0.1",
+ "@types/multer": "1.4.13",
"@types/safe-compare": "1.1.2",
"@types/sanitize-html": "2.16.0",
"@types/sax": "1.2.7",
"@types/serve-favicon": "2.5.7",
- "@types/serve-static": "1.15.7",
+ "@types/serve-static": "1.15.8",
"@types/session-file-store": "1.2.5",
"@types/stream-throttle": "0.1.4",
"@types/supertest": "6.0.3",
@@ -49,7 +49,7 @@
"axios": "1.9.0",
"bindings": "1.5.0",
"chardet": "2.1.0",
- "cheerio": "1.0.0",
+ "cheerio": "1.1.0",
"chokidar": "4.0.3",
"cls-hooked": "4.2.2",
"compression": "1.8.0",
@@ -129,6 +129,23 @@
"runBuildTargetDependencies": false
}
},
+ "serve-nodir": {
+ "executor": "@nx/js:node",
+ "dependsOn": [
+ {
+ "projects": [
+ "client"
+ ],
+ "target": "serve"
+ },
+ "build-without-client"
+ ],
+ "continuous": true,
+ "options": {
+ "buildTarget": "server:build-without-client:development",
+ "runBuildTargetDependencies": false
+ }
+ },
"edit-integration-db": {
"executor": "@nx/js:node",
"dependsOn": [
diff --git a/apps/server/src/services/app_info.ts b/apps/server/src/services/app_info.ts
index 7f70f8bcd..b3da2ac0e 100644
--- a/apps/server/src/services/app_info.ts
+++ b/apps/server/src/services/app_info.ts
@@ -3,8 +3,8 @@ import build from "./build.js";
import packageJson from "../../package.json" with { type: "json" };
import dataDir from "./data_dir.js";
-const APP_DB_VERSION = 231;
-const SYNC_VERSION = 35;
+const APP_DB_VERSION = 232;
+const SYNC_VERSION = 36;
const CLIPPER_PROTOCOL_VERSION = "1.0";
export default {
diff --git a/apps/server/src/services/llm/ai_service_manager.ts b/apps/server/src/services/llm/ai_service_manager.ts
index cbb0a8755..bd47b4327 100644
--- a/apps/server/src/services/llm/ai_service_manager.ts
+++ b/apps/server/src/services/llm/ai_service_manager.ts
@@ -40,8 +40,8 @@ interface NoteContext {
}
export class AIServiceManager implements IAIServiceManager {
- private services: Partial<Record<ServiceProviders, AIService>> = {};
-
+ private currentService: AIService | null = null;
+ private currentProvider: ServiceProviders | null = null;
private initialized = false;
constructor() {
@@ -50,9 +50,8 @@ export class AIServiceManager implements IAIServiceManager {
log.error(`Error initializing LLM tools during AIServiceManager construction: ${error.message || String(error)}`);
});
- // Set up event listener for provider changes
- this.setupProviderChangeListener();
-
+ // Removed complex provider change listener - we'll read options fresh each time
+
this.initialized = true;
}
@@ -140,15 +139,15 @@ export class AIServiceManager implements IAIServiceManager {
*/
async getOrCreateAnyService(): Promise<AIService> {
this.ensureInitialized();
-
+
// Get the selected provider using the new configuration system
const selectedProvider = await this.getSelectedProviderAsync();
-
-
+
+
if (!selectedProvider) {
throw new Error('No AI provider is selected. Please select a provider (OpenAI, Anthropic, or Ollama) in your AI settings.');
}
-
+
try {
const service = await this.getOrCreateChatProvider(selectedProvider);
if (service) {
@@ -166,7 +165,7 @@ export class AIServiceManager implements IAIServiceManager {
*/
isAnyServiceAvailable(): boolean {
this.ensureInitialized();
-
+
// Check if we have the selected provider available
return this.getAvailableProviders().length > 0;
}
@@ -174,43 +173,37 @@ export class AIServiceManager implements IAIServiceManager {
/**
* Get list of available providers
*/
- getAvailableProviders(): ServiceProviders[] {
+ getAvailableProviders(): ServiceProviders[] {
this.ensureInitialized();
-
+
const allProviders: ServiceProviders[] = ['openai', 'anthropic', 'ollama'];
const availableProviders: ServiceProviders[] = [];
-
+
for (const providerName of allProviders) {
- // Use a sync approach - check if we can create the provider
- const service = this.services[providerName];
- if (service && service.isAvailable()) {
- availableProviders.push(providerName);
- } else {
- // For providers not yet created, check configuration to see if they would be available
- try {
- switch (providerName) {
- case 'openai':
- if (options.getOption('openaiApiKey')) {
- availableProviders.push(providerName);
- }
- break;
- case 'anthropic':
- if (options.getOption('anthropicApiKey')) {
- availableProviders.push(providerName);
- }
- break;
- case 'ollama':
- if (options.getOption('ollamaBaseUrl')) {
- availableProviders.push(providerName);
- }
- break;
- }
- } catch (error) {
- // Ignore configuration errors, provider just won't be available
+ // Check configuration to see if provider would be available
+ try {
+ switch (providerName) {
+ case 'openai':
+ if (options.getOption('openaiApiKey') || options.getOption('openaiBaseUrl')) {
+ availableProviders.push(providerName);
+ }
+ break;
+ case 'anthropic':
+ if (options.getOption('anthropicApiKey')) {
+ availableProviders.push(providerName);
+ }
+ break;
+ case 'ollama':
+ if (options.getOption('ollamaBaseUrl')) {
+ availableProviders.push(providerName);
+ }
+ break;
}
+ } catch (error) {
+ // Ignore configuration errors, provider just won't be available
}
}
-
+
return availableProviders;
}
@@ -234,11 +227,11 @@ export class AIServiceManager implements IAIServiceManager {
// Get the selected provider
const selectedProvider = await this.getSelectedProviderAsync();
-
+
if (!selectedProvider) {
throw new Error('No AI provider is selected. Please select a provider in your AI settings.');
}
-
+
// Check if the selected provider is available
const availableProviders = this.getAvailableProviders();
if (!availableProviders.includes(selectedProvider)) {
@@ -379,47 +372,68 @@ export class AIServiceManager implements IAIServiceManager {
}
/**
- * Get or create a chat provider on-demand with inline validation
+ * Clear the current provider (forces recreation on next access)
+ */
+ public clearCurrentProvider(): void {
+ this.currentService = null;
+ this.currentProvider = null;
+ log.info('Cleared current provider - will be recreated on next access');
+ }
+
+ /**
+ * Get or create the current provider instance - only one instance total
*/
private async getOrCreateChatProvider(providerName: ServiceProviders): Promise<AIService | null> {
- // Return existing provider if already created
- if (this.services[providerName]) {
- return this.services[providerName];
+ // If provider type changed, clear the old one
+ if (this.currentProvider && this.currentProvider !== providerName) {
+ log.info(`Provider changed from ${this.currentProvider} to ${providerName}, clearing old service`);
+ this.currentService = null;
+ this.currentProvider = null;
}
- // Create and validate provider on-demand
+ // Return existing service if it matches and is available
+ if (this.currentService && this.currentProvider === providerName && this.currentService.isAvailable()) {
+ return this.currentService;
+ }
+
+ // Clear invalid service
+ if (this.currentService) {
+ this.currentService = null;
+ this.currentProvider = null;
+ }
+
+ // Create new service for the requested provider
try {
let service: AIService | null = null;
-
+
switch (providerName) {
case 'openai': {
const apiKey = options.getOption('openaiApiKey');
const baseUrl = options.getOption('openaiBaseUrl');
if (!apiKey && !baseUrl) return null;
-
+
service = new OpenAIService();
- // Validate by checking if it's available
if (!service.isAvailable()) {
throw new Error('OpenAI service not available');
}
break;
}
-
+
case 'anthropic': {
const apiKey = options.getOption('anthropicApiKey');
if (!apiKey) return null;
-
+
service = new AnthropicService();
if (!service.isAvailable()) {
throw new Error('Anthropic service not available');
}
break;
}
-
+
case 'ollama': {
const baseUrl = options.getOption('ollamaBaseUrl');
if (!baseUrl) return null;
-
+
service = new OllamaService();
if (!service.isAvailable()) {
throw new Error('Ollama service not available');
@@ -427,9 +441,12 @@ export class AIServiceManager implements IAIServiceManager {
break;
}
}
-
+
if (service) {
- this.services[providerName] = service;
+ // Cache the new service
+ this.currentService = service;
+ this.currentProvider = providerName;
+ log.info(`Created and cached new ${providerName} service`);
return service;
}
} catch (error: any) {
@@ -630,28 +647,47 @@ export class AIServiceManager implements IAIServiceManager {
* Check if a specific provider is available
*/
isProviderAvailable(provider: string): boolean {
- return this.services[provider as ServiceProviders]?.isAvailable() ?? false;
+ // Check if this is the current provider and if it's available
+ if (this.currentProvider === provider && this.currentService) {
+ return this.currentService.isAvailable();
+ }
+
+ // For other providers, check configuration
+ try {
+ switch (provider) {
+ case 'openai':
+ return !!(options.getOption('openaiApiKey') || options.getOption('openaiBaseUrl'));
+ case 'anthropic':
+ return !!options.getOption('anthropicApiKey');
+ case 'ollama':
+ return !!options.getOption('ollamaBaseUrl');
+ default:
+ return false;
+ }
+ } catch {
+ return false;
+ }
}
/**
* Get metadata about a provider
*/
getProviderMetadata(provider: string): ProviderMetadata | null {
- const service = this.services[provider as ServiceProviders];
- if (!service) {
- return null;
+ // Only return metadata if this is the current active provider
+ if (this.currentProvider === provider && this.currentService) {
+ return {
+ name: provider,
+ capabilities: {
+ chat: true,
+ streaming: true,
+ functionCalling: provider === 'openai' // Only OpenAI has function calling
+ },
+ models: ['default'], // Placeholder, could be populated from the service
+ defaultModel: 'default'
+ };
}
- return {
- name: provider,
- capabilities: {
- chat: true,
- streaming: true,
- functionCalling: provider === 'openai' // Only OpenAI has function calling
- },
- models: ['default'], // Placeholder, could be populated from the service
- defaultModel: 'default'
- };
+ return null;
}
@@ -665,67 +701,8 @@ export class AIServiceManager implements IAIServiceManager {
return String(error);
}
- /**
- * Set up event listener for provider changes
- */
- private setupProviderChangeListener(): void {
- // List of AI-related options that should trigger service recreation
- const aiRelatedOptions = [
- 'aiEnabled',
- 'aiSelectedProvider',
- 'openaiApiKey',
- 'openaiBaseUrl',
- 'openaiDefaultModel',
- 'anthropicApiKey',
- 'anthropicBaseUrl',
- 'anthropicDefaultModel',
- 'ollamaBaseUrl',
- 'ollamaDefaultModel'
- ];
-
- eventService.subscribe(['entityChanged'], async ({ entityName, entity }) => {
- if (entityName === 'options' && entity && aiRelatedOptions.includes(entity.name)) {
- log.info(`AI-related option '${entity.name}' changed, recreating LLM services`);
-
- // Special handling for aiEnabled toggle
- if (entity.name === 'aiEnabled') {
- const isEnabled = entity.value === 'true';
-
- if (isEnabled) {
- log.info('AI features enabled, initializing AI service');
- // Initialize the AI service
- await this.initialize();
- } else {
- log.info('AI features disabled, clearing providers');
- // Clear chat providers
- this.services = {};
- }
- } else {
- // For other AI-related options, recreate services on-demand
- await this.recreateServices();
- }
- }
- });
- }
-
- /**
- * Recreate LLM services when provider settings change
- */
- private async recreateServices(): Promise<void> {
- try {
- log.info('Recreating LLM services due to configuration change');
-
- // Clear configuration cache first
- clearConfigurationCache();
-
- // Clear existing chat providers (they will be recreated on-demand)
- this.services = {};
-
- log.info('LLM services recreated successfully');
- } catch (error) {
- log.error(`Error recreating LLM services: ${this.handleError(error)}`);
- }
- }
+ // Removed complex event listener and cache invalidation logic
+ // Services will be created fresh when needed by reading current options
}
diff --git a/apps/server/src/services/llm/config/configuration_helpers.ts b/apps/server/src/services/llm/config/configuration_helpers.ts
index d48504bd2..9f92164a2 100644
--- a/apps/server/src/services/llm/config/configuration_helpers.ts
+++ b/apps/server/src/services/llm/config/configuration_helpers.ts
@@ -1,4 +1,3 @@
-import configurationManager from './configuration_manager.js';
import optionService from '../../options.js';
import log from '../../log.js';
import type {
@@ -13,7 +12,7 @@ import type {
*/
/**
- * Get the selected AI provider
+ * Get the selected AI provider - always fresh from options
*/
export async function getSelectedProvider(): Promise<ProviderType | null> {
const providerOption = optionService.getOption('aiSelectedProvider');
@@ -25,38 +24,100 @@ export async function getSelectedProvider(): Promise {
* Parse a model identifier (handles "provider:model" format)
*/
export function parseModelIdentifier(modelString: string): ModelIdentifier {
- return configurationManager.parseModelIdentifier(modelString);
+ if (!modelString) {
+ return {
+ modelId: '',
+ fullIdentifier: ''
+ };
+ }
+
+ const parts = modelString.split(':');
+
+ if (parts.length === 1) {
+ // No provider prefix, just model name
+ return {
+ modelId: modelString,
+ fullIdentifier: modelString
+ };
+ }
+
+ // Check if first part is a known provider
+ const potentialProvider = parts[0].toLowerCase();
+ const knownProviders: ProviderType[] = ['openai', 'anthropic', 'ollama'];
+
+ if (knownProviders.includes(potentialProvider as ProviderType)) {
+ // Provider prefix format
+ const provider = potentialProvider as ProviderType;
+ const modelId = parts.slice(1).join(':'); // Rejoin in case model has colons
+
+ return {
+ provider,
+ modelId,
+ fullIdentifier: modelString
+ };
+ }
+
+ // Not a provider prefix, treat whole string as model name
+ return {
+ modelId: modelString,
+ fullIdentifier: modelString
+ };
}
/**
* Create a model configuration from a model string
*/
export function createModelConfig(modelString: string, defaultProvider?: ProviderType): ModelConfig {
- return configurationManager.createModelConfig(modelString, defaultProvider);
+ const identifier = parseModelIdentifier(modelString);
+ const provider = identifier.provider || defaultProvider || 'openai'; // fallback to openai if no provider specified
+
+ return {
+ provider,
+ modelId: identifier.modelId,
+ displayName: identifier.fullIdentifier
+ };
}
/**
- * Get the default model for a specific provider
+ * Get the default model for a specific provider - always fresh from options
*/
export async function getDefaultModelForProvider(provider: ProviderType): Promise<string | undefined> {
- const config = await configurationManager.getAIConfig();
- return config.defaultModels[provider]; // This can now be undefined
+ const optionKey = `${provider}DefaultModel` as const;
+ return optionService.getOption(optionKey) || undefined;
}
/**
- * Get provider settings for a specific provider
+ * Get provider settings for a specific provider - always fresh from options
*/
export async function getProviderSettings(provider: ProviderType) {
- const config = await configurationManager.getAIConfig();
- return config.providerSettings[provider];
+ switch (provider) {
+ case 'openai':
+ return {
+ apiKey: optionService.getOption('openaiApiKey'),
+ baseUrl: optionService.getOption('openaiBaseUrl'),
+ defaultModel: optionService.getOption('openaiDefaultModel')
+ };
+ case 'anthropic':
+ return {
+ apiKey: optionService.getOption('anthropicApiKey'),
+ baseUrl: optionService.getOption('anthropicBaseUrl'),
+ defaultModel: optionService.getOption('anthropicDefaultModel')
+ };
+ case 'ollama':
+ return {
+ baseUrl: optionService.getOption('ollamaBaseUrl'),
+ defaultModel: optionService.getOption('ollamaDefaultModel')
+ };
+ default:
+ return {};
+ }
}
/**
- * Check if AI is enabled
+ * Check if AI is enabled - always fresh from options
*/
export async function isAIEnabled(): Promise {
- const config = await configurationManager.getAIConfig();
- return config.enabled;
+ return optionService.getOptionBool('aiEnabled');
}
/**
@@ -82,7 +143,7 @@ export async function isProviderConfigured(provider: ProviderType): Promise {
const selectedProvider = await getSelectedProvider();
-
+
if (!selectedProvider) {
return null; // No provider selected
}
@@ -95,17 +156,51 @@ export async function getAvailableSelectedProvider(): Promise {
const selectedProvider = await getSelectedProvider();
-
+
if (!selectedProvider) {
return null; // No provider selected
}
diff --git a/apps/server/src/services/llm/config/configuration_manager.ts b/apps/server/src/services/llm/config/configuration_manager.ts
index 8dfd8df05..6b879eed6 100644
--- a/apps/server/src/services/llm/config/configuration_manager.ts
+++ b/apps/server/src/services/llm/config/configuration_manager.ts
@@ -21,11 +21,6 @@ import type {
*/
export class ConfigurationManager {
private static instance: ConfigurationManager | null = null;
- private cachedConfig: AIConfig | null = null;
- private lastConfigUpdate: number = 0;
-
- // Cache for 5 minutes to avoid excessive option reads
- private static readonly CACHE_DURATION = 5 * 60 * 1000;
private constructor() {}
@@ -37,14 +32,9 @@ export class ConfigurationManager {
}
/**
- * Get the complete AI configuration
+ * Get the complete AI configuration - always fresh, no caching
*/
public async getAIConfig(): Promise {
- const now = Date.now();
- if (this.cachedConfig && (now - this.lastConfigUpdate) < ConfigurationManager.CACHE_DURATION) {
- return this.cachedConfig;
- }
-
try {
const config: AIConfig = {
enabled: await this.getAIEnabled(),
@@ -53,8 +43,6 @@ export class ConfigurationManager {
providerSettings: await this.getProviderSettings()
};
- this.cachedConfig = config;
- this.lastConfigUpdate = now;
return config;
} catch (error) {
log.error(`Error loading AI configuration: ${error}`);
@@ -263,14 +251,6 @@ export class ConfigurationManager {
return result;
}
- /**
- * Clear cached configuration (force reload on next access)
- */
- public clearCache(): void {
- this.cachedConfig = null;
- this.lastConfigUpdate = 0;
- }
-
// Private helper methods
private async getAIEnabled(): Promise {
diff --git a/apps/server/src/services/llm/pipeline/stages/model_selection_stage.ts b/apps/server/src/services/llm/pipeline/stages/model_selection_stage.ts
index b51e59658..7b1276b91 100644
--- a/apps/server/src/services/llm/pipeline/stages/model_selection_stage.ts
+++ b/apps/server/src/services/llm/pipeline/stages/model_selection_stage.ts
@@ -111,19 +111,13 @@ export class ModelSelectionStage extends BasePipelineStage {
- try {
- // Use the same logic as the main process method
- const { getValidModelConfig, getSelectedProvider } = await import('../../config/configuration_helpers.js');
- const selectedProvider = await getSelectedProvider();
- if (!selectedProvider) {
- throw new Error('No AI provider is selected. Please select a provider in your AI settings.');
- }
-
- // Check if the provider is available through the service manager
- if (!aiServiceManager.isProviderAvailable(selectedProvider)) {
- throw new Error(`Selected provider ${selectedProvider} is not available`);
- }
-
- // Try to get a valid model config
- const modelConfig = await getValidModelConfig(selectedProvider);
-
- if (!modelConfig) {
- throw new Error(`No default model configured for provider ${selectedProvider}. Please configure a default model in your AI settings.`);
- }
-
- // Set provider metadata
- if (!input.options.providerMetadata) {
- input.options.providerMetadata = {
- provider: selectedProvider as 'openai' | 'anthropic' | 'ollama' | 'local',
- modelId: modelConfig.model
- };
- }
-
- log.info(`Selected default model ${modelConfig.model} from provider ${selectedProvider}`);
- return modelConfig.model;
- } catch (error) {
- log.error(`Error determining default model: ${error}`);
- throw error; // Don't provide fallback defaults, let the error propagate
- }
- }
/**
* Get estimated context window for Ollama models
@@ -283,48 +225,5 @@ export class ModelSelectionStage extends BasePipelineStage {
- try {
- log.info(`Getting default model for provider ${provider} using AI service manager`);
-
- // Use the existing AI service manager instead of duplicating API calls
- const service = await aiServiceManager.getInstance().getService(provider);
-
- if (!service || !service.isAvailable()) {
- log.info(`Provider ${provider} service is not available`);
- return null;
- }
- // Check if the service has a method to get available models
- if (typeof (service as any).getAvailableModels === 'function') {
- try {
- const models = await (service as any).getAvailableModels();
- if (models && models.length > 0) {
- // Use the first available model - no hardcoded preferences
- const selectedModel = models[0];
-
- // Import server-side options to update the default model
- const optionService = (await import('../../../options.js')).default;
- const optionKey = `${provider}DefaultModel` as const;
-
- await optionService.setOption(optionKey, selectedModel);
- log.info(`Set default ${provider} model to: ${selectedModel}`);
- return selectedModel;
- }
- } catch (modelError) {
- log.error(`Error fetching models from ${provider} service: ${modelError}`);
- }
- }
-
- log.info(`Provider ${provider} does not support dynamic model fetching`);
- return null;
- } catch (error) {
- log.error(`Error getting default model for provider ${provider}: ${error}`);
- return null;
- }
- }
}
diff --git a/apps/server/src/services/llm/providers/providers.ts b/apps/server/src/services/llm/providers/providers.ts
index 8575fa6aa..5416d9366 100644
--- a/apps/server/src/services/llm/providers/providers.ts
+++ b/apps/server/src/services/llm/providers/providers.ts
@@ -26,7 +26,11 @@ export function getOpenAIOptions(
}
const baseUrl = options.getOption('openaiBaseUrl') || PROVIDER_CONSTANTS.OPENAI.BASE_URL;
- const modelName = opts.model || options.getOption('openaiDefaultModel') || PROVIDER_CONSTANTS.OPENAI.DEFAULT_MODEL;
+ const modelName = opts.model || options.getOption('openaiDefaultModel');
+
+ if (!modelName) {
+ throw new Error('No OpenAI model configured. Please set a default model in your AI settings.');
+ }
// Create provider metadata
const providerMetadata: ModelMetadata = {
@@ -87,7 +91,11 @@ export function getAnthropicOptions(
}
const baseUrl = options.getOption('anthropicBaseUrl') || PROVIDER_CONSTANTS.ANTHROPIC.BASE_URL;
- const modelName = opts.model || options.getOption('anthropicDefaultModel') || PROVIDER_CONSTANTS.ANTHROPIC.DEFAULT_MODEL;
+ const modelName = opts.model || options.getOption('anthropicDefaultModel');
+
+ if (!modelName) {
+ throw new Error('No Anthropic model configured. Please set a default model in your AI settings.');
+ }
// Create provider metadata
const providerMetadata: ModelMetadata = {
@@ -150,8 +158,12 @@ export async function getOllamaOptions(
throw new Error('Ollama API URL is not configured');
}
- // Get the model name - no prefix handling needed now
- let modelName = opts.model || options.getOption('ollamaDefaultModel') || 'llama3';
+ // Get the model name - no defaults, must be configured by user
+ let modelName = opts.model || options.getOption('ollamaDefaultModel');
+
+ if (!modelName) {
+ throw new Error('No Ollama model configured. Please set a default model in your AI settings.');
+ }
// Create provider metadata
const providerMetadata: ModelMetadata = {
@@ -249,4 +261,4 @@ async function getOllamaModelContextWindow(modelName: string): Promise {
log.info(`Error getting context window for model ${modelName}: ${error}`);
return MODEL_CAPABILITIES['default'].contextWindowTokens; // Default fallback
}
-}
\ No newline at end of file
+}
diff --git a/apps/server/src/services/options.ts b/apps/server/src/services/options.ts
index f6e575c19..1cc67df5a 100644
--- a/apps/server/src/services/options.ts
+++ b/apps/server/src/services/options.ts
@@ -82,6 +82,26 @@ function setOption(name: T, value: string | OptionDefinit
} else {
createOption(name, value, false);
}
+
+ // Clear current AI provider when AI-related options change
+ const aiOptions = [
+ 'aiSelectedProvider', 'openaiApiKey', 'openaiBaseUrl', 'openaiDefaultModel',
+ 'anthropicApiKey', 'anthropicBaseUrl', 'anthropicDefaultModel',
+ 'ollamaBaseUrl', 'ollamaDefaultModel'
+ ];
+
+ if (aiOptions.includes(name)) {
+ // Import dynamically to avoid circular dependencies
+ setImmediate(async () => {
+ try {
+ const aiServiceManager = (await import('./llm/ai_service_manager.js')).default;
+ aiServiceManager.getInstance().clearCurrentProvider();
+ console.log(`Cleared AI provider after ${name} option changed`);
+ } catch (error) {
+ console.log(`Could not clear AI provider: ${error}`);
+ }
+ });
+ }
}
/**
diff --git a/docs/User Guide/User Guide/Installation & Setup/Server Installation/2. Reverse proxy/Nginx.md b/docs/User Guide/User Guide/Installation & Setup/Server Installation/2. Reverse proxy/Nginx.md
index 25572d50a..24b57c4ea 100644
--- a/docs/User Guide/User Guide/Installation & Setup/Server Installation/2. Reverse proxy/Nginx.md
+++ b/docs/User Guide/User Guide/Installation & Setup/Server Installation/2. Reverse proxy/Nginx.md
@@ -16,6 +16,13 @@ Configure Nginx proxy and HTTPS. The operating system here is Ubuntu 18.04.
3. Fill the file with the context shown below, part of the setting show be changed. Then you can enjoy your web with HTTPS forced and proxy.
```
+ # This part configures where your Trilium server is running
+ upstream trilium {
+ zone trilium 64k;
+ server 127.0.0.1:8080; # change it to a different hostname and port if non-default is used
+ keepalive 2;
+ }
+
# This part is for proxy and HTTPS configure
server {
listen 443 ssl;
@@ -35,9 +42,8 @@ Configure Nginx proxy and HTTPS. The operating system here is Ubuntu 18.04.
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
- proxy_pass http://127.0.0.1:8080; # change it to a different port if non-default is used
+ proxy_pass http://trilium;
proxy_read_timeout 90;
- proxy_redirect http://127.0.0.1:8080 https://trilium.example.net; # change them based on your IP, port and domain
}
}
@@ -55,16 +61,16 @@ Configure Nginx proxy and HTTPS. The operating system here is Ubuntu 18.04.
```
location /trilium/instance-one {
+ rewrite /trilium/instance-one/(.*) /$1 break;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
- proxy_pass http://127.0.0.1:8080; # change it to a different port if non-default is used
+ proxy_pass http://trilium;
proxy_cookie_path / /trilium/instance-one
proxy_read_timeout 90;
- proxy_redirect http://127.0.0.1:8080 https://trilium.example.net; # change them based on your IP, port and domain
}
- ```
\ No newline at end of file
+ ```
diff --git a/docs/User Guide/User Guide/Theme development/Reference.md b/docs/User Guide/User Guide/Theme development/Reference.md
index 7289c5b81..5e1036ae1 100644
--- a/docs/User Guide/User Guide/Theme development/Reference.md
+++ b/docs/User Guide/User Guide/Theme development/Reference.md
@@ -171,7 +171,7 @@ When a workspace is hoisted for a given tab, it is possible to get the backgroun
## Custom fonts
-Currently the only way to include a custom font is to useĀ [Custom resource providers](../Advanced%20Usage/Custom%20Resource%20Providers.md). Basically import a font into Trilium and assign it `#customResourceProvider=fonts/myfont.ttf` and then import the font in CSS via `/custom/fonts/myfont.ttf`.
+Currently the only way to include a custom font is to use [Custom resource providers](../Advanced%20Usage/Custom%20Resource%20Providers.md). Basically import a font into Trilium and assign it `#customResourceProvider=fonts/myfont.ttf` and then import the font in CSS via `/custom/fonts/myfont.ttf`. Use `../../../custom/fonts/myfont.ttf` if you run your Trilium server on a different path than `/`.
## Dark and light themes
@@ -195,4 +195,4 @@ If the theme is auto (e.g. supports both light or dark based on `prefers-color-s
}
```
-This will affect the behavior of the Electron application by informing the operating system of the color preference (e.g. background effects will appear correct on Windows).
\ No newline at end of file
+This will affect the behavior of the Electron application by informing the operating system of the color preference (e.g. background effects will appear correct on Windows).
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 077fdef78..9143aebd1 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -77,7 +77,7 @@ importers:
version: link:apps/server
'@types/express':
specifier: ^5.0.0
- version: 5.0.2
+ version: 5.0.3
'@types/node':
specifier: 22.15.30
version: 22.15.30
@@ -495,11 +495,11 @@ importers:
specifier: 4.3.9
version: 4.3.9
'@types/compression':
- specifier: 1.8.0
- version: 1.8.0
+ specifier: 1.8.1
+ version: 1.8.1
'@types/cookie-parser':
- specifier: 1.4.8
- version: 1.4.8(@types/express@5.0.2)
+ specifier: 1.4.9
+ version: 1.4.9(@types/express@5.0.3)
'@types/debounce':
specifier: 1.2.4
version: 1.2.4
@@ -513,8 +513,8 @@ importers:
specifier: 1.6.6
version: 1.6.6
'@types/express-session':
- specifier: 1.18.1
- version: 1.18.1
+ specifier: 1.18.2
+ version: 1.18.2
'@types/fs-extra':
specifier: 11.0.4
version: 11.0.4
@@ -531,11 +531,11 @@ importers:
specifier: 21.1.7
version: 21.1.7
'@types/mime-types':
- specifier: 3.0.0
- version: 3.0.0
+ specifier: 3.0.1
+ version: 3.0.1
'@types/multer':
- specifier: 1.4.12
- version: 1.4.12
+ specifier: 1.4.13
+ version: 1.4.13
'@types/safe-compare':
specifier: 1.1.2
version: 1.1.2
@@ -549,8 +549,8 @@ importers:
specifier: 2.5.7
version: 2.5.7
'@types/serve-static':
- specifier: 1.15.7
- version: 1.15.7
+ specifier: 1.15.8
+ version: 1.15.8
'@types/session-file-store':
specifier: 1.2.5
version: 1.2.5
@@ -591,8 +591,8 @@ importers:
specifier: 2.1.0
version: 2.1.0
cheerio:
- specifier: 1.0.0
- version: 1.0.0
+ specifier: 1.1.0
+ version: 1.1.0
chokidar:
specifier: 4.0.3
version: 4.0.3
@@ -3273,8 +3273,8 @@ packages:
resolution: {integrity: sha512-wK+5pLK5XFmgtH3aQ2YVvA3HohS3xqV/OxuVOdNx9Wpnz7VE/fnC+e1A7ln6LFYeck7gOJ/dsZV6OLplOtAJ2w==}
engines: {node: '>=18'}
- '@napi-rs/wasm-runtime@0.2.10':
- resolution: {integrity: sha512-bCsCyeZEwVErsGmyPNSzwfwFn4OdxBj0mmv6hOFucB/k81Ojdu68RbZdxYsRQUPc9l6SU5F/cG+bXgWs3oUgsQ==}
+ '@napi-rs/wasm-runtime@0.2.11':
+ resolution: {integrity: sha512-9DPkXtvHydrcOsopiYpUgPHpmj0HWZKMUnL2dZqpvC42lsratuBG06V5ipyno0fUek5VlFsNQ+AcFATSrJXgMA==}
'@napi-rs/wasm-runtime@0.2.4':
resolution: {integrity: sha512-9zESzOO5aDByvhIAsOy9TbpZ0Ur2AJbUI7UT73kcUTS2mxAMHOBaa1st/jAymNoCtvrit99kkzT1FZuXVcgfIQ==}
@@ -4349,8 +4349,8 @@ packages:
'@types/better-sqlite3@7.6.13':
resolution: {integrity: sha512-NMv9ASNARoKksWtsq/SHakpYAYnhBrQgGD8zkLYk/jaK8jUGn08CfEdTRgYhMypUQAfzSP8W6gNLe0q19/t4VA==}
- '@types/body-parser@1.19.5':
- resolution: {integrity: sha512-fB3Zu92ucau0iQ0JMCFQE7b/dv8Ot07NI3KaZIkIUNXq82k4eBAqUaneXfleGY9JWskeS9y+u0nXMyspcuQrCg==}
+ '@types/body-parser@1.19.6':
+ resolution: {integrity: sha512-HLFeCYgz89uk22N5Qg3dvGvsv46B8GLvKKo1zKG4NybA8U2DiEO3w9lqGg29t/tfLRJpJ6iQxnVw4OnB7MoM9g==}
'@types/bonjour@3.5.13':
resolution: {integrity: sha512-z9fJ5Im06zvUL548KvYNecEVlA7cVDkGUi6kZusb04mpyEFKCIZJvloCcmpmLaIahDpOQGHaHmG6imtPMmPXGQ==}
@@ -4373,8 +4373,8 @@ packages:
'@types/color-name@1.1.5':
resolution: {integrity: sha512-j2K5UJqGTxeesj6oQuGpMgifpT5k9HprgQd8D1Y0lOFqKHl3PJu5GMeS4Y5EgjS55AE6OQxf8mPED9uaGbf4Cg==}
- '@types/compression@1.8.0':
- resolution: {integrity: sha512-g4vmPIwbTii9dX1HVioHbOolubEaf4re4vDxuzpKrzz9uI7uarBExi9begX0cXyIB85jXZ5X2A/v8rsHZxSAPw==}
+ '@types/compression@1.8.1':
+ resolution: {integrity: sha512-kCFuWS0ebDbmxs0AXYn6e2r2nrGAb5KwQhknjSPSPgJcGd8+HVSILlUyFhGqML2gk39HcG7D1ydW9/qpYkN00Q==}
'@types/connect-history-api-fallback@1.5.4':
resolution: {integrity: sha512-n6Cr2xS1h4uAulPRdlw6Jl6s1oG8KrVilPN2yUITEs+K48EzMJJ3W1xy8K5eWuFvjp3R74AOIGSmp2UfBJ8HFw==}
@@ -4382,8 +4382,8 @@ packages:
'@types/connect@3.4.38':
resolution: {integrity: sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==}
- '@types/cookie-parser@1.4.8':
- resolution: {integrity: sha512-l37JqFrOJ9yQfRQkljb41l0xVphc7kg5JTjjr+pLRZ0IyZ49V4BQ8vbF4Ut2C2e+WH4al3xD3ZwYwIUfnbT4NQ==}
+ '@types/cookie-parser@1.4.9':
+ resolution: {integrity: sha512-tGZiZ2Gtc4m3wIdLkZ8mkj1T6CEHb35+VApbL2T14Dew8HA7c+04dmKqsKRNC+8RJPm16JEK0tFSwdZqubfc4g==}
peerDependencies:
'@types/express': '*'
@@ -4523,14 +4523,14 @@ packages:
'@types/express-serve-static-core@5.0.6':
resolution: {integrity: sha512-3xhRnjJPkULekpSzgtoNYYcTWgEZkp4myc+Saevii5JPnHNvHMRlBSHDbs7Bh1iPPoVTERHEZXyhyLbMEsExsA==}
- '@types/express-session@1.18.1':
- resolution: {integrity: sha512-S6TkD/lljxDlQ2u/4A70luD8/ZxZcrU5pQwI1rVXCiaVIywoFgbA+PIUNDjPhQpPdK0dGleLtYc/y7XWBfclBg==}
+ '@types/express-session@1.18.2':
+ resolution: {integrity: sha512-k+I0BxwVXsnEU2hV77cCobC08kIsn4y44C3gC0b46uxZVMaXA04lSPgRLR/bSL2w0t0ShJiG8o4jPzRG/nscFg==}
- '@types/express@4.17.22':
- resolution: {integrity: sha512-eZUmSnhRX9YRSkplpz0N+k6NljUUn5l3EWZIKZvYzhvMphEuNiyyy1viH/ejgt66JWgALwC/gtSUAeQKtSwW/w==}
+ '@types/express@4.17.23':
+ resolution: {integrity: sha512-Crp6WY9aTYP3qPi2wGDo9iUe/rceX01UMhnF1jmwDcKCFM6cx7YhGP/Mpr3y9AASpfHixIG0E6azCcL5OcDHsQ==}
- '@types/express@5.0.2':
- resolution: {integrity: sha512-BtjL3ZwbCQriyb0DGw+Rt12qAXPiBTPs815lsUvtt1Grk0vLRMZNMUZ741d5rjk+UQOxfDiBZ3dxpX00vSkK3g==}
+ '@types/express@5.0.3':
+ resolution: {integrity: sha512-wGA0NX93b19/dZC1J18tKWVIYWyyF2ZjT9vin/NRu0qzzvfVzWjs04iq2rQ3H65vCTQYlRqs3YHfY7zjdV+9Kw==}
'@types/fs-extra@11.0.4':
resolution: {integrity: sha512-yTbItCNreRooED33qjunPthRcSjERP1r4MqCZc7wv0u2sUkzTFp45tgUfS5+r7FrZPdmCCNflLhVSP/o+SemsQ==}
@@ -4616,6 +4616,9 @@ packages:
'@types/mime-types@3.0.0':
resolution: {integrity: sha512-9gFWMsVgEtbsD6yY/2z8pAtnZhdRKl4Q9xmKQJy5gv0fMpzJeeWtQyd7WpdhaIbRSwPCfnjXOsNMcoQvu5giGg==}
+ '@types/mime-types@3.0.1':
+ resolution: {integrity: sha512-xRMsfuQbnRq1Ef+C+RKaENOxXX87Ygl38W1vDfPHRku02TgQr+Qd8iivLtAMcR0KF5/29xlnFihkTlbqFrGOVQ==}
+
'@types/mime@1.3.5':
resolution: {integrity: sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==}
@@ -4625,8 +4628,8 @@ packages:
'@types/ms@2.1.0':
resolution: {integrity: sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==}
- '@types/multer@1.4.12':
- resolution: {integrity: sha512-pQ2hoqvXiJt2FP9WQVLPRO+AmiIm/ZYkavPlIQnx282u4ZrVdztx0pkh3jjpQt0Kz+YI0YhSG264y08UJKoUQg==}
+ '@types/multer@1.4.13':
+ resolution: {integrity: sha512-bhhdtPw7JqCiEfC9Jimx5LqX9BDIPJEh2q/fQ4bqbBPtyEZYr3cvF22NwG0DmPZNYA0CAf2CnqDB4KIGGpJcaw==}
'@types/node-forge@1.3.11':
resolution: {integrity: sha512-FQx220y22OKNTqaByeBGqHWYz4cl94tpcxeFdvBo3wjG6XPBuZ0BNgNZRV5J5TFmmcsJ4IzsLkmGRiQbnYsBEQ==}
@@ -4687,14 +4690,17 @@ packages:
'@types/send@0.17.4':
resolution: {integrity: sha512-x2EM6TJOybec7c52BX0ZspPodMsQUd5L6PRwOunVyVUhXiBSKf3AezDL8Dgvgt5o0UfKNfuA0eMLr2wLT4AiBA==}
+ '@types/send@0.17.5':
+ resolution: {integrity: sha512-z6F2D3cOStZvuk2SaP6YrwkNO65iTZcwA2ZkSABegdkAh/lf+Aa/YQndZVfmEXT5vgAp6zv06VQ3ejSVjAny4w==}
+
'@types/serve-favicon@2.5.7':
resolution: {integrity: sha512-z9TNUQXdQ+W/OJMP1e3KOYUZ99qJS4+ZfFOIrPGImcayqKoyifbJSEFkVq1MCKBbqjMZpjPj3B5ilrQAR2+TOw==}
'@types/serve-index@1.9.4':
resolution: {integrity: sha512-qLpGZ/c2fhSs5gnYsQxtDEq3Oy8SXPClIXkW5ghvAvsNuVSA8k+gCONcUCS/UjLEYvYps+e8uBtfgXgvhwfNug==}
- '@types/serve-static@1.15.7':
- resolution: {integrity: sha512-W8Ym+h8nhuRwaKPaDw34QUkwsGi6Rc4yYqvKFo5rm2FUEhCFbzVWrxXUxuKK8TASjWsysJY0nsmNCGhCOIsrOw==}
+ '@types/serve-static@1.15.8':
+ resolution: {integrity: sha512-roei0UY3LhpOJvjbIP6ZZFngyLKl5dskOtDhxY5THRSpO+ZI+nzJ+m5yUMzGrp89YRa7lvknKkMYjqQFGwA7Sg==}
'@types/session-file-store@1.2.5':
resolution: {integrity: sha512-xjIyh40IznXLrvbAY/nmxu5cMcPcE3ZoDrSDvd02m6p8UjUgOtZAGI7Os5DDd6THuxClLWNhFo/awy1tYp64Bg==}
@@ -5713,14 +5719,14 @@ packages:
cheerio-select@2.1.0:
resolution: {integrity: sha512-9v9kG0LvzrlcungtnJtpGNxY+fzECQKhK4EGJX2vByejiMX84MFNQw4UxPJl3bFbTMw+Dfs37XaIkCwTZfLh4g==}
- cheerio@1.0.0:
- resolution: {integrity: sha512-quS9HgjQpdaXOvsZz82Oz7uxtXiy6UIsIQcpBj7HRw2M63Skasm9qlDocAM7jNuaxdhpPU7c4kJN+gA5MCu4ww==}
- engines: {node: '>=18.17'}
-
cheerio@1.0.0-rc.10:
resolution: {integrity: sha512-g0J0q/O6mW8z5zxQ3A8E8J1hUgp4SMOvEoW/x84OwyHKe/Zccz83PVT4y5Crcr530FV6NgmKI1qvGTKVl9XXVw==}
engines: {node: '>= 6'}
+ cheerio@1.1.0:
+ resolution: {integrity: sha512-+0hMx9eYhJvWbgpKV9hN7jg0JcwydpopZE4hgi+KvQtByZXPp04NiCWU0LzcAbP63abZckIHkTQaXVF52mX3xQ==}
+ engines: {node: '>=18.17'}
+
chevrotain-allstar@0.3.1:
resolution: {integrity: sha512-b7g+y9A0v4mxCW1qUhf3BSVPg+/NvGErk/dOkrDaHA0nQIQGAtrOjlX//9OQtRlSCy+x9rfB5N8yC71lH1nvMw==}
peerDependencies:
@@ -7839,15 +7845,15 @@ packages:
htmlfy@0.6.7:
resolution: {integrity: sha512-r8hRd+oIM10lufovN+zr3VKPTYEIvIwqXGucidh2XQufmiw6sbUXFUFjWlfjo3AnefIDTyzykVzQ8IUVuT1peQ==}
+ htmlparser2@10.0.0:
+ resolution: {integrity: sha512-TwAZM+zE5Tq3lrEHvOlvwgj1XLWQCtaaibSN11Q+gGBAS7Y1uZSWwXXRe4iF6OXnaq1riyQAPFOBtYc77Mxq0g==}
+
htmlparser2@6.1.0:
resolution: {integrity: sha512-gyyPk6rgonLFEDGoeRgQNaEUvdJ4ktTmmUh/h2t7s+M8oPpIPxgNACWa+6ESR57kXstwqPiCut0V8NRpcwgU7A==}
htmlparser2@8.0.2:
resolution: {integrity: sha512-GYdjWKDkbRLkZ5geuHs5NY1puJ+PXwP7+fHPRz06Eirsb9ugf6d8kkXav6ADhcODhFFPMIXyxkxSuMf3D6NCFA==}
- htmlparser2@9.1.0:
- resolution: {integrity: sha512-5zfg6mHUoaer/97TxnGpxmbR7zJtPwIYFMZ/H5ucTlPZhKvtum05yiPK3Mgai3a0DyVxv7qYqoweaEd2nrYQzQ==}
-
http-cache-semantics@4.1.1:
resolution: {integrity: sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==}
@@ -12277,14 +12283,14 @@ packages:
undici-types@6.21.0:
resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==}
- undici@6.21.2:
- resolution: {integrity: sha512-uROZWze0R0itiAKVPsYhFov9LxrPMHLMEQFszeI2gCN6bnIIZ8twzBCJcN2LJrBBLfrP0t1FW0g+JmKVl8Vk1g==}
- engines: {node: '>=18.17'}
-
undici@6.21.3:
resolution: {integrity: sha512-gBLkYIlEnSp8pFbT64yFgGE6UIB9tAkhukC23PmMDCe5Nd+cRqKxSjw5y54MK2AZMgZfJWMaNE4nYUHgi1XEOw==}
engines: {node: '>=18.17'}
+ undici@7.10.0:
+ resolution: {integrity: sha512-u5otvFBOBZvmdjWLVW+5DAc9Nkq8f24g0O9oY7qw2JVIF1VocIFoyz9JFkuVOS2j41AufeO0xnlweJ2RLT8nGw==}
+ engines: {node: '>=20.18.1'}
+
unescape@1.0.1:
resolution: {integrity: sha512-O0+af1Gs50lyH1nUu3ZyYS1cRh01Q/kUKatTOkSs7jukXE6/NebucDVxyiDsA9AQ4JC1V1jUH9EO8JX2nMDgGQ==}
engines: {node: '>=0.10.0'}
@@ -16200,7 +16206,7 @@ snapshots:
strict-event-emitter: 0.5.1
optional: true
- '@napi-rs/wasm-runtime@0.2.10':
+ '@napi-rs/wasm-runtime@0.2.11':
dependencies:
'@emnapi/core': 1.4.3
'@emnapi/runtime': 1.4.3
@@ -16607,7 +16613,7 @@ snapshots:
'@oxc-resolver/binding-wasm32-wasi@5.3.0':
dependencies:
- '@napi-rs/wasm-runtime': 0.2.10
+ '@napi-rs/wasm-runtime': 0.2.11
optional: true
'@oxc-resolver/binding-win32-arm64-msvc@5.3.0':
@@ -17395,7 +17401,7 @@ snapshots:
dependencies:
'@types/node': 22.15.21
- '@types/body-parser@1.19.5':
+ '@types/body-parser@1.19.6':
dependencies:
'@types/connect': 3.4.38
'@types/node': 22.15.30
@@ -17429,10 +17435,10 @@ snapshots:
'@types/color-name@1.1.5': {}
- '@types/compression@1.8.0':
+ '@types/compression@1.8.1':
dependencies:
- '@types/express': 4.17.22
- '@types/node': 22.15.21
+ '@types/express': 5.0.3
+ '@types/node': 22.15.30
'@types/connect-history-api-fallback@1.5.4':
dependencies:
@@ -17443,9 +17449,9 @@ snapshots:
dependencies:
'@types/node': 22.15.30
- '@types/cookie-parser@1.4.8(@types/express@5.0.2)':
+ '@types/cookie-parser@1.4.9(@types/express@5.0.3)':
dependencies:
- '@types/express': 5.0.2
+ '@types/express': 5.0.3
'@types/cookie@0.6.0':
optional: true
@@ -17603,31 +17609,31 @@ snapshots:
'@types/express-http-proxy@1.6.6':
dependencies:
- '@types/express': 5.0.2
+ '@types/express': 5.0.3
'@types/express-serve-static-core@5.0.6':
dependencies:
'@types/node': 22.15.30
'@types/qs': 6.14.0
'@types/range-parser': 1.2.7
- '@types/send': 0.17.4
+ '@types/send': 0.17.5
- '@types/express-session@1.18.1':
+ '@types/express-session@1.18.2':
dependencies:
- '@types/express': 5.0.2
+ '@types/express': 5.0.3
- '@types/express@4.17.22':
+ '@types/express@4.17.23':
dependencies:
- '@types/body-parser': 1.19.5
+ '@types/body-parser': 1.19.6
'@types/express-serve-static-core': 5.0.6
'@types/qs': 6.14.0
- '@types/serve-static': 1.15.7
+ '@types/serve-static': 1.15.8
- '@types/express@5.0.2':
+ '@types/express@5.0.3':
dependencies:
- '@types/body-parser': 1.19.5
+ '@types/body-parser': 1.19.6
'@types/express-serve-static-core': 5.0.6
- '@types/serve-static': 1.15.7
+ '@types/serve-static': 1.15.8
'@types/fs-extra@11.0.4':
dependencies:
@@ -17722,15 +17728,17 @@ snapshots:
'@types/mime-types@3.0.0': {}
+ '@types/mime-types@3.0.1': {}
+
'@types/mime@1.3.5': {}
'@types/minimatch@5.1.2': {}
'@types/ms@2.1.0': {}
- '@types/multer@1.4.12':
+ '@types/multer@1.4.13':
dependencies:
- '@types/express': 5.0.2
+ '@types/express': 5.0.3
'@types/node-forge@1.3.11':
dependencies:
@@ -17795,24 +17803,29 @@ snapshots:
'@types/mime': 1.3.5
'@types/node': 22.15.30
+ '@types/send@0.17.5':
+ dependencies:
+ '@types/mime': 1.3.5
+ '@types/node': 22.15.30
+
'@types/serve-favicon@2.5.7':
dependencies:
- '@types/express': 5.0.2
+ '@types/express': 5.0.3
'@types/serve-index@1.9.4':
dependencies:
- '@types/express': 5.0.2
+ '@types/express': 5.0.3
- '@types/serve-static@1.15.7':
+ '@types/serve-static@1.15.8':
dependencies:
'@types/http-errors': 2.0.4
- '@types/node': 22.15.21
+ '@types/node': 22.15.30
'@types/send': 0.17.4
'@types/session-file-store@1.2.5':
dependencies:
- '@types/express': 5.0.2
- '@types/express-session': 1.18.1
+ '@types/express': 5.0.3
+ '@types/express-session': 1.18.2
'@types/sinonjs__fake-timers@8.1.5': {}
@@ -17845,8 +17858,8 @@ snapshots:
'@types/swagger-ui-express@4.1.8':
dependencies:
- '@types/express': 5.0.2
- '@types/serve-static': 1.15.7
+ '@types/express': 5.0.3
+ '@types/serve-static': 1.15.8
'@types/tmp@0.2.6': {}
@@ -19116,20 +19129,6 @@ snapshots:
domhandler: 5.0.3
domutils: 3.2.2
- cheerio@1.0.0:
- dependencies:
- cheerio-select: 2.1.0
- dom-serializer: 2.0.0
- domhandler: 5.0.3
- domutils: 3.2.2
- encoding-sniffer: 0.2.0
- htmlparser2: 9.1.0
- parse5: 7.3.0
- parse5-htmlparser2-tree-adapter: 7.1.0
- parse5-parser-stream: 7.1.2
- undici: 6.21.2
- whatwg-mimetype: 4.0.0
-
cheerio@1.0.0-rc.10:
dependencies:
cheerio-select: 1.6.0
@@ -19140,6 +19139,20 @@ snapshots:
parse5-htmlparser2-tree-adapter: 6.0.1
tslib: 2.8.1
+ cheerio@1.1.0:
+ dependencies:
+ cheerio-select: 2.1.0
+ dom-serializer: 2.0.0
+ domhandler: 5.0.3
+ domutils: 3.2.2
+ encoding-sniffer: 0.2.0
+ htmlparser2: 10.0.0
+ parse5: 7.3.0
+ parse5-htmlparser2-tree-adapter: 7.1.0
+ parse5-parser-stream: 7.1.2
+ undici: 7.10.0
+ whatwg-mimetype: 4.0.0
+
chevrotain-allstar@0.3.1(chevrotain@11.0.3):
dependencies:
chevrotain: 11.0.3
@@ -21743,6 +21756,13 @@ snapshots:
htmlfy@0.6.7: {}
+ htmlparser2@10.0.0:
+ dependencies:
+ domelementtype: 2.3.0
+ domhandler: 5.0.3
+ domutils: 3.2.2
+ entities: 6.0.0
+
htmlparser2@6.1.0:
dependencies:
domelementtype: 2.3.0
@@ -21757,13 +21777,6 @@ snapshots:
domutils: 3.2.2
entities: 4.5.0
- htmlparser2@9.1.0:
- dependencies:
- domelementtype: 2.3.0
- domhandler: 5.0.3
- domutils: 3.2.2
- entities: 4.5.0
-
http-cache-semantics@4.1.1: {}
http-cache-semantics@4.2.0: {}
@@ -21818,7 +21831,7 @@ snapshots:
transitivePeerDependencies:
- supports-color
- http-proxy-middleware@2.0.9(@types/express@4.17.22):
+ http-proxy-middleware@2.0.9(@types/express@4.17.23):
dependencies:
'@types/http-proxy': 1.17.16
http-proxy: 1.18.1
@@ -21826,7 +21839,7 @@ snapshots:
is-plain-obj: 3.0.0
micromatch: 4.0.8
optionalDependencies:
- '@types/express': 4.17.22
+ '@types/express': 4.17.23
transitivePeerDependencies:
- debug
@@ -26959,10 +26972,10 @@ snapshots:
undici-types@6.21.0: {}
- undici@6.21.2: {}
-
undici@6.21.3: {}
+ undici@7.10.0: {}
+
unescape@1.0.1:
dependencies:
extend-shallow: 2.0.1
@@ -27417,7 +27430,7 @@ snapshots:
'@wdio/utils': 9.15.0
archiver: 7.0.1
aria-query: 5.3.2
- cheerio: 1.0.0
+ cheerio: 1.1.0
css-shorthand-properties: 1.1.2
css-value: 0.0.1
grapheme-splitter: 1.0.4
@@ -27461,10 +27474,10 @@ snapshots:
dependencies:
'@types/bonjour': 3.5.13
'@types/connect-history-api-fallback': 1.5.4
- '@types/express': 4.17.22
+ '@types/express': 4.17.23
'@types/express-serve-static-core': 5.0.6
'@types/serve-index': 1.9.4
- '@types/serve-static': 1.15.7
+ '@types/serve-static': 1.15.8
'@types/sockjs': 0.3.36
'@types/ws': 8.18.1
ansi-html-community: 0.0.8
@@ -27475,7 +27488,7 @@ snapshots:
connect-history-api-fallback: 2.0.0
express: 4.21.2
graceful-fs: 4.2.11
- http-proxy-middleware: 2.0.9(@types/express@4.17.22)
+ http-proxy-middleware: 2.0.9(@types/express@4.17.23)
ipaddr.js: 2.2.0
launch-editor: 2.10.0
open: 10.1.1