Mirror of https://github.com/TriliumNext/Notes.git (synced 2025-07-28 02:22:26 +08:00)

Commit adaac46fbf (parent 733fdcf8ba): "I'm 100% going to have to destroy this commit later"
@@ -1,22 +1,26 @@
 -- Add new options for AI/LLM integration
-INSERT INTO options (name, value, isSynced) VALUES ('aiEnabled', 'false', 1);
+INSERT INTO options (name, value, isSynced, utcDateModified) VALUES ('aiEnabled', 'false', 1, strftime('%Y-%m-%dT%H:%M:%fZ', 'now'));

 -- OpenAI settings
-INSERT INTO options (name, value, isSynced) VALUES ('openaiApiKey', '', 1);
+INSERT INTO options (name, value, isSynced, utcDateModified) VALUES ('openaiApiKey', '', 1, strftime('%Y-%m-%dT%H:%M:%fZ', 'now'));
-INSERT INTO options (name, value, isSynced) VALUES ('openaiDefaultModel', 'gpt-4o', 1);
+INSERT INTO options (name, value, isSynced, utcDateModified) VALUES ('openaiDefaultModel', 'gpt-4o', 1, strftime('%Y-%m-%dT%H:%M:%fZ', 'now'));
-INSERT INTO options (name, value, isSynced) VALUES ('openaiBaseUrl', 'https://api.openai.com/v1', 1);
+INSERT INTO options (name, value, isSynced, utcDateModified) VALUES ('openaiBaseUrl', 'https://api.openai.com/v1', 1, strftime('%Y-%m-%dT%H:%M:%fZ', 'now'));

 -- Anthropic settings
-INSERT INTO options (name, value, isSynced) VALUES ('anthropicApiKey', '', 1);
+INSERT INTO options (name, value, isSynced, utcDateModified) VALUES ('anthropicApiKey', '', 1, strftime('%Y-%m-%dT%H:%M:%fZ', 'now'));
-INSERT INTO options (name, value, isSynced) VALUES ('anthropicDefaultModel', 'claude-3-opus-20240229', 1);
+INSERT INTO options (name, value, isSynced, utcDateModified) VALUES ('anthropicDefaultModel', 'claude-3-opus-20240229', 1, strftime('%Y-%m-%dT%H:%M:%fZ', 'now'));
-INSERT INTO options (name, value, isSynced) VALUES ('anthropicBaseUrl', 'https://api.anthropic.com/v1', 1);
+INSERT INTO options (name, value, isSynced, utcDateModified) VALUES ('anthropicBaseUrl', 'https://api.anthropic.com/v1', 1, strftime('%Y-%m-%dT%H:%M:%fZ', 'now'));

 -- Ollama settings
-INSERT INTO options (name, value, isSynced) VALUES ('ollamaEnabled', 'false', 1);
+INSERT INTO options (name, value, isSynced, utcDateModified) VALUES ('ollamaEnabled', 'false', 1, strftime('%Y-%m-%dT%H:%M:%fZ', 'now'));
-INSERT INTO options (name, value, isSynced) VALUES ('ollamaBaseUrl', 'http://localhost:11434', 1);
+INSERT INTO options (name, value, isSynced, utcDateModified) VALUES ('ollamaBaseUrl', 'http://localhost:11434', 1, strftime('%Y-%m-%dT%H:%M:%fZ', 'now'));
-INSERT INTO options (name, value, isSynced) VALUES ('ollamaDefaultModel', 'llama3', 1);
+INSERT INTO options (name, value, isSynced, utcDateModified) VALUES ('ollamaDefaultModel', 'llama3', 1, strftime('%Y-%m-%dT%H:%M:%fZ', 'now'));
+INSERT INTO options (name, value, isSynced, utcDateModified) VALUES ('ollamaEmbeddingModel', 'nomic-embed-text', 1, strftime('%Y-%m-%dT%H:%M:%fZ', 'now'));

 -- General AI settings
-INSERT INTO options (name, value, isSynced) VALUES ('aiProviderPrecedence', 'openai,anthropic,ollama', 1);
+INSERT INTO options (name, value, isSynced, utcDateModified) VALUES ('aiProviderPrecedence', 'openai,anthropic,ollama', 1, strftime('%Y-%m-%dT%H:%M:%fZ', 'now'));
-INSERT INTO options (name, value, isSynced) VALUES ('aiTemperature', '0.7', 1);
+INSERT INTO options (name, value, isSynced, utcDateModified) VALUES ('aiTemperature', '0.7', 1, strftime('%Y-%m-%dT%H:%M:%fZ', 'now'));
-INSERT INTO options (name, value, isSynced) VALUES ('aiSystemPrompt', '', 1);
+INSERT INTO options (name, value, isSynced, utcDateModified) VALUES ('aiSystemPrompt', '', 1, strftime('%Y-%m-%dT%H:%M:%fZ', 'now'));

+-- Embedding settings
+INSERT INTO options (name, value, isSynced, utcDateModified) VALUES ('embeddingsDefaultProvider', 'openai', 1, strftime('%Y-%m-%dT%H:%M:%fZ', 'now'));
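Note: strftime('%Y-%m-%dT%H:%M:%fZ', 'now') evaluates to an ISO-8601 UTC timestamp with millisecond precision (e.g. 2025-03-10T14:23:05.123Z), so the seeded rows carry the same utcDateModified shape the application writes at runtime. A minimal TypeScript sketch of the equivalent value (the helper name is illustrative, not from this commit):

    // Produces the same "YYYY-MM-DDTHH:MM:SS.sssZ" shape as the SQLite expression above
    function utcNowDateTimeStr(): string {
        return new Date().toISOString();
    }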
@@ -83,6 +83,7 @@ export type CommandMappings = {
     closeHlt: CommandData;
     showLaunchBarSubtree: CommandData;
     showRevisions: CommandData;
+    showLlmChat: CommandData;
     showOptions: CommandData & {
         section: string;
     };
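Note: adding showLlmChat to CommandMappings is what makes the new command name type-checked wherever it is triggered; a minimal usage sketch, assuming the standard triggerCommand mechanism that the neighboring commands in this map already use:

    // e.g. from any Component subclass (menu item, shortcut handler, launcher):
    this.triggerCommand("showLlmChat");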
@@ -369,6 +369,11 @@ class NoteContext extends Component implements EventListener<"entitiesReloaded">

         const { note, viewScope } = this;

+        // For llmChat viewMode, show a custom title
+        if (viewScope?.viewMode === "llmChat") {
+            return "Chat with Notes";
+        }
+
         const isNormalView = (viewScope?.viewMode === "default" || viewScope?.viewMode === "contextual-help");
         let title = (isNormalView ? note.title : `${note.title}: ${viewScope?.viewMode}`);

@@ -7,8 +7,12 @@ import protectedSessionService from "../services/protected_session.js";
 import options from "../services/options.js";
 import froca from "../services/froca.js";
 import utils from "../services/utils.js";
+import LlmChatPanel from "../widgets/llm_chat_panel.js";
+import toastService from "../services/toast.js";

 export default class RootCommandExecutor extends Component {
+    private llmChatPanel: any = null;
+
     editReadOnlyNoteCommand() {
         const noteContext = appContext.tabManager.getActiveContext();
         if (noteContext?.viewScope) {

@@ -226,4 +230,23 @@ export default class RootCommandExecutor extends Component {
             appContext.tabManager.activateNoteContext(tab.ntxId);
         }
     }
+
+    async showLlmChatCommand() {
+        console.log("showLlmChatCommand triggered");
+        toastService.showMessage("Opening LLM Chat...");
+
+        try {
+            // We'll use the Note Map approach - open a known note ID that corresponds to the LLM chat panel
+            await appContext.tabManager.openTabWithNoteWithHoisting("_globalNoteMap", {
+                activate: true,
+                viewScope: {
+                    viewMode: "llmChat" // We'll need to handle this custom view mode elsewhere
+                }
+            });
+        }
+        catch (e) {
+            console.error("Error opening LLM Chat:", e);
+            toastService.showError("Failed to open LLM Chat: " + (e as Error).message);
+        }
+    }
 }
@@ -123,6 +123,8 @@ export default class LauncherWidget extends BasicWidget {
             return new TodayLauncher(note);
         case "quickSearch":
             return new QuickSearchLauncherWidget(this.isHorizontalLayout);
+        case "llmChatLauncher":
+            return new ScriptLauncher(note);
         default:
             throw new Error(`Unrecognized builtin widget ${builtinWidget} for launcher ${note.noteId} "${note.title}"`);
     }
src/public/app/widgets/llm_chat_panel.ts (new file, 246 lines)
@@ -0,0 +1,246 @@

import BasicWidget from "./basic_widget.js";
import toastService from "../services/toast.js";
import server from "../services/server.js";
import appContext from "../components/app_context.js";
import utils from "../services/utils.js";
import { t } from "../services/i18n.js";

interface ChatResponse {
    id: string;
    messages: Array<{role: string; content: string}>;
    sources?: Array<{noteId: string; title: string}>;
}

interface SessionResponse {
    id: string;
    title: string;
}

export default class LlmChatPanel extends BasicWidget {
    private noteContextChatMessages!: HTMLElement;
    private noteContextChatForm!: HTMLFormElement;
    private noteContextChatInput!: HTMLTextAreaElement;
    private noteContextChatSendButton!: HTMLButtonElement;
    private chatContainer!: HTMLElement;
    private loadingIndicator!: HTMLElement;
    private sourcesList!: HTMLElement;
    private sessionId: string | null = null;
    private currentNoteId: string | null = null;

    doRender() {
        this.$widget = $(`
            <div class="note-context-chat h-100 w-100 d-flex flex-column">
                <div class="note-context-chat-container flex-grow-1 overflow-auto p-3">
                    <div class="note-context-chat-messages"></div>
                    <div class="loading-indicator" style="display: none;">
                        <div class="spinner-border spinner-border-sm text-primary" role="status">
                            <span class="visually-hidden">Loading...</span>
                        </div>
                        <span class="ms-2">${t('common.processing')}...</span>
                    </div>
                </div>

                <div class="sources-container p-2 border-top" style="display: none;">
                    <h6 class="m-0 p-1">${t('ai.sources')}</h6>
                    <div class="sources-list"></div>
                </div>

                <form class="note-context-chat-form d-flex border-top p-2">
                    <textarea
                        class="form-control note-context-chat-input"
                        placeholder="${t('ai.enter_message')}"
                        rows="3"
                    ></textarea>
                    <button type="submit" class="btn btn-primary note-context-chat-send-button ms-2">
                        <i class="bx bx-send"></i>
                    </button>
                </form>
            </div>
        `);

        const element = this.$widget[0];
        this.noteContextChatMessages = element.querySelector('.note-context-chat-messages') as HTMLElement;
        this.noteContextChatForm = element.querySelector('.note-context-chat-form') as HTMLFormElement;
        this.noteContextChatInput = element.querySelector('.note-context-chat-input') as HTMLTextAreaElement;
        this.noteContextChatSendButton = element.querySelector('.note-context-chat-send-button') as HTMLButtonElement;
        this.chatContainer = element.querySelector('.note-context-chat-container') as HTMLElement;
        this.loadingIndicator = element.querySelector('.loading-indicator') as HTMLElement;
        this.sourcesList = element.querySelector('.sources-list') as HTMLElement;

        this.initializeEventListeners();

        // Create a session when first loaded
        this.createChatSession();

        return this.$widget;
    }

    async refresh() {
        if (!this.isVisible()) {
            return;
        }

        // Get current note context if needed
        this.currentNoteId = appContext.tabManager.getActiveContext()?.note?.noteId || null;

        if (!this.sessionId) {
            // Create a new chat session
            await this.createChatSession();
        }
    }

    private async createChatSession() {
        try {
            const resp = await server.post<SessionResponse>('llm/sessions', {
                title: 'Note Chat'
            });

            if (resp && resp.id) {
                this.sessionId = resp.id;
            }
        } catch (error) {
            console.error('Failed to create chat session:', error);
            toastService.showError('Failed to create chat session');
        }
    }

    private async sendMessage(content: string) {
        if (!content.trim() || !this.sessionId) {
            return;
        }

        this.showLoadingIndicator();

        try {
            // Add user message to chat
            this.addMessageToChat('user', content);
            this.noteContextChatInput.value = '';

            // Get AI settings
            const useRAG = true; // Always use RAG for this widget

            // Send message to server
            const response = await server.post<ChatResponse>('llm/sessions/' + this.sessionId + '/messages', {
                sessionId: this.sessionId,
                content: content,
                options: {
                    useRAG: useRAG
                }
            });

            // Get the assistant's message (last one)
            if (response?.messages?.length) {
                const messages = response.messages;
                const lastMessage = messages[messages.length - 1];

                if (lastMessage && lastMessage.role === 'assistant') {
                    this.addMessageToChat('assistant', lastMessage.content);
                }
            }

            // Display sources if available
            if (response?.sources?.length) {
                this.showSources(response.sources);
            } else {
                this.hideSources();
            }

        } catch (error) {
            console.error('Failed to send message:', error);
            toastService.showError('Failed to send message to AI');
        } finally {
            this.hideLoadingIndicator();
        }
    }

    private addMessageToChat(role: 'user' | 'assistant', content: string) {
        const messageElement = document.createElement('div');
        messageElement.className = `chat-message ${role}-message mb-3`;

        const avatarElement = document.createElement('div');
        avatarElement.className = 'message-avatar';
        avatarElement.innerHTML = role === 'user'
            ? '<i class="bx bx-user"></i>'
            : '<i class="bx bx-bot"></i>';

        const contentElement = document.createElement('div');
        contentElement.className = 'message-content p-3';

        // Use a simple markdown formatter if utils.formatMarkdown is not available
        let formattedContent = content
            .replace(/```([\s\S]*?)```/g, '<pre><code>$1</code></pre>')
            .replace(/`([^`]+)`/g, '<code>$1</code>')
            .replace(/\*\*([^*]+)\*\*/g, '<strong>$1</strong>')
            .replace(/\*([^*]+)\*/g, '<em>$1</em>')
            .replace(/\n/g, '<br>');

        contentElement.innerHTML = formattedContent;

        messageElement.appendChild(avatarElement);
        messageElement.appendChild(contentElement);

        this.noteContextChatMessages.appendChild(messageElement);

        // Scroll to bottom
        this.chatContainer.scrollTop = this.chatContainer.scrollHeight;
    }

    private showSources(sources: Array<{noteId: string, title: string}>) {
        this.sourcesList.innerHTML = '';

        sources.forEach(source => {
            const sourceElement = document.createElement('div');
            sourceElement.className = 'source-item p-1';
            sourceElement.innerHTML = `<a href="#" data-note-id="${source.noteId}" class="source-link">${source.title}</a>`;

            sourceElement.querySelector('.source-link')?.addEventListener('click', (e) => {
                e.preventDefault();
                appContext.tabManager.openTabWithNoteWithHoisting(source.noteId);
            });

            this.sourcesList.appendChild(sourceElement);
        });

        const sourcesContainer = this.$widget[0].querySelector('.sources-container') as HTMLElement;
        if (sourcesContainer) {
            sourcesContainer.style.display = 'block';
        }
    }

    private hideSources() {
        const sourcesContainer = this.$widget[0].querySelector('.sources-container') as HTMLElement;
        if (sourcesContainer) {
            sourcesContainer.style.display = 'none';
        }
    }

    private showLoadingIndicator() {
        this.loadingIndicator.style.display = 'flex';
    }

    private hideLoadingIndicator() {
        this.loadingIndicator.style.display = 'none';
    }

    private initializeEventListeners() {
        this.noteContextChatForm.addEventListener('submit', (e) => {
            e.preventDefault();
            const content = this.noteContextChatInput.value;
            this.sendMessage(content);
        });

        // Add auto-resize functionality to the textarea
        this.noteContextChatInput.addEventListener('input', () => {
            this.noteContextChatInput.style.height = 'auto';
            this.noteContextChatInput.style.height = `${this.noteContextChatInput.scrollHeight}px`;
        });

        // Handle Enter key (send on Enter, new line on Shift+Enter)
        this.noteContextChatInput.addEventListener('keydown', (e) => {
            if (e.key === 'Enter' && !e.shiftKey) {
                e.preventDefault();
                this.noteContextChatForm.dispatchEvent(new Event('submit'));
            }
        });
    }
}
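Note: LlmChatPanel is a plain BasicWidget, so it can be mounted by any host container; a minimal hosting sketch, assuming BasicWidget's usual render() contract (the container selector is hypothetical):

    import LlmChatPanel from "./llm_chat_panel.js";

    const panel = new LlmChatPanel();
    $("#llm-chat-host").append(panel.render()); // doRender() builds the DOM and kicks off createChatSession()
    await panel.refresh();                      // re-checks session/visibility; no-op while the panel is hidden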
@@ -35,6 +35,7 @@ import GeoMapTypeWidget from "./type_widgets/geo_map.js";
 import utils from "../services/utils.js";
 import type { NoteType } from "../entities/fnote.js";
 import type TypeWidget from "./type_widgets/type_widget.js";
+import LlmChatTypeWidget from "./type_widgets/llm_chat.js";

 const TPL = `
 <div class="note-detail">

@@ -72,7 +73,8 @@ const typeWidgetClasses = {
     attachmentDetail: AttachmentDetailTypeWidget,
     attachmentList: AttachmentListTypeWidget,
     mindMap: MindMapWidget,
-    geoMap: GeoMapTypeWidget
+    geoMap: GeoMapTypeWidget,
+    llmChat: LlmChatTypeWidget
 };

 /**

@@ -88,7 +90,8 @@ type ExtendedNoteType =
     | "editableCode"
     | "attachmentDetail"
     | "attachmentList"
-    | "protectedSession";
+    | "protectedSession"
+    | "llmChat";

 export default class NoteDetailWidget extends NoteContextAwareWidget {

@@ -211,17 +214,19 @@ export default class NoteDetailWidget extends NoteContextAwareWidget {

     async getWidgetType(): Promise<ExtendedNoteType> {
         const note = this.note;

         if (!note) {
             return "empty";
         }

-        let type: NoteType = note.type;
+        const type = note.type;
         let resultingType: ExtendedNoteType;
         const viewScope = this.noteContext?.viewScope;

         if (viewScope?.viewMode === "source") {
             resultingType = "readOnlyCode";
+        } else if (viewScope?.viewMode === "llmChat") {
+            // Special handling for our LLM Chat view mode
+            resultingType = "llmChat"; // This will need to be added to the ExtendedNoteType
         } else if (viewScope && viewScope.viewMode === "attachments") {
             resultingType = viewScope.attachmentId ? "attachmentDetail" : "attachmentList";
         } else if (type === "text" && (await this.noteContext?.isReadOnly())) {
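Note: the resolution chain for the new view mode is viewScope.viewMode === "llmChat" -> getWidgetType() returns "llmChat" -> typeWidgetClasses["llmChat"] -> LlmChatTypeWidget. A condensed sketch of how NoteDetailWidget presumably consumes that mapping (simplified from the surrounding file, not verbatim):

    const widgetType = await this.getWidgetType();   // "llmChat" when viewScope.viewMode === "llmChat"
    const Clazz = typeWidgetClasses[widgetType];     // LlmChatTypeWidget from the map above
    this.typeWidget = new Clazz();                   // rendered into the note-detail container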
src/public/app/widgets/type_widgets/llm_chat.ts (new file, 51 lines)
@@ -0,0 +1,51 @@

import TypeWidget from "./type_widget.js";
import LlmChatPanel from "../llm_chat_panel.js";
import { type EventData } from "../../components/app_context.js";
import type FNote from "../../entities/fnote.js";

export default class LlmChatTypeWidget extends TypeWidget {
    private llmChatPanel: LlmChatPanel;
    private isInitialized: boolean = false;

    constructor() {
        super();
        this.llmChatPanel = new LlmChatPanel();
    }

    static getType() {
        return "llmChat";
    }

    doRender() {
        this.$widget = $('<div class="llm-chat-widget-container" style="height: 100%;"></div>');
        this.$widget.append(this.llmChatPanel.render());

        return this.$widget;
    }

    async doRefresh(note: FNote | null | undefined) {
        // Initialize only once
        if (!this.isInitialized) {
            console.log("Initializing LLM Chat Panel");
            await this.llmChatPanel.refresh();
            this.isInitialized = true;
        }
    }

    async entitiesReloadedEvent(data: EventData<"entitiesReloaded">) {
        // We don't need to refresh on entities reloaded for the chat
    }

    async activeContextChangedEvent(data: EventData<"activeContextChanged">) {
        // Only refresh when this becomes active and we're not initialized yet
        if (this.isActive() && !this.isInitialized) {
            await this.llmChatPanel.refresh();
            this.isInitialized = true;
        }
    }

    // Handle data saving - we don't need to save anything
    getData() {
        return {};
    }
}
@@ -159,6 +159,17 @@ export default class AiSettingsWidget extends OptionsWidget {
     <div class="embedding-section">
         <h5>${t("ai_llm.embedding_configuration")}</h5>

+        <div class="form-group">
+            <label>${t("ai_llm.embedding_default_provider")}</label>
+            <select class="embedding-default-provider form-control">
+                <option value="openai">OpenAI</option>
+                <option value="anthropic">Anthropic</option>
+                <option value="ollama">Ollama</option>
+                <option value="local">Local</option>
+            </select>
+            <div class="help-text">${t("ai_llm.embedding_default_provider_description")}</div>
+        </div>
+
         <div class="form-group">
             <label>
                 <input class="embedding-auto-update-enabled" type="checkbox">

@@ -351,6 +362,11 @@ export default class AiSettingsWidget extends OptionsWidget {
             await this.updateOption('embeddingAutoUpdateEnabled', $embeddingAutoUpdateEnabled.prop('checked') ? "true" : "false");
         });

+        const $embeddingDefaultProvider = this.$widget.find('.embedding-default-provider');
+        $embeddingDefaultProvider.on('change', async () => {
+            await this.updateOption('embeddingsDefaultProvider', $embeddingDefaultProvider.val() as string);
+        });
+
         const $embeddingBatchSize = this.$widget.find('.embedding-batch-size');
         $embeddingBatchSize.on('change', async () => {
             await this.updateOption('embeddingBatchSize', $embeddingBatchSize.val() as string);

@@ -543,6 +559,7 @@ export default class AiSettingsWidget extends OptionsWidget {
         this.$widget.find('.ollama-embedding-model').val(options.ollamaEmbeddingModel || 'nomic-embed-text');

         // Load embedding options
+        this.$widget.find('.embedding-default-provider').val(options.embeddingsDefaultProvider || 'openai');
         this.setCheckboxState(this.$widget.find('.embedding-auto-update-enabled'), options.embeddingAutoUpdateEnabled);
         this.$widget.find('.embedding-batch-size').val(options.embeddingBatchSize);
         this.$widget.find('.embedding-update-interval').val(options.embeddingUpdateInterval);
src/public/stylesheets/theme-next/llm-chat.css
Normal file
122
src/public/stylesheets/theme-next/llm-chat.css
Normal file
@ -0,0 +1,122 @@
|
|||||||
|
/* LLM Chat Launcher Widget Styles */
|
||||||
|
.note-context-chat {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
height: 100%;
|
||||||
|
width: 100%;
|
||||||
|
}
|
||||||
|
|
||||||
|
.note-context-chat-container {
|
||||||
|
flex-grow: 1;
|
||||||
|
overflow-y: auto;
|
||||||
|
padding: 15px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.chat-message {
|
||||||
|
display: flex;
|
||||||
|
margin-bottom: 15px;
|
||||||
|
max-width: 85%;
|
||||||
|
}
|
||||||
|
|
||||||
|
.chat-message.user-message {
|
||||||
|
margin-left: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
.chat-message.assistant-message {
|
||||||
|
margin-right: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
.message-avatar {
|
||||||
|
width: 32px;
|
||||||
|
height: 32px;
|
||||||
|
border-radius: 50%;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
margin-right: 8px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.user-message .message-avatar {
|
||||||
|
background-color: var(--primary-color);
|
||||||
|
color: white;
|
||||||
|
}
|
||||||
|
|
||||||
|
.assistant-message .message-avatar {
|
||||||
|
background-color: var(--secondary-color);
|
||||||
|
color: white;
|
||||||
|
}
|
||||||
|
|
||||||
|
.message-content {
|
||||||
|
background-color: var(--more-accented-background-color);
|
||||||
|
border-radius: 12px;
|
||||||
|
padding: 10px 15px;
|
||||||
|
max-width: calc(100% - 40px);
|
||||||
|
}
|
||||||
|
|
||||||
|
.user-message .message-content {
|
||||||
|
background-color: var(--accented-background-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
.message-content pre {
|
||||||
|
background-color: var(--code-background-color);
|
||||||
|
border-radius: 5px;
|
||||||
|
padding: 10px;
|
||||||
|
overflow-x: auto;
|
||||||
|
max-width: 100%;
|
||||||
|
}
|
||||||
|
|
||||||
|
.message-content code {
|
||||||
|
background-color: var(--code-background-color);
|
||||||
|
padding: 2px 4px;
|
||||||
|
border-radius: 3px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.loading-indicator {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
margin: 10px 0;
|
||||||
|
color: var(--muted-text-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
.sources-container {
|
||||||
|
background-color: var(--accented-background-color);
|
||||||
|
border-top: 1px solid var(--main-border-color);
|
||||||
|
padding: 8px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.sources-list {
|
||||||
|
font-size: 0.9em;
|
||||||
|
}
|
||||||
|
|
||||||
|
.source-item {
|
||||||
|
padding: 4px 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.source-link {
|
||||||
|
color: var(--link-color);
|
||||||
|
text-decoration: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.source-link:hover {
|
||||||
|
text-decoration: underline;
|
||||||
|
}
|
||||||
|
|
||||||
|
.note-context-chat-form {
|
||||||
|
display: flex;
|
||||||
|
background-color: var(--main-background-color);
|
||||||
|
border-top: 1px solid var(--main-border-color);
|
||||||
|
padding: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.note-context-chat-input {
|
||||||
|
resize: vertical;
|
||||||
|
min-height: 44px;
|
||||||
|
max-height: 200px;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Responsive adjustments */
|
||||||
|
@media (max-width: 768px) {
|
||||||
|
.chat-message {
|
||||||
|
max-width: 95%;
|
||||||
|
}
|
||||||
|
}
|
src/routes/api/llm.ts (new file, 721 lines)
@@ -0,0 +1,721 @@

import type { Request, Response } from "express";
import log from "../../services/log.js";
import options from "../../services/options.js";
// @ts-ignore
import { v4 as uuidv4 } from 'uuid';
import becca from "../../becca/becca.js";
import vectorStore from "../../services/llm/embeddings/vector_store.js";
import providerManager from "../../services/llm/embeddings/providers.js";
import type { Message, ChatCompletionOptions } from "../../services/llm/ai_interface.js";
// Import this way to prevent immediate instantiation
import * as aiServiceManagerModule from "../../services/llm/ai_service_manager.js";

// Define basic interfaces
interface ChatMessage {
    role: 'user' | 'assistant' | 'system';
    content: string;
    timestamp?: Date;
}

interface ChatSession {
    id: string;
    title: string;
    messages: ChatMessage[];
    createdAt: Date;
    lastActive: Date;
    noteContext?: string; // Optional noteId that provides context
    metadata: Record<string, any>;
}

interface NoteSource {
    noteId: string;
    title: string;
    content?: string;
    similarity?: number;
    branchId?: string;
}

interface SessionOptions {
    title?: string;
    systemPrompt?: string;
    temperature?: number;
    maxTokens?: number;
    model?: string;
    provider?: string;
    contextNoteId?: string;
}

// In-memory storage for sessions
// In a production app, this should be stored in a database
const sessions = new Map<string, ChatSession>();

// Flag to track if cleanup timer has been initialized
let cleanupInitialized = false;

/**
 * Initialize the cleanup timer if not already running
 * Only call this after database is initialized
 */
function initializeCleanupTimer() {
    if (cleanupInitialized) {
        return;
    }

    // Utility function to clean sessions older than 12 hours
    function cleanupOldSessions() {
        const twelveHoursAgo = new Date(Date.now() - 12 * 60 * 60 * 1000);
        for (const [sessionId, session] of sessions.entries()) {
            if (session.lastActive < twelveHoursAgo) {
                sessions.delete(sessionId);
            }
        }
    }

    // Run cleanup every hour
    setInterval(cleanupOldSessions, 60 * 60 * 1000);
    cleanupInitialized = true;
}

/**
 * Check if the database is initialized
 */
function isDatabaseInitialized(): boolean {
    try {
        options.getOption('initialized');
        return true;
    } catch (error) {
        return false;
    }
}

/**
 * Get the AI service manager in a way that doesn't crash at startup
 */
function safelyUseAIManager(): boolean {
    // Only use AI manager if database is initialized
    if (!isDatabaseInitialized()) {
        return false;
    }

    // Try to access the manager - will create instance only if needed
    try {
        return aiServiceManagerModule.default.isAnyServiceAvailable();
    } catch (error) {
        log.error(`Error accessing AI service manager: ${error}`);
        return false;
    }
}

/**
 * Create a new LLM chat session
 */
async function createSession(req: Request, res: Response) {
    try {
        // Initialize cleanup if not already done
        initializeCleanupTimer();

        const options: SessionOptions = req.body || {};
        const title = options.title || 'Chat Session';

        const sessionId = uuidv4();
        const now = new Date();

        // Initial system message if provided
        const messages: ChatMessage[] = [];
        if (options.systemPrompt) {
            messages.push({
                role: 'system',
                content: options.systemPrompt,
                timestamp: now
            });
        }

        // Store session info
        sessions.set(sessionId, {
            id: sessionId,
            title,
            messages,
            createdAt: now,
            lastActive: now,
            noteContext: options.contextNoteId,
            metadata: {
                temperature: options.temperature,
                maxTokens: options.maxTokens,
                model: options.model,
                provider: options.provider
            }
        });

        return {
            id: sessionId,
            title,
            createdAt: now
        };
    } catch (error: any) {
        log.error(`Error creating LLM session: ${error.message || 'Unknown error'}`);
        throw new Error(`Failed to create LLM session: ${error.message || 'Unknown error'}`);
    }
}

/**
 * Get session details
 */
async function getSession(req: Request, res: Response) {
    try {
        const { sessionId } = req.params;

        // Check if session exists
        const session = sessions.get(sessionId);
        if (!session) {
            throw new Error(`Session with ID ${sessionId} not found`);
        }

        // Return session without internal metadata
        return {
            id: session.id,
            title: session.title,
            createdAt: session.createdAt,
            lastActive: session.lastActive,
            messages: session.messages,
            noteContext: session.noteContext
        };
    } catch (error: any) {
        log.error(`Error getting LLM session: ${error.message || 'Unknown error'}`);
        throw new Error(`Failed to get session: ${error.message || 'Unknown error'}`);
    }
}

/**
 * Update session properties
 */
async function updateSession(req: Request, res: Response) {
    try {
        const { sessionId } = req.params;
        const updates = req.body || {};

        // Check if session exists
        const session = sessions.get(sessionId);
        if (!session) {
            throw new Error(`Session with ID ${sessionId} not found`);
        }

        // Update allowed fields
        if (updates.title) {
            session.title = updates.title;
        }

        if (updates.noteContext) {
            session.noteContext = updates.noteContext;
        }

        // Update metadata
        if (updates.temperature !== undefined) {
            session.metadata.temperature = updates.temperature;
        }

        if (updates.maxTokens !== undefined) {
            session.metadata.maxTokens = updates.maxTokens;
        }

        if (updates.model) {
            session.metadata.model = updates.model;
        }

        if (updates.provider) {
            session.metadata.provider = updates.provider;
        }

        // Update timestamp
        session.lastActive = new Date();

        return {
            id: session.id,
            title: session.title,
            updatedAt: session.lastActive
        };
    } catch (error: any) {
        log.error(`Error updating LLM session: ${error.message || 'Unknown error'}`);
        throw new Error(`Failed to update session: ${error.message || 'Unknown error'}`);
    }
}

/**
 * List active sessions
 */
async function listSessions(req: Request, res: Response) {
    try {
        const sessionList = Array.from(sessions.values()).map(session => ({
            id: session.id,
            title: session.title,
            createdAt: session.createdAt,
            lastActive: session.lastActive,
            messageCount: session.messages.length
        }));

        // Sort by last activity (most recent first)
        sessionList.sort((a, b) => b.lastActive.getTime() - a.lastActive.getTime());

        return {
            sessions: sessionList
        };
    } catch (error: any) {
        log.error(`Error listing LLM sessions: ${error.message || 'Unknown error'}`);
        throw new Error(`Failed to list sessions: ${error.message || 'Unknown error'}`);
    }
}

/**
 * Delete a session
 */
async function deleteSession(req: Request, res: Response) {
    try {
        const { sessionId } = req.params;

        // Check if session exists
        if (!sessions.has(sessionId)) {
            throw new Error(`Session with ID ${sessionId} not found`);
        }

        // Delete session
        sessions.delete(sessionId);

        return {
            success: true,
            message: `Session ${sessionId} deleted successfully`
        };
    } catch (error: any) {
        log.error(`Error deleting LLM session: ${error.message || 'Unknown error'}`);
        throw new Error(`Failed to delete session: ${error.message || 'Unknown error'}`);
    }
}

/**
 * Find relevant notes using vector embeddings
 */
async function findRelevantNotes(query: string, contextNoteId: string | null = null, limit = 5): Promise<NoteSource[]> {
    try {
        // Only proceed if database is initialized
        if (!isDatabaseInitialized()) {
            log.info('Database not initialized, skipping vector search');
            return [{
                noteId: "root",
                title: "Database not initialized yet",
                content: "Please wait for database initialization to complete."
            }];
        }

        // Get the default embedding provider
        let providerId;
        try {
            // @ts-ignore - embeddingsDefaultProvider exists but might not be in the TypeScript definitions
            providerId = await options.getOption('embeddingsDefaultProvider') || 'openai';
        } catch (error) {
            log.info('Could not get default embedding provider, using mock data');
            return [{
                noteId: "root",
                title: "Embeddings not configured",
                content: "Embedding provider not available"
            }];
        }

        const provider = providerManager.getEmbeddingProvider(providerId);

        if (!provider) {
            log.info(`Embedding provider ${providerId} not found, using mock data`);
            return [{
                noteId: "root",
                title: "Embeddings not available",
                content: "No embedding provider available"
            }];
        }

        // Generate embedding for the query
        const embedding = await provider.generateEmbeddings(query);

        // Find similar notes
        const modelId = 'default'; // Use default model for the provider
        const similarNotes = await vectorStore.findSimilarNotes(
            embedding, providerId, modelId, limit, 0.6 // Lower threshold to find more results
        );

        // If a context note was provided, check if we should include its children
        if (contextNoteId) {
            const contextNote = becca.getNote(contextNoteId);
            if (contextNote) {
                const childNotes = contextNote.getChildNotes();
                if (childNotes.length > 0) {
                    // Add relevant children that weren't already included
                    const childIds = new Set(childNotes.map(note => note.noteId));
                    const existingIds = new Set(similarNotes.map(note => note.noteId));

                    // Find children that aren't already in the similar notes
                    const missingChildIds = Array.from(childIds).filter(id => !existingIds.has(id));

                    // Add up to 3 children that weren't already included
                    for (const noteId of missingChildIds.slice(0, 3)) {
                        similarNotes.push({
                            noteId,
                            similarity: 0.75 // Fixed similarity score for context children
                        });
                    }
                }
            }
        }

        // Get note content for context
        return await Promise.all(similarNotes.map(async ({ noteId, similarity }) => {
            const note = becca.getNote(noteId);
            if (!note) {
                return {
                    noteId,
                    title: "Unknown Note",
                    similarity
                };
            }

            // Get note content
            let content = '';
            try {
                // @ts-ignore - Content can be string or Buffer
                const noteContent = await note.getContent();
                content = typeof noteContent === 'string' ? noteContent : noteContent.toString('utf8');

                // Truncate content if it's too long (for performance)
                if (content.length > 2000) {
                    content = content.substring(0, 2000) + "...";
                }
            } catch (e) {
                log.error(`Error getting content for note ${noteId}: ${e}`);
            }

            // Get a branch ID for navigation
            let branchId;
            try {
                const branches = note.getBranches();
                if (branches.length > 0) {
                    branchId = branches[0].branchId;
                }
            } catch (e) {
                log.error(`Error getting branch for note ${noteId}: ${e}`);
            }

            return {
                noteId,
                title: note.title,
                content,
                similarity,
                branchId
            };
        }));
    } catch (error) {
        log.error(`Error finding relevant notes: ${error}`);
        // Return empty array on error
        return [];
    }
}

/**
 * Build a context string from relevant notes
 */
function buildContextFromNotes(sources: NoteSource[], query: string): string {
    console.log("Building context from notes with query:", query);
    console.log("Sources length:", sources ? sources.length : 0);

    // If no sources are available, just return the query without additional context
    if (!sources || sources.length === 0) {
        console.log("No sources available, using just the query");
        return query || '';
    }

    const noteContexts = sources
        .filter(source => source.content) // Only include sources with content
        .map((source, index) => {
            // Format each note as a section in the context
            return `[NOTE ${index + 1}: ${source.title}]\n${source.content || 'No content available'}`;
        })
        .join('\n\n');

    if (!noteContexts) {
        console.log("After filtering, no valid note contexts remain - using just the query");
        return query || '';
    }

    // Build a complete context prompt
    return `I'll provide you with relevant notes from my knowledge base to help answer the question. Please use this information when responding:

${noteContexts}

Now, based on the above notes, please answer: ${query}`;
}

/**
 * Send a message to an LLM chat session and get a response
 */
async function sendMessage(req: Request, res: Response) {
    try {
        const { sessionId, content, temperature, maxTokens, provider, model } = req.body;

        console.log("Received message request:", {
            sessionId,
            contentLength: content ? content.length : 0,
            contentPreview: content ? content.substring(0, 50) + (content.length > 50 ? '...' : '') : 'undefined',
            temperature,
            maxTokens,
            provider,
            model
        });

        if (!sessionId) {
            throw new Error('Session ID is required');
        }

        if (!content || typeof content !== 'string' || content.trim().length === 0) {
            throw new Error('Content cannot be empty');
        }

        // Check if streaming is requested
        const wantsStream = (req.headers as any)['accept']?.includes('text/event-stream');

        // If client wants streaming, set up SSE response
        if (wantsStream) {
            res.setHeader('Content-Type', 'text/event-stream');
            res.setHeader('Cache-Control', 'no-cache');
            res.setHeader('Connection', 'keep-alive');

            // Get chat session
            let session = sessions.get(sessionId);
            if (!session) {
                const newSession = await createSession(req, res);
                if (!newSession) {
                    throw new Error('Failed to create session');
                }
                // Add required properties to match ChatSession interface
                session = {
                    ...newSession,
                    messages: [],
                    lastActive: new Date(),
                    metadata: {}
                };
                sessions.set(sessionId, session);
            }

            // Add user message to session
            const userMessage: ChatMessage = {
                role: 'user',
                content: content,
                timestamp: new Date()
            };
            console.log("Created user message:", {
                role: userMessage.role,
                contentLength: userMessage.content?.length || 0,
                contentPreview: userMessage.content?.substring(0, 50) + (userMessage.content?.length > 50 ? '...' : '') || 'undefined'
            });
            session.messages.push(userMessage);

            // Get context for query
            const sources = await findRelevantNotes(content, session.noteContext || null);

            // Format messages for AI with proper type casting
            const aiMessages: Message[] = [
                { role: 'system', content: 'You are a helpful assistant for Trilium Notes. When providing answers, use only the context provided in the notes. If the information is not in the notes, say so.' },
                { role: 'user', content: buildContextFromNotes(sources, content) }
            ];

            // Ensure we're not sending empty content
            console.log("Final message content length:", aiMessages[1].content.length);
            console.log("Final message content preview:", aiMessages[1].content.substring(0, 100));

            try {
                // Send initial SSE message with session info
                const sourcesForResponse = sources.map(({ noteId, title, similarity, branchId }) => ({
                    noteId,
                    title,
                    similarity: similarity ? Math.round(similarity * 100) / 100 : undefined,
                    branchId
                }));

                res.write(`data: ${JSON.stringify({
                    type: 'init',
                    session: {
                        id: sessionId,
                        messages: session.messages.slice(0, -1), // Don't include the new message yet
                        sources: sourcesForResponse
                    }
                })}\n\n`);

                // Get AI response with streaming enabled
                const aiResponse = await aiServiceManagerModule.default.generateChatCompletion(aiMessages, {
                    temperature,
                    maxTokens,
                    model: provider ? `${provider}:${model}` : model,
                    stream: true
                });

                if (aiResponse.stream) {
                    // Create an empty assistant message
                    const assistantMessage: ChatMessage = {
                        role: 'assistant',
                        content: '',
                        timestamp: new Date()
                    };
                    session.messages.push(assistantMessage);

                    // Stream the response chunks
                    await aiResponse.stream(async (chunk) => {
                        if (chunk.text) {
                            // Update the message content
                            assistantMessage.content += chunk.text;

                            // Send chunk to client
                            res.write(`data: ${JSON.stringify({
                                type: 'chunk',
                                text: chunk.text,
                                done: chunk.done
                            })}\n\n`);
                        }

                        if (chunk.done) {
                            // Send final message with complete response
                            res.write(`data: ${JSON.stringify({
                                type: 'done',
                                session: {
                                    id: sessionId,
                                    messages: session.messages,
                                    sources: sourcesForResponse
                                }
                            })}\n\n`);

                            res.end();
                        }
                    });

                    return; // Early return for streaming
                } else {
                    // Fallback for non-streaming response
                    const assistantMessage: ChatMessage = {
                        role: 'assistant',
                        content: aiResponse.text,
                        timestamp: new Date()
                    };
                    session.messages.push(assistantMessage);

                    // Send complete response
                    res.write(`data: ${JSON.stringify({
                        type: 'done',
                        session: {
                            id: sessionId,
                            messages: session.messages,
                            sources: sourcesForResponse
                        }
                    })}\n\n`);

                    res.end();
                    return;
                }
            } catch (error: any) {
                // Send error in streaming format
                res.write(`data: ${JSON.stringify({
                    type: 'error',
                    error: `AI service error: ${error.message}`
                })}\n\n`);

                res.end();
                return;
            }
        }

        // Non-streaming API continues with normal JSON response...

        // Get chat session
        let session = sessions.get(sessionId);
        if (!session) {
            const newSession = await createSession(req, res);
            if (!newSession) {
                throw new Error('Failed to create session');
            }
            // Add required properties to match ChatSession interface
            session = {
                ...newSession,
                messages: [],
                lastActive: new Date(),
                metadata: {}
            };
            sessions.set(sessionId, session);
        }

        // Add user message to session
        const userMessage: ChatMessage = {
            role: 'user',
            content: content,
            timestamp: new Date()
        };
        console.log("Created user message:", {
            role: userMessage.role,
            contentLength: userMessage.content?.length || 0,
            contentPreview: userMessage.content?.substring(0, 50) + (userMessage.content?.length > 50 ? '...' : '') || 'undefined'
        });
        session.messages.push(userMessage);

        // Get context for query
        const sources = await findRelevantNotes(content, session.noteContext || null);

        // Format messages for AI with proper type casting
        const aiMessages: Message[] = [
            { role: 'system', content: 'You are a helpful assistant for Trilium Notes. When providing answers, use only the context provided in the notes. If the information is not in the notes, say so.' },
            { role: 'user', content: buildContextFromNotes(sources, content) }
        ];

        // Ensure we're not sending empty content
        console.log("Final message content length:", aiMessages[1].content.length);
        console.log("Final message content preview:", aiMessages[1].content.substring(0, 100));

        try {
            // Get AI response using the safe accessor methods
            const aiResponse = await aiServiceManagerModule.default.generateChatCompletion(aiMessages, {
                temperature,
                maxTokens,
                model: provider ? `${provider}:${model}` : model,
                stream: false
            });

            // Add assistant message to session
            const assistantMessage: ChatMessage = {
                role: 'assistant',
                content: aiResponse.text,
                timestamp: new Date()
            };
            session.messages.push(assistantMessage);

            // Format sources for the response (without content to reduce payload size)
            const sourcesForResponse = sources.map(({ noteId, title, similarity, branchId }) => ({
                noteId,
                title,
                similarity: similarity ? Math.round(similarity * 100) / 100 : undefined,
                branchId
            }));

            return {
                id: sessionId,
                messages: session.messages,
                sources: sourcesForResponse,
                provider: aiResponse.provider,
                model: aiResponse.model
            };
        } catch (error: any) {
            log.error(`AI service error: ${error.message}`);
            throw new Error(`AI service error: ${error.message}`);
        }
    } catch (error: any) {
        log.error(`Error sending message: ${error.message}`);
        throw error;
    }
}

export default {
    createSession,
    getSession,
    updateSession,
    listSessions,
    deleteSession,
    sendMessage
};
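Note: with Accept: text/event-stream, sendMessage above replies with SSE frames of the form data: {"type": "init" | "chunk" | "done" | "error", ...}. A hedged client-side sketch of consuming that stream with fetch (this helper is illustrative and not part of the commit):

    async function streamChat(sessionId: string, content: string, onChunk: (text: string) => void) {
        const res = await fetch(`/api/llm/sessions/${sessionId}/messages`, {
            method: "POST",
            // the Accept header is what routes the request into the streaming branch above
            headers: { "Content-Type": "application/json", "Accept": "text/event-stream" },
            body: JSON.stringify({ sessionId, content })
        });

        const reader = res.body!.getReader();
        const decoder = new TextDecoder();
        let buffer = "";

        for (;;) {
            const { value, done } = await reader.read();
            if (done) break;
            buffer += decoder.decode(value, { stream: true });

            // SSE frames are separated by a blank line ("\n\n")
            let idx;
            while ((idx = buffer.indexOf("\n\n")) >= 0) {
                const frame = buffer.slice(0, idx);
                buffer = buffer.slice(idx + 2);
                if (frame.startsWith("data: ")) {
                    const event = JSON.parse(frame.slice(6));
                    if (event.type === "chunk") onChunk(event.text);
                    if (event.type === "error") throw new Error(event.error);
                }
            }
        }
    }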
@@ -14,7 +14,7 @@ async function listModels(req: Request, res: Response) {
         const ollamaBaseUrl = baseUrl || await options.getOption('ollamaBaseUrl') || 'http://localhost:11434';

         // Call Ollama API to get models
-        const response = await axios.get(`${ollamaBaseUrl}/api/tags`, {
+        const response = await axios.get(`${ollamaBaseUrl}/api/tags?format=json`, {
             headers: { 'Content-Type': 'application/json' },
             timeout: 10000
         });
@ -62,6 +62,7 @@ import otherRoute from "./api/other.js";
 import shareRoutes from "../share/routes.js";
 import embeddingsRoute from "./api/embeddings.js";
 import ollamaRoute from "./api/ollama.js";
+import llmRoute from "./api/llm.js";

 import etapiAuthRoutes from "../etapi/auth.js";
 import etapiAppInfoRoutes from "../etapi/app_info.js";

@ -380,6 +381,13 @@ function register(app: express.Application) {
     apiRoute(GET, "/api/embeddings/queue-status", embeddingsRoute.getQueueStatus);
     apiRoute(GET, "/api/embeddings/stats", embeddingsRoute.getEmbeddingStats);

+    apiRoute(PST, "/api/llm/sessions", llmRoute.createSession);
+    apiRoute(GET, "/api/llm/sessions", llmRoute.listSessions);
+    apiRoute(GET, "/api/llm/sessions/:sessionId", llmRoute.getSession);
+    apiRoute(PATCH, "/api/llm/sessions/:sessionId", llmRoute.updateSession);
+    apiRoute(DEL, "/api/llm/sessions/:sessionId", llmRoute.deleteSession);
+    apiRoute(PST, "/api/llm/sessions/:sessionId/messages", llmRoute.sendMessage);

     // Ollama API endpoints
     route(PST, "/api/ollama/list-models", [auth.checkApiAuth, csrfMiddleware], ollamaRoute.listModels, apiResultHandler);
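A client-side round trip against the new session endpoints would look roughly like the sketch below. The request body field names are assumptions based on the handlers, and the session/CSRF headers that apiRoute enforces are omitted for brevity:

    // Minimal sketch of driving the new LLM session API from the client
    async function demoLlmChat(): Promise<void> {
        // Create a session (request body shape assumed)
        const session = await (await fetch("/api/llm/sessions", {
            method: "POST",
            headers: { "Content-Type": "application/json" },
            body: JSON.stringify({ title: "My chat" })
        })).json();

        // Send a message into the session and read back the reply
        const reply = await (await fetch(`/api/llm/sessions/${session.id}/messages`, {
            method: "POST",
            headers: { "Content-Type": "application/json" },
            body: JSON.stringify({ content: "Summarize my notes on project X" })
        })).json();

        console.log(reply.messages.at(-1)?.content, reply.sources);
    }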
@ -1,23 +1,29 @@
 import BAttribute from "../becca/entities/battribute.js";
-import type { AttributeType, NoteType } from "../becca/entities/rows.js";
+import type { AttributeType } from "../becca/entities/rows.js";

 import becca from "../becca/becca.js";
 import noteService from "./notes.js";
 import log from "./log.js";
 import migrationService from "./migration.js";
+import options from "./options.js";
+import sql from "./sql.js";
 import { t } from "i18next";
 import { cleanUpHelp, getHelpHiddenSubtreeData } from "./in_app_help.js";
 import buildLaunchBarConfig from "./hidden_subtree_launcherbar.js";

 const LBTPL_ROOT = "_lbTplRoot";
 const LBTPL_BASE = "_lbTplBase";
-const LBTPL_COMMAND = "_lbTplCommandLauncher";
-const LBTPL_NOTE_LAUNCHER = "_lbTplNoteLauncher";
-const LBTPL_SCRIPT = "_lbTplScriptLauncher";
-const LBTPL_BUILTIN_WIDGET = "_lbTplBuiltinWidget";
+const LBTPL_HEADER = "_lbTplHeader";
+const LBTPL_NOTE = "_lbTplLauncherNote";
+const LBTPL_WIDGET = "_lbTplLauncherWidget";
+const LBTPL_COMMAND = "_lbTplLauncherCommand";
+const LBTPL_SCRIPT = "_lbTplLauncherScript";
 const LBTPL_SPACER = "_lbTplSpacer";
 const LBTPL_CUSTOM_WIDGET = "_lbTplCustomWidget";

+// Define launcher note types locally
+type LauncherNoteType = "launcher" | "search" | "doc" | "noteMap" | "contentWidget" | "book" | "file" | "image" | "text" | "relationMap" | "render" | "canvas" | "mermaid" | "webView" | "code" | "mindMap" | "geoMap";
+
 interface HiddenSubtreeAttribute {
     type: AttributeType;
     name: string;

@ -29,7 +35,7 @@ export interface HiddenSubtreeItem {
     notePosition?: number;
     id: string;
     title: string;
-    type: NoteType;
+    type: LauncherNoteType;
     icon?: string;
     attributes?: HiddenSubtreeAttribute[];
     children?: HiddenSubtreeItem[];

@ -37,7 +43,17 @@ export interface HiddenSubtreeItem {
     baseSize?: string;
     growthFactor?: string;
     targetNoteId?: "_backendLog" | "_globalNoteMap";
-    builtinWidget?: "bookmarks" | "spacer" | "backInHistoryButton" | "forwardInHistoryButton" | "syncStatus" | "protectedSession" | "todayInJournal" | "calendar" | "quickSearch";
+    builtinWidget?:
+        | "todayInJournal"
+        | "bookmarks"
+        | "spacer"
+        | "backInHistoryButton"
+        | "forwardInHistoryButton"
+        | "syncStatus"
+        | "protectedSession"
+        | "calendar"
+        | "quickSearch"
+        | "llmChatLauncher";
     command?: keyof typeof Command;
 }

@ -47,7 +63,8 @@ enum Command {
     searchNotes,
     createNoteIntoInbox,
     showRecentChanges,
-    showOptions
+    showOptions,
+    showLlmChat
 }

 /*

@ -143,7 +160,7 @@ function buildHiddenSubtreeDefinition(helpSubtree: HiddenSubtreeItem[]): HiddenS
             ]
         },
         {
-            id: LBTPL_NOTE_LAUNCHER,
+            id: LBTPL_NOTE,
             title: t("hidden-subtree.note-launcher-title"),
             type: "doc",
             attributes: [

@ -168,7 +185,7 @@ function buildHiddenSubtreeDefinition(helpSubtree: HiddenSubtreeItem[]): HiddenS
             ]
         },
         {
-            id: LBTPL_BUILTIN_WIDGET,
+            id: LBTPL_WIDGET,
             title: t("hidden-subtree.built-in-widget-title"),
             type: "doc",
             attributes: [

@ -182,7 +199,7 @@ function buildHiddenSubtreeDefinition(helpSubtree: HiddenSubtreeItem[]): HiddenS
             type: "doc",
             icon: "bx-move-vertical",
             attributes: [
-                { type: "relation", name: "template", value: LBTPL_BUILTIN_WIDGET },
+                { type: "relation", name: "template", value: LBTPL_WIDGET },
                 { type: "label", name: "builtinWidget", value: "spacer" },
                 { type: "label", name: "label:baseSize", value: "promoted,number" },
                 { type: "label", name: "label:growthFactor", value: "promoted,number" },

@ -359,12 +376,12 @@ function checkHiddenSubtreeRecursively(parentNoteId: string, item: HiddenSubtree
             attrs.push({ type: "label", name: "baseSize", value: item.baseSize });
             attrs.push({ type: "label", name: "growthFactor", value: item.growthFactor });
         } else {
-            attrs.push({ type: "relation", name: "template", value: LBTPL_BUILTIN_WIDGET });
+            attrs.push({ type: "relation", name: "template", value: LBTPL_WIDGET });
         }

         attrs.push({ type: "label", name: "builtinWidget", value: item.builtinWidget });
     } else if (item.targetNoteId) {
-        attrs.push({ type: "relation", name: "template", value: LBTPL_NOTE_LAUNCHER });
+        attrs.push({ type: "relation", name: "template", value: LBTPL_NOTE });
         attrs.push({ type: "relation", name: "target", value: item.targetNoteId });
     } else {
         throw new Error(`No action defined for launcher ${JSON.stringify(item)}`);

@ -429,9 +446,9 @@ export default {
     LBTPL_ROOT,
     LBTPL_BASE,
     LBTPL_COMMAND,
-    LBTPL_NOTE_LAUNCHER,
+    LBTPL_NOTE,
+    LBTPL_WIDGET,
     LBTPL_SCRIPT,
-    LBTPL_BUILTIN_WIDGET,
     LBTPL_SPACER,
     LBTPL_CUSTOM_WIDGET
 };
@ -46,7 +46,18 @@ export default function buildLaunchBarConfig() {
     const desktopAvailableLaunchers: HiddenSubtreeItem[] = [
         { id: "_lbBackInHistory", ...sharedLaunchers.backInHistory },
         { id: "_lbForwardInHistory", ...sharedLaunchers.forwardInHistory },
-        { id: "_lbBackendLog", title: t("hidden-subtree.backend-log-title"), type: "launcher", targetNoteId: "_backendLog", icon: "bx bx-terminal" }
+        { id: "_lbBackendLog", title: t("hidden-subtree.backend-log-title"), type: "launcher", targetNoteId: "_backendLog", icon: "bx bx-terminal" },
+        {
+            id: "_lbAvailableLlmChat",
+            title: t("hidden-subtree.llm-chat-title"),
+            type: "launcher",
+            command: "showLlmChat",
+            icon: "bx bx-bot",
+            attributes: [
+                { type: "label", name: "desktopOnly" },
+                { type: "label", name: "launcherType", value: "command" }
+            ]
+        }
     ];

     const desktopVisibleLaunchers: HiddenSubtreeItem[] = [

@ -68,6 +79,17 @@ export default function buildLaunchBarConfig() {
             attributes: [{ type: "label", name: "desktopOnly" }]
         },
         { id: "_lbNoteMap", title: t("hidden-subtree.note-map-title"), type: "launcher", targetNoteId: "_globalNoteMap", icon: "bx bxs-network-chart" },
+        {
+            id: "_lbLlmChat",
+            title: t("hidden-subtree.llm-chat-title"),
+            type: "launcher",
+            command: "showLlmChat",
+            icon: "bx bx-bot",
+            attributes: [
+                { type: "label", name: "desktopOnly" },
+                { type: "label", name: "launcherType", value: "command" }
+            ]
+        },
         { id: "_lbCalendar", ...sharedLaunchers.calendar },
         { id: "_lbRecentChanges", ...sharedLaunchers.recentChanges },
         { id: "_lbSpacer1", title: t("hidden-subtree.spacer-title"), type: "launcher", builtinWidget: "spacer", baseSize: "50", growthFactor: "0" },
@ -3,11 +3,23 @@ export interface Message {
     content: string;
 }

+// Interface for streaming response chunks
+export interface StreamChunk {
+    text: string;
+    done: boolean;
+    usage?: {
+        promptTokens?: number;
+        completionTokens?: number;
+        totalTokens?: number;
+    };
+}
+
 export interface ChatCompletionOptions {
     model?: string;
     temperature?: number;
     maxTokens?: number;
     systemPrompt?: string;
+    stream?: boolean; // Whether to stream the response
 }

 export interface ChatResponse {

@ -19,6 +31,8 @@ export interface ChatResponse {
         completionTokens?: number;
         totalTokens?: number;
     };
+    // Stream handler - only present when streaming is enabled
+    stream?: (callback: (chunk: StreamChunk) => Promise<void> | void) => Promise<string>;
 }

 export interface AIService {
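Taken together, these interfaces imply the following consumption pattern for a streaming completion. This is a minimal sketch under the assumption that `service` is any AIService implementation and that the import path matches the file edited here; it is not code from the commit:

    import type { AIService, Message } from './ai_interface.js';

    // Drain a streaming ChatResponse; fall back to the plain text field otherwise.
    async function printStreamed(service: AIService, messages: Message[]): Promise<void> {
        const response = await service.generateChatCompletion(messages, { stream: true });

        if (response.stream) {
            // The callback fires once per StreamChunk; the promise resolves to the full text.
            const fullText = await response.stream((chunk) => {
                if (!chunk.done) {
                    console.log("chunk:", chunk.text);       // incremental content
                } else if (chunk.usage) {
                    console.log("total tokens:", chunk.usage.totalTokens);
                }
            });
            console.log("complete response:", fullText);
        } else {
            console.log(response.text);                       // provider did not stream
        }
    }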
@ -3,6 +3,7 @@ import type { AIService, ChatCompletionOptions, ChatResponse, Message } from './
 import { OpenAIService } from './openai_service.js';
 import { AnthropicService } from './anthropic_service.js';
 import { OllamaService } from './ollama_service.js';
+import log from '../log.js';

 type ServiceProviders = 'openai' | 'anthropic' | 'ollama';

@ -13,39 +14,65 @@ export class AIServiceManager {
         ollama: new OllamaService()
     };

-    private providerOrder: ServiceProviders[] = [];
+    private providerOrder: ServiceProviders[] = ['openai', 'anthropic', 'ollama']; // Default order
+    private initialized = false;

     constructor() {
-        this.updateProviderOrder();
+        // Don't call updateProviderOrder here
+        // Wait until a method is called to initialize
     }

     /**
      * Update the provider precedence order from saved options
+     * Returns true if successful, false if options not available yet
      */
-    updateProviderOrder() {
-        // Default precedence: openai, anthropic, ollama
-        const defaultOrder: ServiceProviders[] = ['openai', 'anthropic', 'ollama'];
-
-        // Get custom order from options
-        const customOrder = options.getOption('aiProviderPrecedence');
-
-        if (customOrder) {
-            try {
-                const parsed = JSON.parse(customOrder);
-                // Validate that all providers are valid
-                if (Array.isArray(parsed) &&
-                    parsed.every(p => Object.keys(this.services).includes(p))) {
-                    this.providerOrder = parsed as ServiceProviders[];
-                } else {
-                    console.warn('Invalid AI provider precedence format, using defaults');
-                    this.providerOrder = defaultOrder;
-                }
-            } catch (e) {
-                console.error('Failed to parse AI provider precedence:', e);
-                this.providerOrder = defaultOrder;
-            }
-        } else {
-            this.providerOrder = defaultOrder;
+    updateProviderOrder(): boolean {
+        if (this.initialized) {
+            return true;
+        }
+
+        try {
+            // Default precedence: openai, anthropic, ollama
+            const defaultOrder: ServiceProviders[] = ['openai', 'anthropic', 'ollama'];
+
+            // Get custom order from options
+            const customOrder = options.getOption('aiProviderPrecedence');
+
+            if (customOrder) {
+                try {
+                    const parsed = JSON.parse(customOrder);
+                    // Validate that all providers are valid
+                    if (Array.isArray(parsed) &&
+                        parsed.every(p => Object.keys(this.services).includes(p))) {
+                        this.providerOrder = parsed as ServiceProviders[];
+                    } else {
+                        log.info('Invalid AI provider precedence format, using defaults');
+                        this.providerOrder = defaultOrder;
+                    }
+                } catch (e) {
+                    log.error(`Failed to parse AI provider precedence: ${e}`);
+                    this.providerOrder = defaultOrder;
+                }
+            } else {
+                this.providerOrder = defaultOrder;
+            }
+
+            this.initialized = true;
+            return true;
+        } catch (error) {
+            // If options table doesn't exist yet, use defaults
+            // This happens during initial database creation
+            this.providerOrder = ['openai', 'anthropic', 'ollama'];
+            return false;
+        }
+    }
+
+    /**
+     * Ensure manager is initialized before using
+     */
+    private ensureInitialized() {
+        if (!this.initialized) {
+            this.updateProviderOrder();
         }
     }

@ -60,6 +87,7 @@ export class AIServiceManager {
      * Get list of available providers
      */
     getAvailableProviders(): ServiceProviders[] {
+        this.ensureInitialized();
         return Object.entries(this.services)
             .filter(([_, service]) => service.isAvailable())
             .map(([key, _]) => key as ServiceProviders);

@ -70,12 +98,12 @@ export class AIServiceManager {
      * based on the configured precedence order
      */
     async generateChatCompletion(messages: Message[], options: ChatCompletionOptions = {}): Promise<ChatResponse> {
+        this.ensureInitialized();
+
         if (!messages || messages.length === 0) {
             throw new Error('No messages provided for chat completion');
         }

-        this.updateProviderOrder();
-
         // Try providers in order of preference
         const availableProviders = this.getAvailableProviders();

@ -96,7 +124,7 @@ export class AIServiceManager {
                 const modifiedOptions = { ...options, model: modelName };
                 return await this.services[providerName as ServiceProviders].generateChatCompletion(messages, modifiedOptions);
             } catch (error) {
-                console.error(`Error with specified provider ${providerName}:`, error);
+                log.error(`Error with specified provider ${providerName}: ${error}`);
                 // If the specified provider fails, continue with the fallback providers
             }
         }

@ -109,7 +137,7 @@ export class AIServiceManager {
             try {
                 return await this.services[provider].generateChatCompletion(messages, options);
             } catch (error) {
-                console.error(`Error with provider ${provider}:`, error);
+                log.error(`Error with provider ${provider}: ${error}`);
                 lastError = error as Error;
                 // Continue to the next provider
             }

@ -120,6 +148,29 @@ export class AIServiceManager {
     }
 }

-// Singleton instance
-const aiServiceManager = new AIServiceManager();
-export default aiServiceManager;
+// Don't create singleton immediately, use a lazy-loading pattern
+let instance: AIServiceManager | null = null;
+
+/**
+ * Get the AIServiceManager instance (creates it if not already created)
+ */
+function getInstance(): AIServiceManager {
+    if (!instance) {
+        instance = new AIServiceManager();
+    }
+    return instance;
+}
+
+export default {
+    getInstance,
+    // Also export methods directly for convenience
+    isAnyServiceAvailable(): boolean {
+        return getInstance().isAnyServiceAvailable();
+    },
+    getAvailableProviders() {
+        return getInstance().getAvailableProviders();
+    },
+    async generateChatCompletion(messages: Message[], options: ChatCompletionOptions = {}): Promise<ChatResponse> {
+        return getInstance().generateChatCompletion(messages, options);
+    }
+};
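The lazy default export keeps module load from touching the options table before the schema exists, while callers stay unchanged. A quick sketch of both call styles, with the import path assumed from the file above:

    import aiServiceManager from './ai_service_manager.js';

    async function demo() {
        // Convenience wrappers delegate to the lazily created singleton:
        const reply = await aiServiceManager.generateChatCompletion([
            { role: 'user', content: 'Hello' }
        ]);
        console.log(reply.provider, reply.text);

        // Or fetch the instance explicitly when making several calls in a row:
        const manager = aiServiceManager.getInstance();
        if (manager.isAnyServiceAvailable()) {
            console.log(manager.getAvailableProviders());
        }
    }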
@ -150,7 +150,8 @@ export class OllamaEmbeddingProvider extends BaseEmbeddingProvider {
             `${this.baseUrl}/api/embeddings`,
             {
                 model: modelName,
-                prompt: trimmedText
+                prompt: trimmedText,
+                format: "json"
             },
             {
                 headers: {
@ -32,43 +32,201 @@ export class OllamaService extends BaseAIService {
         try {
             const endpoint = `${baseUrl.replace(/\/+$/, '')}/api/chat`;

-            const response = await fetch(endpoint, {
-                method: 'POST',
-                headers: {
-                    'Content-Type': 'application/json'
-                },
-                body: JSON.stringify({
-                    model,
-                    messages: formattedMessages,
-                    options: {
-                        temperature,
-                    }
-                })
-            });
-
-            if (!response.ok) {
-                const errorBody = await response.text();
-                throw new Error(`Ollama API error: ${response.status} ${response.statusText} - ${errorBody}`);
-            }
-
-            const data = await response.json();
-
-            return {
-                text: data.message?.content || "No response from Ollama",
-                model: data.model || model,
-                provider: this.getName(),
-                usage: {
-                    // Ollama doesn't provide token usage in the same format
-                    totalTokens: data.eval_count || data.prompt_eval_count || 0
-                }
-            };
-        } catch (error) {
-            console.error('Ollama service error:', error);
-            throw error;
+            // Determine if we should stream the response
+            const shouldStream = opts.stream === true;
+
+            if (shouldStream) {
+                // Handle streaming response
+                const response = await fetch(endpoint, {
+                    method: 'POST',
+                    headers: {
+                        'Content-Type': 'application/json'
+                    },
+                    body: JSON.stringify({
+                        model,
+                        messages: formattedMessages,
+                        stream: true,
+                        options: {
+                            temperature,
+                        }
+                    })
+                });
+
+                if (!response.ok) {
+                    const errorBody = await response.text();
+                    throw new Error(`Ollama API error: ${response.status} ${response.statusText} - ${errorBody}`);
+                }
+
+                // For streaming, we return an object that has a callback for handling the stream
+                return {
+                    text: "", // Initial empty text that will be built up
+                    model: model,
+                    provider: this.getName(),
+                    usage: {
+                        promptTokens: 0,
+                        completionTokens: 0,
+                        totalTokens: 0
+                    },
+                    stream: async (callback) => {
+                        if (!response.body) {
+                            throw new Error("No response body from Ollama");
+                        }
+
+                        const reader = response.body.getReader();
+                        let fullText = "";
+                        let partialLine = "";
+
+                        try {
+                            while (true) {
+                                const { done, value } = await reader.read();
+                                if (done) break;
+
+                                // Convert the chunk to text
+                                const chunk = new TextDecoder().decode(value);
+                                partialLine += chunk;
+
+                                // Split by lines and process each complete JSON object
+                                const lines = partialLine.split('\n');
+
+                                // Process all complete lines except the last one (which might be incomplete)
+                                for (let i = 0; i < lines.length - 1; i++) {
+                                    const line = lines[i].trim();
+                                    if (!line) continue;
+
+                                    try {
+                                        const data = JSON.parse(line);
+                                        console.log("Streaming chunk received:", data);
+
+                                        if (data.message && data.message.content) {
+                                            // Extract just the new content
+                                            const newContent = data.message.content;
+                                            // Add to full text
+                                            fullText += newContent;
+                                            // Call the callback with the new content
+                                            await callback({
+                                                text: newContent,
+                                                done: false
+                                            });
+                                        }
+
+                                        if (data.done) {
+                                            // Final message in the stream
+                                            await callback({
+                                                text: "",
+                                                done: true,
+                                                usage: {
+                                                    promptTokens: data.prompt_eval_count || 0,
+                                                    completionTokens: data.eval_count || 0,
+                                                    totalTokens: (data.prompt_eval_count || 0) + (data.eval_count || 0)
+                                                }
+                                            });
+                                        }
+                                    } catch (err) {
+                                        console.error("Error parsing JSON from Ollama stream:", err, "Line:", line);
+                                    }
+                                }
+
+                                // Keep the potentially incomplete last line for the next iteration
+                                partialLine = lines[lines.length - 1];
+                            }
+
+                            // Handle any remaining content in partialLine
+                            if (partialLine.trim()) {
+                                try {
+                                    const data = JSON.parse(partialLine.trim());
+                                    if (data.message && data.message.content) {
+                                        fullText += data.message.content;
+                                        await callback({
+                                            text: data.message.content,
+                                            done: false
+                                        });
+                                    }
+                                } catch (err) {
+                                    console.error("Error parsing final JSON from Ollama stream:", err);
+                                }
+                            }
+
+                            return fullText;
+                        } catch (err) {
+                            console.error("Error reading Ollama stream:", err);
+                            throw err;
+                        }
+                    }
+                };
+            } else {
+                // Non-streaming response - explicitly request JSON format
+                console.log("Sending to Ollama with formatted messages:", JSON.stringify(formattedMessages, null, 2));
+
+                const response = await fetch(endpoint, {
+                    method: 'POST',
+                    headers: {
+                        'Content-Type': 'application/json'
+                    },
+                    body: JSON.stringify({
+                        model,
+                        messages: formattedMessages,
+                        stream: false,
+                        options: {
+                            temperature,
+                        }
+                    })
+                });
+
+                if (!response.ok) {
+                    const errorBody = await response.text();
+                    throw new Error(`Ollama API error: ${response.status} ${response.statusText} - ${errorBody}`);
+                }
+
+                const rawResponseText = await response.text();
+                console.log("Raw response from Ollama:", rawResponseText);
+
+                let data;
+
+                try {
+                    data = JSON.parse(rawResponseText);
+                    console.log("Parsed Ollama response:", JSON.stringify(data, null, 2));
+                } catch (err: any) {
+                    console.error("Error parsing JSON response from Ollama:", err);
+                    console.error("Raw response:", rawResponseText);
+                    throw new Error(`Failed to parse Ollama response as JSON: ${err.message}`);
+                }
+
+                // Check for empty or JSON object responses
+                const content = data.message?.content || '';
+                let finalResponseText = content;
+
+                if (content === '{}' || content === '{ }' || content === '{  }') {
+                    finalResponseText = "I don't have information about that in my notes.";
+                } else if (!content.trim()) {
+                    finalResponseText = "No response was generated. Please try asking a different question.";
+                }
+
+                return {
+                    text: finalResponseText,
+                    model: data.model || model,
+                    provider: this.getName(),
+                    usage: {
+                        promptTokens: data.prompt_eval_count || 0,
+                        completionTokens: data.eval_count || 0,
+                        totalTokens: (data.prompt_eval_count || 0) + (data.eval_count || 0)
+                    }
+                };
+            }
+        } catch (error: any) {
+            console.error("Ollama service error:", error);
+            throw new Error(`Ollama service error: ${error.message}`);
         }
     }

     private formatMessages(messages: Message[], systemPrompt: string): any[] {
+        console.log("Input messages for formatting:", JSON.stringify(messages, null, 2));
+
+        // Check if there are any messages with empty content
+        const emptyMessages = messages.filter(msg => !msg.content || msg.content === "Empty message");
+        if (emptyMessages.length > 0) {
+            console.warn("Found messages with empty content:", emptyMessages);
+        }
+
         // Add system message if it doesn't exist
         const hasSystemMessage = messages.some(m => m.role === 'system');
         let resultMessages = [...messages];

@ -80,6 +238,21 @@ export class OllamaService extends BaseAIService {
             });
         }

+        // Validate each message has content
+        resultMessages = resultMessages.map(msg => {
+            // Ensure each message has a valid content
+            if (!msg.content || typeof msg.content !== 'string') {
+                console.warn(`Message with role ${msg.role} has invalid content:`, msg.content);
+                return {
+                    ...msg,
+                    content: msg.content || "Empty message"
+                };
+            }
+            return msg;
+        });
+
+        console.log("Formatted messages for Ollama:", JSON.stringify(resultMessages, null, 2));
+
         // Ollama uses the same format as OpenAI for messages
         return resultMessages;
     }
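For reference, the fields the non-streaming branch reads correspond to an Ollama /api/chat reply shaped roughly like the sketch below; the values are invented:

    // Abridged sketch of a non-streaming Ollama /api/chat reply (values invented)
    const exampleOllamaReply = {
        model: "llama3",
        message: { role: "assistant", content: "Here is what your notes say..." },
        done: true,
        prompt_eval_count: 26,   // prompt tokens
        eval_count: 118          // completion tokens
    };

In streaming mode the same kind of document arrives as newline-delimited JSON, one object per chunk, which is why the reader above buffers partialLine and only parses complete lines.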
@ -275,6 +275,12 @@ const defaultOptions: DefaultOption[] = [
     { name: "ollamaBaseUrl", value: "http://localhost:11434", isSynced: true },
     { name: "ollamaEmbeddingModel", value: "nomic-embed-text", isSynced: true },
     { name: "embeddingAutoUpdate", value: "true", isSynced: true },
+
+    // Adding missing AI options
+    { name: "aiTemperature", value: "0.7", isSynced: true },
+    { name: "aiSystemPrompt", value: "", isSynced: true },
+    { name: "aiProviderPrecedence", value: "openai,anthropic,ollama", isSynced: true },
+    { name: "embeddingsDefaultProvider", value: "openai", isSynced: true },
 ];

 /**
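Worth noting while reviewing: this default stores aiProviderPrecedence as a bare comma-separated string, but AIServiceManager.updateProviderOrder() runs the value through JSON.parse, so the stored default will fail to parse and the hard-coded order is used instead. A value that would survive the parse looks like this (illustrative):

    // A JSON-encoded precedence value that updateProviderOrder() would accept
    const precedence = JSON.stringify(["ollama", "openai", "anthropic"]);
    // -> '["ollama","openai","anthropic"]'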
@ -68,6 +68,7 @@ export interface OptionDefinitions extends KeyboardShortcutsOptions<KeyboardActi
     embeddingBatchSize: number;
     embeddingDefaultDimension: number;
     embeddingAutoUpdate: boolean;
+    embeddingsDefaultProvider: string;

     lastSyncedPull: number;
     lastSyncedPush: number;
@ -9,7 +9,7 @@ import searchService from "./search/services/search.js";
 import SearchContext from "./search/search_context.js";
 import hiddenSubtree from "./hidden_subtree.js";
 import { t } from "i18next";
-const { LBTPL_NOTE_LAUNCHER, LBTPL_CUSTOM_WIDGET, LBTPL_SPACER, LBTPL_SCRIPT } = hiddenSubtree;
+const { LBTPL_NOTE, LBTPL_CUSTOM_WIDGET, LBTPL_SPACER, LBTPL_SCRIPT } = hiddenSubtree;

 function getInboxNote(date: string) {
     const workspaceNote = hoistedNoteService.getWorkspaceNote();

@ -176,7 +176,7 @@ function createLauncher({ parentNoteId, launcherType, noteId }: LauncherConfig)
             parentNoteId: parentNoteId
         }).note;

-        note.addRelation("template", LBTPL_NOTE_LAUNCHER);
+        note.addRelation("template", LBTPL_NOTE);
     } else if (launcherType === "script") {
         note = createScriptLauncher(parentNoteId, noteId);
     } else if (launcherType === "customWidget") {
@ -230,6 +230,7 @@
     "protected-session-title": "Protected Session",
     "sync-status-title": "Sync Status",
     "settings-title": "Settings",
+    "llm-chat-title": "Chat with Notes",
     "options-title": "Options",
     "appearance-title": "Appearance",
     "shortcuts-title": "Shortcuts",