From e09e15ad051dcbdd54cfd51cbd1a1e339f19a4dc Mon Sep 17 00:00:00 2001
From: perf3ct
Date: Sun, 2 Mar 2025 18:58:25 -0800
Subject: [PATCH] start from scratch again

---
 db/migrations/0230__ai_llm_options.sql        |  22 ++
 .../widgets/type_widgets/content_widget.ts    |   2 +
 .../type_widgets/options/ai_settings.js       |   3 +
 .../type_widgets/options/ai_settings.ts       | 216 ++++++++++++++++++
 src/public/translations/en/translation.json   |  26 +++
 src/routes/api/options.ts                     |  16 +-
 src/services/app_info.ts                      |   2 +-
 src/services/hidden_subtree.ts                |   1 +
 .../llm/embeddings/base_embeddings.ts         |  77 +++++++
 .../llm/embeddings/embeddings_interface.ts    |  65 ++++++
 src/services/options_interface.ts             |  15 ++
 translations/en/server.json                   |   1 +
 12 files changed, 444 insertions(+), 2 deletions(-)
 create mode 100644 db/migrations/0230__ai_llm_options.sql
 create mode 100644 src/public/app/widgets/type_widgets/options/ai_settings.js
 create mode 100644 src/public/app/widgets/type_widgets/options/ai_settings.ts
 create mode 100644 src/services/llm/embeddings/base_embeddings.ts
 create mode 100644 src/services/llm/embeddings/embeddings_interface.ts

diff --git a/db/migrations/0230__ai_llm_options.sql b/db/migrations/0230__ai_llm_options.sql
new file mode 100644
index 000000000..065373c48
--- /dev/null
+++ b/db/migrations/0230__ai_llm_options.sql
@@ -0,0 +1,22 @@
+-- Add new options for AI/LLM integration
+INSERT INTO options (name, value, isSynced) VALUES ('aiEnabled', 'false', 1);
+
+-- OpenAI settings
+INSERT INTO options (name, value, isSynced) VALUES ('openaiApiKey', '', 1);
+INSERT INTO options (name, value, isSynced) VALUES ('openaiDefaultModel', 'gpt-4o', 1);
+INSERT INTO options (name, value, isSynced) VALUES ('openaiBaseUrl', 'https://api.openai.com/v1', 1);
+
+-- Anthropic settings
+INSERT INTO options (name, value, isSynced) VALUES ('anthropicApiKey', '', 1);
+INSERT INTO options (name, value, isSynced) VALUES ('anthropicDefaultModel', 'claude-3-opus-20240229', 1);
+INSERT INTO options (name, value, isSynced) VALUES ('anthropicBaseUrl', 'https://api.anthropic.com/v1', 1);
+
+-- Ollama settings
+INSERT INTO options (name, value, isSynced) VALUES ('ollamaEnabled', 'false', 1);
+INSERT INTO options (name, value, isSynced) VALUES ('ollamaBaseUrl', 'http://localhost:11434', 1);
+INSERT INTO options (name, value, isSynced) VALUES ('ollamaDefaultModel', 'llama3', 1);
+
+-- General AI settings
+INSERT INTO options (name, value, isSynced) VALUES ('aiProviderPrecedence', 'openai,anthropic,ollama', 1);
+INSERT INTO options (name, value, isSynced) VALUES ('aiTemperature', '0.7', 1);
+INSERT INTO options (name, value, isSynced) VALUES ('aiSystemPrompt', '', 1);
\ No newline at end of file
diff --git a/src/public/app/widgets/type_widgets/content_widget.ts b/src/public/app/widgets/type_widgets/content_widget.ts
index 0e9f3ca22..35c616cd5 100644
--- a/src/public/app/widgets/type_widgets/content_widget.ts
+++ b/src/public/app/widgets/type_widgets/content_widget.ts
@@ -37,6 +37,7 @@ import LocalizationOptions from "./options/appearance/i18n.js";
 import CodeBlockOptions from "./options/appearance/code_block.js";
 import EditorOptions from "./options/text_notes/editor.js";
 import ShareSettingsOptions from "./options/other/share_settings.js";
+import AiSettingsOptions from "./options/ai_settings.js";
 import type FNote from "../../entities/fnote.js";
 import type NoteContextAwareWidget from "../note_context_aware_widget.js";
@@ -70,6 +71,7 @@ const CONTENT_WIDGETS: Record = {
     _optionsEtapi: [EtapiOptions],
     _optionsBackup: [BackupOptions],
     _optionsSync: [SyncOptions],
+    _optionsAi: [AiSettingsOptions],
     _optionsOther: [
         SearchEngineOptions,
         TrayOptions,
diff --git a/src/public/app/widgets/type_widgets/options/ai_settings.js b/src/public/app/widgets/type_widgets/options/ai_settings.js
new file mode 100644
index 000000000..17e039309
--- /dev/null
+++ b/src/public/app/widgets/type_widgets/options/ai_settings.js
@@ -0,0 +1,3 @@
+import AiSettingsWidget from "./ai_settings.ts";
+
+export default AiSettingsWidget;
diff --git a/src/public/app/widgets/type_widgets/options/ai_settings.ts b/src/public/app/widgets/type_widgets/options/ai_settings.ts
new file mode 100644
index 000000000..f7c9d4900
--- /dev/null
+++ b/src/public/app/widgets/type_widgets/options/ai_settings.ts
@@ -0,0 +1,216 @@
+import OptionsWidget from "./options_widget.js";
+import { t } from "../../../services/i18n.js";
+import type { FilterOptionsByType, OptionMap } from "../../../../../services/options_interface.js";
+
+export default class AiSettingsWidget extends OptionsWidget {
+    doRender() {
+        this.$widget = $(`
+        <div class="options-section">
+            <h4>${t("ai_llm.title")}</h4>
+
+            <div class="form-group">
+                <label>
+                    <input class="ai-enabled" type="checkbox">
+                    ${t("ai_llm.enable_ai_features")}
+                </label>
+                <div class="form-text">${t("ai_llm.enable_ai_description")}</div>
+            </div>
+        </div>
+
+        <div class="options-section ai-providers-section">
+            <h4>${t("ai_llm.provider_configuration")}</h4>
+
+            <div class="form-group">
+                <label>${t("ai_llm.provider_precedence")}</label>
+                <input class="ai-provider-precedence form-control" type="text">
+                <div class="form-text">${t("ai_llm.provider_precedence_description")}</div>
+            </div>
+
+            <div class="form-group">
+                <label>${t("ai_llm.temperature")}</label>
+                <input class="ai-temperature form-control" type="number" min="0" max="2" step="0.1">
+                <div class="form-text">${t("ai_llm.temperature_description")}</div>
+            </div>
+
+            <div class="form-group">
+                <label>${t("ai_llm.system_prompt")}</label>
+                <textarea class="ai-system-prompt form-control" rows="3"></textarea>
+                <div class="form-text">${t("ai_llm.system_prompt_description")}</div>
+            </div>
+        </div>
+
+        <div class="options-section ai-provider">
+            <h4>${t("ai_llm.openai_configuration")}</h4>
+
+            <div class="form-group">
+                <label>${t("ai_llm.api_key")}</label>
+                <input class="openai-api-key form-control" type="password">
+            </div>
+
+            <div class="form-group">
+                <label>${t("ai_llm.default_model")}</label>
+                <input class="openai-default-model form-control" type="text">
+                <div class="form-text">${t("ai_llm.openai_model_description")}</div>
+            </div>
+
+            <div class="form-group">
+                <label>${t("ai_llm.base_url")}</label>
+                <input class="openai-base-url form-control" type="text">
+                <div class="form-text">${t("ai_llm.openai_url_description")}</div>
+            </div>
+        </div>
+
+        <div class="options-section ai-provider">
+            <h4>${t("ai_llm.anthropic_configuration")}</h4>
+
+            <div class="form-group">
+                <label>${t("ai_llm.api_key")}</label>
+                <input class="anthropic-api-key form-control" type="password">
+            </div>
+
+            <div class="form-group">
+                <label>${t("ai_llm.default_model")}</label>
+                <input class="anthropic-default-model form-control" type="text">
+                <div class="form-text">${t("ai_llm.anthropic_model_description")}</div>
+            </div>
+
+            <div class="form-group">
+                <label>${t("ai_llm.base_url")}</label>
+                <input class="anthropic-base-url form-control" type="text">
+                <div class="form-text">${t("ai_llm.anthropic_url_description")}</div>
+            </div>
+        </div>
+
+        <div class="options-section ai-provider">
+            <h4>${t("ai_llm.ollama_configuration")}</h4>
+
+            <div class="form-group">
+                <label>
+                    <input class="ollama-enabled" type="checkbox">
+                    ${t("ai_llm.enable_ollama")}
+                </label>
+                <div class="form-text">${t("ai_llm.enable_ollama_description")}</div>
+            </div>
+
+            <div class="form-group">
+                <label>${t("ai_llm.base_url")}</label>
+                <input class="ollama-base-url form-control" type="text">
+                <div class="form-text">${t("ai_llm.ollama_url_description")}</div>
+            </div>
+
+            <div class="form-group">
+                <label>${t("ai_llm.default_model")}</label>
+                <input class="ollama-default-model form-control" type="text">
+                <div class="form-text">${t("ai_llm.ollama_model_description")}</div>
+            </div>
+        </div>
+        `);
+
+        const $aiEnabled = this.$widget.find('.ai-enabled');
+        $aiEnabled.on('change', async () => {
+            await this.updateOption('aiEnabled', $aiEnabled.prop('checked') ? "true" : "false");
+            this.updateAiSectionVisibility();
+        });
+
+        const $ollamaEnabled = this.$widget.find('.ollama-enabled');
+        $ollamaEnabled.on('change', async () => {
+            await this.updateOption('ollamaEnabled', $ollamaEnabled.prop('checked') ? "true" : "false");
+        });
+
+        const $aiProviderPrecedence = this.$widget.find('.ai-provider-precedence');
+        $aiProviderPrecedence.on('change', async () => {
+            await this.updateOption('aiProviderPrecedence', $aiProviderPrecedence.val() as string);
+        });
+
+        const $aiTemperature = this.$widget.find('.ai-temperature');
+        $aiTemperature.on('change', async () => {
+            await this.updateOption('aiTemperature', $aiTemperature.val() as string);
+        });
+
+        const $aiSystemPrompt = this.$widget.find('.ai-system-prompt');
+        $aiSystemPrompt.on('change', async () => {
+            await this.updateOption('aiSystemPrompt', $aiSystemPrompt.val() as string);
+        });
+
+        const $openaiApiKey = this.$widget.find('.openai-api-key');
+        $openaiApiKey.on('change', async () => {
+            await this.updateOption('openaiApiKey', $openaiApiKey.val() as string);
+        });
+
+        const $openaiDefaultModel = this.$widget.find('.openai-default-model');
+        $openaiDefaultModel.on('change', async () => {
+            await this.updateOption('openaiDefaultModel', $openaiDefaultModel.val() as string);
+        });
+
+        const $openaiBaseUrl = this.$widget.find('.openai-base-url');
+        $openaiBaseUrl.on('change', async () => {
+            await this.updateOption('openaiBaseUrl', $openaiBaseUrl.val() as string);
+        });
+
+        const $anthropicApiKey = this.$widget.find('.anthropic-api-key');
+        $anthropicApiKey.on('change', async () => {
+            await this.updateOption('anthropicApiKey', $anthropicApiKey.val() as string);
+        });
+
+        const $anthropicDefaultModel = this.$widget.find('.anthropic-default-model');
+        $anthropicDefaultModel.on('change', async () => {
+            await this.updateOption('anthropicDefaultModel', $anthropicDefaultModel.val() as string);
+        });
+
+        const $anthropicBaseUrl = this.$widget.find('.anthropic-base-url');
+        $anthropicBaseUrl.on('change', async () => {
+            await this.updateOption('anthropicBaseUrl', $anthropicBaseUrl.val() as string);
+        });
+
+        const $ollamaBaseUrl = this.$widget.find('.ollama-base-url');
+        $ollamaBaseUrl.on('change', async () => {
+            await this.updateOption('ollamaBaseUrl', $ollamaBaseUrl.val() as string);
+        });
+
+        const $ollamaDefaultModel = this.$widget.find('.ollama-default-model');
+        $ollamaDefaultModel.on('change', async () => {
+            await this.updateOption('ollamaDefaultModel', $ollamaDefaultModel.val() as string);
+        });
+
+        return this.$widget;
+    }
+
+    updateAiSectionVisibility() {
+        if (!this.$widget) return;
+
+        const aiEnabled = this.$widget.find('.ai-enabled').prop('checked');
+        this.$widget.find('.ai-providers-section').toggle(aiEnabled);
+        this.$widget.find('.ai-provider').toggle(aiEnabled);
+    }
+
+    optionsLoaded(options: OptionMap) {
+        if (!this.$widget) return;
+
+        this.setCheckboxState(this.$widget.find('.ai-enabled'), options.aiEnabled);
+        this.setCheckboxState(this.$widget.find('.ollama-enabled'), options.ollamaEnabled);
+
+        this.$widget.find('.ai-provider-precedence').val(options.aiProviderPrecedence);
+        this.$widget.find('.ai-temperature').val(options.aiTemperature);
+        this.$widget.find('.ai-system-prompt').val(options.aiSystemPrompt);
+
+        this.$widget.find('.openai-api-key').val(options.openaiApiKey);
+        this.$widget.find('.openai-default-model').val(options.openaiDefaultModel);
+        this.$widget.find('.openai-base-url').val(options.openaiBaseUrl);
+
+        this.$widget.find('.anthropic-api-key').val(options.anthropicApiKey);
+        this.$widget.find('.anthropic-default-model').val(options.anthropicDefaultModel);
+        this.$widget.find('.anthropic-base-url').val(options.anthropicBaseUrl);
+
+        this.$widget.find('.ollama-base-url').val(options.ollamaBaseUrl);
+        this.$widget.find('.ollama-default-model').val(options.ollamaDefaultModel);
+
+        this.updateAiSectionVisibility();
+    }
+}
diff --git a/src/public/translations/en/translation.json b/src/public/translations/en/translation.json
index b334f03f3..04a3249c2 100644
--- a/src/public/translations/en/translation.json
+++ b/src/public/translations/en/translation.json
@@ -1120,6 +1120,32 @@
         "layout-vertical-description": "launcher bar is on the left (default)",
         "layout-horizontal-description": "launcher bar is underneath the tab bar, the tab bar is now full width."
     },
+    "ai_llm": {
+        "title": "AI/LLM Integration",
+        "enable_ai_features": "Enable AI/LLM features",
+        "enable_ai_description": "Enable AI features like note summarization, content generation, and other LLM capabilities",
+        "provider_configuration": "AI Provider Configuration",
+        "provider_precedence": "Provider Precedence",
+        "provider_precedence_description": "Comma-separated list of providers in order of precedence (e.g., 'openai,anthropic,ollama')",
+        "temperature": "Temperature",
+        "temperature_description": "Controls randomness in responses (0 = deterministic, 2 = maximum randomness)",
+        "system_prompt": "System Prompt",
+        "system_prompt_description": "Default system prompt used for all AI interactions",
+        "openai_configuration": "OpenAI Configuration",
+        "api_key": "API Key",
+        "default_model": "Default Model",
+        "openai_model_description": "Examples: gpt-4o, gpt-4-turbo, gpt-3.5-turbo",
+        "base_url": "Base URL",
+        "openai_url_description": "Default: https://api.openai.com/v1",
+        "anthropic_configuration": "Anthropic Configuration",
+        "anthropic_model_description": "Examples: claude-3-opus-20240229, claude-3-sonnet-20240229",
+        "anthropic_url_description": "Default: https://api.anthropic.com/v1",
+        "ollama_configuration": "Ollama Configuration",
+        "enable_ollama": "Enable Ollama",
+        "enable_ollama_description": "Enable Ollama for local AI model usage",
+        "ollama_url_description": "Default: http://localhost:11434",
+        "ollama_model_description": "Examples: llama3, mistral, phi3"
+    },
     "zoom_factor": {
         "title": "Zoom Factor (desktop build only)",
         "description": "Zooming can be controlled with CTRL+- and CTRL+= shortcuts as well."
diff --git a/src/routes/api/options.ts b/src/routes/api/options.ts
index 50ff6b6b6..b60b65908 100644
--- a/src/routes/api/options.ts
+++ b/src/routes/api/options.ts
@@ -77,7 +77,21 @@ const ALLOWED_OPTIONS = new Set([
     "backgroundEffects",
     "allowedHtmlTags",
     "redirectBareDomain",
-    "showLoginInShareTheme"
+    "showLoginInShareTheme",
+    // AI/LLM integration options
+    "aiEnabled",
+    "openaiApiKey",
+    "openaiDefaultModel",
+    "openaiBaseUrl",
+    "anthropicApiKey",
+    "anthropicDefaultModel",
+    "anthropicBaseUrl",
+    "ollamaEnabled",
+    "ollamaBaseUrl",
+    "ollamaDefaultModel",
+    "aiProviderPrecedence",
+    "aiTemperature",
+    "aiSystemPrompt"
 ]);
 
 function getOptions() {
diff --git a/src/services/app_info.ts b/src/services/app_info.ts
index 63c4edfcd..16d45cb37 100644
--- a/src/services/app_info.ts
+++ b/src/services/app_info.ts
@@ -5,7 +5,7 @@ import build from "./build.js";
 import packageJson from "../../package.json" with { type: "json" };
 import dataDir from "./data_dir.js";
 
-const APP_DB_VERSION = 229;
+const APP_DB_VERSION = 230;
 const SYNC_VERSION = 35;
 const CLIPPER_PROTOCOL_VERSION = "1.0";
diff --git a/src/services/hidden_subtree.ts b/src/services/hidden_subtree.ts
index 7d2da744d..050ac88da 100644
--- a/src/services/hidden_subtree.ts
+++ b/src/services/hidden_subtree.ts
@@ -274,6 +274,7 @@ function buildHiddenSubtreeDefinition(helpSubtree: HiddenSubtreeItem[]): HiddenS
                 { id: "_optionsEtapi", title: t("hidden-subtree.etapi-title"), type: "contentWidget", icon: "bx-extension" },
                 { id: "_optionsBackup", title: t("hidden-subtree.backup-title"), type: "contentWidget", icon: "bx-data" },
                 { id: "_optionsSync", title: t("hidden-subtree.sync-title"), type: "contentWidget", icon: "bx-wifi" },
+                { id: "_optionsAi", title: t("hidden-subtree.ai-llm-title"), type: "contentWidget", icon: "bx-bot" },
                 { id: "_optionsOther", title: t("hidden-subtree.other"), type: "contentWidget", icon: "bx-dots-horizontal" },
                 { id: "_optionsAdvanced", title: t("hidden-subtree.advanced-title"), type: "contentWidget" }
             ]
diff --git a/src/services/llm/embeddings/base_embeddings.ts b/src/services/llm/embeddings/base_embeddings.ts
new file mode 100644
index 000000000..86cd56618
--- /dev/null
+++ b/src/services/llm/embeddings/base_embeddings.ts
@@ -0,0 +1,77 @@
+import type { EmbeddingProvider, EmbeddingConfig, NoteEmbeddingContext } from './embeddings_interface.js';
+
+/**
+ * Base class that implements common functionality for embedding providers
+ */
+export abstract class BaseEmbeddingProvider implements EmbeddingProvider {
+    abstract name: string;
+    protected config: EmbeddingConfig;
+
+    constructor(config: EmbeddingConfig) {
+        this.config = config;
+    }
+
+    getConfig(): EmbeddingConfig {
+        return this.config;
+    }
+
+    abstract generateEmbeddings(text: string): Promise<Float32Array>;
+    abstract generateBatchEmbeddings(texts: string[]): Promise<Float32Array[]>;
+
+    /**
+     * Generates a rich text representation of a note's context for embedding
+     */
+    protected generateNoteContextText(context: NoteEmbeddingContext): string {
+        const parts = [
+            `Title: ${context.title}`,
+            `Type: ${context.type}`,
+            `MIME: ${context.mime}`,
+            `Created: ${context.dateCreated}`,
+            `Modified: ${context.dateModified}`
+        ];
+
+        if (context.attributes.length > 0) {
+            parts.push('Attributes:');
+            for (const attr of context.attributes) {
+                parts.push(` ${attr.type} - ${attr.name}: ${attr.value}`);
+            }
+        }
+
+        if (context.parentTitles.length > 0) {
+            parts.push('Parent Notes:');
+            parts.push(...context.parentTitles.map(t => ` ${t}`));
+        }
+
+        if (context.childTitles.length > 0) {
+            parts.push('Child Notes:');
+            parts.push(...context.childTitles.map(t => ` ${t}`));
+        }
+
+        if (context.attachments.length > 0) {
+            parts.push('Attachments:');
+            for (const att of context.attachments) {
+                parts.push(` ${att.title} (${att.mime})`);
+            }
+        }
+
+        parts.push('Content:', context.content);
+
+        return parts.join('\n');
+    }
+
+    /**
+     * Default implementation that converts note context to text and generates embeddings
+     */
+    async generateNoteEmbeddings(context: NoteEmbeddingContext): Promise<Float32Array> {
+        const text = this.generateNoteContextText(context);
+        return this.generateEmbeddings(text);
+    }
+
+    /**
+     * Default implementation that processes notes in batch
+     */
+    async generateBatchNoteEmbeddings(contexts: NoteEmbeddingContext[]): Promise<Float32Array[]> {
+        const texts = contexts.map(ctx => this.generateNoteContextText(ctx));
+        return this.generateBatchEmbeddings(texts);
+    }
+}
diff --git a/src/services/llm/embeddings/embeddings_interface.ts b/src/services/llm/embeddings/embeddings_interface.ts
new file mode 100644
index 000000000..8f320e066
--- /dev/null
+++ b/src/services/llm/embeddings/embeddings_interface.ts
@@ -0,0 +1,65 @@
+import type { NoteType, AttributeType } from "../../../becca/entities/rows.js";
+
+/**
+ * Represents the context of a note that will be embedded
+ */
+export interface NoteEmbeddingContext {
+    noteId: string;
+    title: string;
+    content: string;
+    type: NoteType;
+    mime: string;
+    dateCreated: string;
+    dateModified: string;
+    attributes: {
+        type: AttributeType;
+        name: string;
+        value: string;
+    }[];
+    parentTitles: string[];
+    childTitles: string[];
+    attachments: {
+        title: string;
+        mime: string;
+    }[];
+}
+
+/**
+ * Configuration for how embeddings should be generated
+ */
+export interface EmbeddingConfig {
+    model: string;
+    dimension: number;
+    type: 'float32' | 'float64';
+    normalize?: boolean;
+    batchSize?: number;
+    contextWindowSize?: number;
+}
+
+/**
+ * Core interface that all embedding providers must implement
+ */
+export interface EmbeddingProvider {
+    name: string;
+    getConfig(): EmbeddingConfig;
+
+    /**
+     * Generate embeddings for a single piece of text
+     */
+    generateEmbeddings(text: string): Promise<Float32Array>;
+
+    /**
+     * Generate embeddings for multiple pieces of text in batch
+     */
+    generateBatchEmbeddings(texts: string[]): Promise<Float32Array[]>;
+
+    /**
+     * Generate embeddings for a note with its full context
+     */
+    generateNoteEmbeddings(context: NoteEmbeddingContext): Promise<Float32Array>;
+
+    /**
+     * Generate embeddings for multiple notes with their contexts in batch
+     */
+    generateBatchNoteEmbeddings(contexts: NoteEmbeddingContext[]): Promise<Float32Array[]>;
+}
diff --git a/src/services/options_interface.ts b/src/services/options_interface.ts
index d8d8c3fcb..eeda8a202 100644
--- a/src/services/options_interface.ts
+++ b/src/services/options_interface.ts
@@ -46,6 +46,21 @@ export interface OptionDefinitions extends KeyboardShortcutsOptions