start from scratch again

This commit is contained in:
  parent 67509bc92f
  commit e09e15ad05
db/migrations/0230__ai_llm_options.sql (new file, 22 lines)
@@ -0,0 +1,22 @@
-- Add new options for AI/LLM integration
INSERT INTO options (name, value, isSynced) VALUES ('aiEnabled', 'false', 1);

-- OpenAI settings
INSERT INTO options (name, value, isSynced) VALUES ('openaiApiKey', '', 1);
INSERT INTO options (name, value, isSynced) VALUES ('openaiDefaultModel', 'gpt-4o', 1);
INSERT INTO options (name, value, isSynced) VALUES ('openaiBaseUrl', 'https://api.openai.com/v1', 1);

-- Anthropic settings
INSERT INTO options (name, value, isSynced) VALUES ('anthropicApiKey', '', 1);
INSERT INTO options (name, value, isSynced) VALUES ('anthropicDefaultModel', 'claude-3-opus-20240229', 1);
INSERT INTO options (name, value, isSynced) VALUES ('anthropicBaseUrl', 'https://api.anthropic.com/v1', 1);

-- Ollama settings
INSERT INTO options (name, value, isSynced) VALUES ('ollamaEnabled', 'false', 1);
INSERT INTO options (name, value, isSynced) VALUES ('ollamaBaseUrl', 'http://localhost:11434', 1);
INSERT INTO options (name, value, isSynced) VALUES ('ollamaDefaultModel', 'llama3', 1);

-- General AI settings
INSERT INTO options (name, value, isSynced) VALUES ('aiProviderPrecedence', 'openai,anthropic,ollama', 1);
INSERT INTO options (name, value, isSynced) VALUES ('aiTemperature', '0.7', 1);
INSERT INTO options (name, value, isSynced) VALUES ('aiSystemPrompt', '', 1);
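These seed rows store every AI option as a plain string in the options table, so consumers have to parse them before use. A minimal TypeScript sketch (not part of this commit; the helper names are hypothetical) of how aiProviderPrecedence and aiTemperature might be turned into usable values:

// Hypothetical helpers: the option values above are plain strings,
// so a consumer has to split/parse them before use.
type AiProvider = "openai" | "anthropic" | "ollama";

function parseProviderPrecedence(raw: string): AiProvider[] {
    // 'openai,anthropic,ollama' -> ['openai', 'anthropic', 'ollama']
    return raw
        .split(",")
        .map(p => p.trim())
        .filter((p): p is AiProvider =>
            p === "openai" || p === "anthropic" || p === "ollama");
}

function parseTemperature(raw: string): number {
    const value = Number.parseFloat(raw);
    // Fall back to the seeded default of 0.7 on malformed input.
    return Number.isFinite(value) ? value : 0.7;
}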
@@ -37,6 +37,7 @@ import LocalizationOptions from "./options/appearance/i18n.js";
import CodeBlockOptions from "./options/appearance/code_block.js";
import EditorOptions from "./options/text_notes/editor.js";
import ShareSettingsOptions from "./options/other/share_settings.js";
import AiSettingsOptions from "./options/ai_settings.js";
import type FNote from "../../entities/fnote.js";
import type NoteContextAwareWidget from "../note_context_aware_widget.js";

@@ -70,6 +71,7 @@ const CONTENT_WIDGETS: Record<string, (typeof NoteContextAwareWidget)[]> = {
    _optionsEtapi: [EtapiOptions],
    _optionsBackup: [BackupOptions],
    _optionsSync: [SyncOptions],
    _optionsAi: [AiSettingsOptions],
    _optionsOther: [
        SearchEngineOptions,
        TrayOptions,
@@ -0,0 +1,3 @@
import AiSettingsWidget from "./ai_settings.ts";

export default AiSettingsWidget;
src/public/app/widgets/type_widgets/options/ai_settings.ts (new file, 216 lines)
@@ -0,0 +1,216 @@
import OptionsWidget from "./options_widget.js";
import { t } from "../../../services/i18n.js";
import type { FilterOptionsByType, OptionMap } from "../../../../../services/options_interface.js";

export default class AiSettingsWidget extends OptionsWidget {
    doRender() {
        this.$widget = $(`
            <div class="options-section">
                <h4>${t("ai_llm.title")}</h4>

                <div class="form-group">
                    <label>
                        <input class="ai-enabled" type="checkbox">
                        ${t("ai_llm.enable_ai_features")}
                    </label>
                    <div class="help-text">${t("ai_llm.enable_ai_description")}</div>
                </div>

                <hr />

                <div class="ai-providers-section">
                    <h5>${t("ai_llm.provider_configuration")}</h5>

                    <div class="form-group">
                        <label>${t("ai_llm.provider_precedence")}</label>
                        <input class="ai-provider-precedence form-control" type="text">
                        <div class="help-text">${t("ai_llm.provider_precedence_description")}</div>
                    </div>

                    <div class="form-group">
                        <label>${t("ai_llm.temperature")}</label>
                        <input class="ai-temperature form-control" type="number" min="0" max="2" step="0.1">
                        <div class="help-text">${t("ai_llm.temperature_description")}</div>
                    </div>

                    <div class="form-group">
                        <label>${t("ai_llm.system_prompt")}</label>
                        <textarea class="ai-system-prompt form-control" rows="3"></textarea>
                        <div class="help-text">${t("ai_llm.system_prompt_description")}</div>
                    </div>
                </div>

                <hr />

                <div class="ai-provider">
                    <h5>${t("ai_llm.openai_configuration")}</h5>

                    <div class="form-group">
                        <label>${t("ai_llm.api_key")}</label>
                        <input class="openai-api-key form-control" type="password">
                    </div>

                    <div class="form-group">
                        <label>${t("ai_llm.default_model")}</label>
                        <input class="openai-default-model form-control" type="text">
                        <div class="help-text">${t("ai_llm.openai_model_description")}</div>
                    </div>

                    <div class="form-group">
                        <label>${t("ai_llm.base_url")}</label>
                        <input class="openai-base-url form-control" type="text">
                        <div class="help-text">${t("ai_llm.openai_url_description")}</div>
                    </div>
                </div>

                <hr />

                <div class="ai-provider">
                    <h5>${t("ai_llm.anthropic_configuration")}</h5>

                    <div class="form-group">
                        <label>${t("ai_llm.api_key")}</label>
                        <input class="anthropic-api-key form-control" type="password">
                    </div>

                    <div class="form-group">
                        <label>${t("ai_llm.default_model")}</label>
                        <input class="anthropic-default-model form-control" type="text">
                        <div class="help-text">${t("ai_llm.anthropic_model_description")}</div>
                    </div>

                    <div class="form-group">
                        <label>${t("ai_llm.base_url")}</label>
                        <input class="anthropic-base-url form-control" type="text">
                        <div class="help-text">${t("ai_llm.anthropic_url_description")}</div>
                    </div>
                </div>

                <hr />

                <div class="ai-provider">
                    <h5>${t("ai_llm.ollama_configuration")}</h5>

                    <div class="form-group">
                        <label>
                            <input class="ollama-enabled" type="checkbox">
                            ${t("ai_llm.enable_ollama")}
                        </label>
                        <div class="help-text">${t("ai_llm.enable_ollama_description")}</div>
                    </div>

                    <div class="form-group">
                        <label>${t("ai_llm.base_url")}</label>
                        <input class="ollama-base-url form-control" type="text">
                        <div class="help-text">${t("ai_llm.ollama_url_description")}</div>
                    </div>

                    <div class="form-group">
                        <label>${t("ai_llm.default_model")}</label>
                        <input class="ollama-default-model form-control" type="text">
                        <div class="help-text">${t("ai_llm.ollama_model_description")}</div>
                    </div>
                </div>
            </div>`);

        const $aiEnabled = this.$widget.find('.ai-enabled');
        $aiEnabled.on('change', async () => {
            await this.updateOption('aiEnabled', $aiEnabled.prop('checked') ? "true" : "false");
            this.updateAiSectionVisibility();
        });

        const $ollamaEnabled = this.$widget.find('.ollama-enabled');
        $ollamaEnabled.on('change', async () => {
            await this.updateOption('ollamaEnabled', $ollamaEnabled.prop('checked') ? "true" : "false");
        });

        const $aiProviderPrecedence = this.$widget.find('.ai-provider-precedence');
        $aiProviderPrecedence.on('change', async () => {
            await this.updateOption('aiProviderPrecedence', $aiProviderPrecedence.val() as string);
        });

        const $aiTemperature = this.$widget.find('.ai-temperature');
        $aiTemperature.on('change', async () => {
            await this.updateOption('aiTemperature', $aiTemperature.val() as string);
        });

        const $aiSystemPrompt = this.$widget.find('.ai-system-prompt');
        $aiSystemPrompt.on('change', async () => {
            await this.updateOption('aiSystemPrompt', $aiSystemPrompt.val() as string);
        });

        const $openaiApiKey = this.$widget.find('.openai-api-key');
        $openaiApiKey.on('change', async () => {
            await this.updateOption('openaiApiKey', $openaiApiKey.val() as string);
        });

        const $openaiDefaultModel = this.$widget.find('.openai-default-model');
        $openaiDefaultModel.on('change', async () => {
            await this.updateOption('openaiDefaultModel', $openaiDefaultModel.val() as string);
        });

        const $openaiBaseUrl = this.$widget.find('.openai-base-url');
        $openaiBaseUrl.on('change', async () => {
            await this.updateOption('openaiBaseUrl', $openaiBaseUrl.val() as string);
        });

        const $anthropicApiKey = this.$widget.find('.anthropic-api-key');
        $anthropicApiKey.on('change', async () => {
            await this.updateOption('anthropicApiKey', $anthropicApiKey.val() as string);
        });

        const $anthropicDefaultModel = this.$widget.find('.anthropic-default-model');
        $anthropicDefaultModel.on('change', async () => {
            await this.updateOption('anthropicDefaultModel', $anthropicDefaultModel.val() as string);
        });

        const $anthropicBaseUrl = this.$widget.find('.anthropic-base-url');
        $anthropicBaseUrl.on('change', async () => {
            await this.updateOption('anthropicBaseUrl', $anthropicBaseUrl.val() as string);
        });

        const $ollamaBaseUrl = this.$widget.find('.ollama-base-url');
        $ollamaBaseUrl.on('change', async () => {
            await this.updateOption('ollamaBaseUrl', $ollamaBaseUrl.val() as string);
        });

        const $ollamaDefaultModel = this.$widget.find('.ollama-default-model');
        $ollamaDefaultModel.on('change', async () => {
            await this.updateOption('ollamaDefaultModel', $ollamaDefaultModel.val() as string);
        });

        return this.$widget;
    }

    updateAiSectionVisibility() {
        if (!this.$widget) return;

        const aiEnabled = this.$widget.find('.ai-enabled').prop('checked');
        this.$widget.find('.ai-providers-section').toggle(aiEnabled);
        this.$widget.find('.ai-provider').toggle(aiEnabled);
    }

    optionsLoaded(options: OptionMap) {
        if (!this.$widget) return;

        this.setCheckboxState(this.$widget.find('.ai-enabled'), options.aiEnabled);
        this.setCheckboxState(this.$widget.find('.ollama-enabled'), options.ollamaEnabled);

        this.$widget.find('.ai-provider-precedence').val(options.aiProviderPrecedence);
        this.$widget.find('.ai-temperature').val(options.aiTemperature);
        this.$widget.find('.ai-system-prompt').val(options.aiSystemPrompt);

        this.$widget.find('.openai-api-key').val(options.openaiApiKey);
        this.$widget.find('.openai-default-model').val(options.openaiDefaultModel);
        this.$widget.find('.openai-base-url').val(options.openaiBaseUrl);

        this.$widget.find('.anthropic-api-key').val(options.anthropicApiKey);
        this.$widget.find('.anthropic-default-model').val(options.anthropicDefaultModel);
        this.$widget.find('.anthropic-base-url').val(options.anthropicBaseUrl);

        this.$widget.find('.ollama-base-url').val(options.ollamaBaseUrl);
        this.$widget.find('.ollama-default-model').val(options.ollamaDefaultModel);

        this.updateAiSectionVisibility();
    }
}
@@ -1120,6 +1120,32 @@
        "layout-vertical-description": "launcher bar is on the left (default)",
        "layout-horizontal-description": "launcher bar is underneath the tab bar, the tab bar is now full width."
    },
    "ai_llm": {
        "title": "AI/LLM Integration",
        "enable_ai_features": "Enable AI/LLM features",
        "enable_ai_description": "Enable AI features like note summarization, content generation, and other LLM capabilities",
        "provider_configuration": "AI Provider Configuration",
        "provider_precedence": "Provider Precedence",
        "provider_precedence_description": "Comma-separated list of providers in order of precedence (e.g., 'openai,anthropic,ollama')",
        "temperature": "Temperature",
        "temperature_description": "Controls randomness in responses (0 = deterministic, 2 = maximum randomness)",
        "system_prompt": "System Prompt",
        "system_prompt_description": "Default system prompt used for all AI interactions",
        "openai_configuration": "OpenAI Configuration",
        "api_key": "API Key",
        "default_model": "Default Model",
        "openai_model_description": "Examples: gpt-4o, gpt-4-turbo, gpt-3.5-turbo",
        "base_url": "Base URL",
        "openai_url_description": "Default: https://api.openai.com/v1",
        "anthropic_configuration": "Anthropic Configuration",
        "anthropic_model_description": "Examples: claude-3-opus-20240229, claude-3-sonnet-20240229",
        "anthropic_url_description": "Default: https://api.anthropic.com/v1",
        "ollama_configuration": "Ollama Configuration",
        "enable_ollama": "Enable Ollama",
        "enable_ollama_description": "Enable Ollama for local AI model usage",
        "ollama_url_description": "Default: http://localhost:11434",
        "ollama_model_description": "Examples: llama3, mistral, phi3"
    },
    "zoom_factor": {
        "title": "Zoom Factor (desktop build only)",
        "description": "Zooming can be controlled with CTRL+- and CTRL+= shortcuts as well."
@@ -77,7 +77,21 @@ const ALLOWED_OPTIONS = new Set([
    "backgroundEffects",
    "allowedHtmlTags",
    "redirectBareDomain",
-   "showLoginInShareTheme"
+   "showLoginInShareTheme",
    // AI/LLM integration options
    "aiEnabled",
    "openaiApiKey",
    "openaiDefaultModel",
    "openaiBaseUrl",
    "anthropicApiKey",
    "anthropicDefaultModel",
    "anthropicBaseUrl",
    "ollamaEnabled",
    "ollamaBaseUrl",
    "ollamaDefaultModel",
    "aiProviderPrecedence",
    "aiTemperature",
    "aiSystemPrompt"
]);

function getOptions() {
@@ -5,7 +5,7 @@ import build from "./build.js";
import packageJson from "../../package.json" with { type: "json" };
import dataDir from "./data_dir.js";

-const APP_DB_VERSION = 229;
+const APP_DB_VERSION = 230;
const SYNC_VERSION = 35;
const CLIPPER_PROTOCOL_VERSION = "1.0";

@@ -274,6 +274,7 @@ function buildHiddenSubtreeDefinition(helpSubtree: HiddenSubtreeItem[]): HiddenS
            { id: "_optionsEtapi", title: t("hidden-subtree.etapi-title"), type: "contentWidget", icon: "bx-extension" },
            { id: "_optionsBackup", title: t("hidden-subtree.backup-title"), type: "contentWidget", icon: "bx-data" },
            { id: "_optionsSync", title: t("hidden-subtree.sync-title"), type: "contentWidget", icon: "bx-wifi" },
            { id: "_optionsAi", title: t("hidden-subtree.ai-llm-title"), type: "contentWidget", icon: "bx-bot" },
            { id: "_optionsOther", title: t("hidden-subtree.other"), type: "contentWidget", icon: "bx-dots-horizontal" },
            { id: "_optionsAdvanced", title: t("hidden-subtree.advanced-title"), type: "contentWidget" }
        ]
src/services/llm/embeddings/base_embeddings.ts (new file, 77 lines)
@@ -0,0 +1,77 @@
import type { EmbeddingProvider, EmbeddingConfig, NoteEmbeddingContext } from './embeddings_interface.js';

/**
 * Base class that implements common functionality for embedding providers
 */
export abstract class BaseEmbeddingProvider implements EmbeddingProvider {
    abstract name: string;
    protected config: EmbeddingConfig;

    constructor(config: EmbeddingConfig) {
        this.config = config;
    }

    getConfig(): EmbeddingConfig {
        return this.config;
    }

    abstract generateEmbeddings(text: string): Promise<Float32Array>;
    abstract generateBatchEmbeddings(texts: string[]): Promise<Float32Array[]>;

    /**
     * Generates a rich text representation of a note's context for embedding
     */
    protected generateNoteContextText(context: NoteEmbeddingContext): string {
        const parts = [
            `Title: ${context.title}`,
            `Type: ${context.type}`,
            `MIME: ${context.mime}`,
            `Created: ${context.dateCreated}`,
            `Modified: ${context.dateModified}`
        ];

        if (context.attributes.length > 0) {
            parts.push('Attributes:');
            for (const attr of context.attributes) {
                parts.push(` ${attr.type} - ${attr.name}: ${attr.value}`);
            }
        }

        if (context.parentTitles.length > 0) {
            parts.push('Parent Notes:');
            parts.push(...context.parentTitles.map(t => ` ${t}`));
        }

        if (context.childTitles.length > 0) {
            parts.push('Child Notes:');
            parts.push(...context.childTitles.map(t => ` ${t}`));
        }

        if (context.attachments.length > 0) {
            parts.push('Attachments:');
            for (const att of context.attachments) {
                parts.push(` ${att.title} (${att.mime})`);
            }
        }

        parts.push('Content:', context.content);

        return parts.join('\n');
    }

    /**
     * Default implementation that converts note context to text and generates embeddings
     */
    async generateNoteEmbeddings(context: NoteEmbeddingContext): Promise<Float32Array> {
        const text = this.generateNoteContextText(context);
        return this.generateEmbeddings(text);
    }

    /**
     * Default implementation that processes notes in batch
     */
    async generateBatchNoteEmbeddings(contexts: NoteEmbeddingContext[]): Promise<Float32Array[]> {
        const texts = contexts.map(ctx => this.generateNoteContextText(ctx));
        return this.generateBatchEmbeddings(texts);
    }
}
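The commit adds only this abstract base and the interfaces below; no concrete provider is included yet. A minimal sketch of how a subclass might look, assuming a hypothetical Ollama-style HTTP endpoint (the class name, URL path, and payload shape are illustrative, not part of this commit):

import { BaseEmbeddingProvider } from './base_embeddings.js';
import type { EmbeddingConfig } from './embeddings_interface.js';

// Illustrative subclass; the endpoint and request/response shapes are assumptions.
export class ExampleHttpEmbeddingProvider extends BaseEmbeddingProvider {
    name = "example-http";

    constructor(config: EmbeddingConfig, private baseUrl: string) {
        super(config);
    }

    async generateEmbeddings(text: string): Promise<Float32Array> {
        const response = await fetch(`${this.baseUrl}/api/embeddings`, {
            method: "POST",
            headers: { "Content-Type": "application/json" },
            body: JSON.stringify({ model: this.config.model, prompt: text })
        });
        const data = await response.json() as { embedding: number[] };
        return new Float32Array(data.embedding);
    }

    async generateBatchEmbeddings(texts: string[]): Promise<Float32Array[]> {
        // Naive batching: one request per text, sequential to keep the sketch simple.
        const results: Float32Array[] = [];
        for (const text of texts) {
            results.push(await this.generateEmbeddings(text));
        }
        return results;
    }
}

With this shape, generateNoteEmbeddings and generateBatchNoteEmbeddings come for free from the base class, which first flattens the note context into text via generateNoteContextText.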
src/services/llm/embeddings/embeddings_interface.ts (new file, 65 lines)
@@ -0,0 +1,65 @@
import type { NoteType, AttributeType } from "../../../becca/entities/rows.js";

/**
 * Represents the context of a note that will be embedded
 */
export interface NoteEmbeddingContext {
    noteId: string;
    title: string;
    content: string;
    type: NoteType;
    mime: string;
    dateCreated: string;
    dateModified: string;
    attributes: {
        type: AttributeType;
        name: string;
        value: string;
    }[];
    parentTitles: string[];
    childTitles: string[];
    attachments: {
        title: string;
        mime: string;
    }[];
}

/**
 * Configuration for how embeddings should be generated
 */
export interface EmbeddingConfig {
    model: string;
    dimension: number;
    type: 'float32' | 'float64';
    normalize?: boolean;
    batchSize?: number;
    contextWindowSize?: number;
}

/**
 * Core interface that all embedding providers must implement
 */
export interface EmbeddingProvider {
    name: string;
    getConfig(): EmbeddingConfig;

    /**
     * Generate embeddings for a single piece of text
     */
    generateEmbeddings(text: string): Promise<Float32Array>;

    /**
     * Generate embeddings for multiple pieces of text in batch
     */
    generateBatchEmbeddings(texts: string[]): Promise<Float32Array[]>;

    /**
     * Generate embeddings for a note with its full context
     */
    generateNoteEmbeddings(context: NoteEmbeddingContext): Promise<Float32Array>;

    /**
     * Generate embeddings for multiple notes with their contexts in batch
     */
    generateBatchNoteEmbeddings(contexts: NoteEmbeddingContext[]): Promise<Float32Array[]>;
}
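For orientation only, a hedged example of what calling a provider against these interfaces might look like; the note data is made up and the provider instance refers to the illustrative subclass sketched above:

import type { NoteEmbeddingContext } from './embeddings_interface.js';

// Hypothetical usage: build a context by hand and embed it.
const context: NoteEmbeddingContext = {
    noteId: "abc123",
    title: "Meeting notes",
    content: "Discussed the Q3 roadmap and release timeline.",
    type: "text",
    mime: "text/html",
    dateCreated: "2025-03-01 10:00:00.000+0000",
    dateModified: "2025-03-02 09:30:00.000+0000",
    attributes: [{ type: "label", name: "project", value: "roadmap" }],
    parentTitles: ["Work"],
    childTitles: [],
    attachments: []
};

// `provider` is any EmbeddingProvider implementation:
// const embedding = await provider.generateNoteEmbeddings(context);
// embedding.length should match provider.getConfig().dimension.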
@@ -46,6 +46,21 @@ export interface OptionDefinitions extends KeyboardShortcutsOptions<KeyboardActi
    passwordDerivedKeySalt: string;
    encryptedDataKey: string;

    // AI/LLM integration options
    aiEnabled: string;
    openaiApiKey: string;
    openaiDefaultModel: string;
    openaiBaseUrl: string;
    anthropicApiKey: string;
    anthropicDefaultModel: string;
    anthropicBaseUrl: string;
    ollamaEnabled: string;
    ollamaBaseUrl: string;
    ollamaDefaultModel: string;
    aiProviderPrecedence: string;
    aiTemperature: string;
    aiSystemPrompt: string;

    lastSyncedPull: number;
    lastSyncedPush: number;
    revisionSnapshotTimeInterval: number;
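Note that all of the new AI options are declared as string, including the two toggles that the settings widget writes as "true"/"false". A small hedged sketch of how a caller might coerce them (the helper name is hypothetical; Trilium's options service may already provide an equivalent):

// Hypothetical helper: aiEnabled / ollamaEnabled are stored as the
// strings "true" / "false", so consumers must convert explicitly.
function optionToBoolean(value: string): boolean {
    return value === "true";
}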
@@ -241,6 +241,7 @@
        "etapi-title": "ETAPI",
        "backup-title": "Backup",
        "sync-title": "Sync",
        "ai-llm-title": "AI/LLM",
        "other": "Other",
        "advanced-title": "Advanced",
        "visible-launchers-title": "Visible Launchers",