Mirror of https://github.com/TriliumNext/Notes.git (synced 2025-07-27 10:02:59 +08:00)

Commit 56cf993f46: Merge remote-tracking branch 'origin/develop' into patch-2
@@ -128,7 +128,7 @@ Download the repository, install dependencies using `pnpm` and then run the environment
 git clone https://github.com/TriliumNext/Notes.git
 cd Notes
 pnpm install
-pnpm nx run edit-docs:serve
+pnpm nx run edit-docs:edit-docs
 ```
 
 ### Building the Executable
@@ -328,7 +328,7 @@ button kbd {
     --bs-dropdown-zindex: 999;
 }
 
-body.desktop .dropdown-menu {
+body.desktop .dropdown-menu.show {
     border: 1px solid var(--dropdown-border-color);
     box-shadow: 0px 10px 20px rgba(0, 0, 0, var(--dropdown-shadow-opacity));
     animation: dropdown-menu-opening 100ms ease-in;
@@ -89,7 +89,7 @@
  * supported when this class is used.
  */
 
-.dropdown-menu:not(.static) {
+.dropdown-menu.show:not(.static) {
     border-radius: var(--dropdown-border-radius);
     padding: var(--menu-padding-size) !important;
     font-size: 0.9rem !important;
@@ -1,27 +0,0 @@
-import { t } from "../../services/i18n.js";
-import options from "../../services/options.js";
-import CommandButtonWidget from "./command_button.js";
-
-export default class CreateAiChatButton extends CommandButtonWidget {
-    constructor() {
-        super();
-
-        this.icon("bx bx-bot")
-            .title(t("ai.create_new_ai_chat"))
-            .titlePlacement("bottom")
-            .command("createAiChat")
-            .class("icon-action");
-    }
-
-    isEnabled() {
-        return options.get("aiEnabled") === "true";
-    }
-
-    async refreshWithNote() {
-        if (this.isEnabled()) {
-            this.$widget.show();
-        } else {
-            this.$widget.hide();
-        }
-    }
-}
@@ -8,7 +8,10 @@ export default class CreatePaneButton extends OnClickButtonWidget {
         this.icon("bx-dock-right")
             .title(t("create_pane_button.create_new_split"))
            .titlePlacement("bottom")
-            .onClick((widget) => widget.triggerCommand("openNewNoteSplit", { ntxId: widget.getClosestNtxId() }))
+            .onClick((widget, e) => {
+                widget.triggerCommand("openNewNoteSplit", { ntxId: widget.getClosestNtxId() });
+                e.stopPropagation();
+            })
             .class("icon-action");
     }
 }
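The two-argument `onClick` above lets the handler call `e.stopPropagation()`, so the click that opens the split is not also handled by ancestor elements. A minimal sketch of the same pattern with plain DOM APIs and hypothetical element IDs (not the widget API above):

```ts
// Minimal sketch: stop a child button's click from reaching a parent handler.
// "pane" and "split-button" are hypothetical IDs used only for illustration.
const pane = document.getElementById("pane")!;
const splitButton = document.getElementById("split-button")!;

pane.addEventListener("click", () => console.log("pane clicked"));

splitButton.addEventListener("click", (e) => {
    console.log("opening a new split");
    e.stopPropagation(); // without this, "pane clicked" would also fire
});
```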
@@ -186,7 +186,7 @@ export default class NoteActionsWidget extends NoteContextAwareWidget {
 
         this.$convertNoteIntoAttachmentButton.toggle(note.isEligibleForConversionToAttachment());
 
-        this.toggleDisabled(this.$findInTextButton, ["text", "code", "book"].includes(note.type));
+        this.toggleDisabled(this.$findInTextButton, ["text", "code", "book", "mindMap"].includes(note.type));
 
         this.toggleDisabled(this.$showAttachmentsButton, !isInOptions);
         this.toggleDisabled(this.$showSourceButton, ["text", "code", "relationMap", "mermaid", "canvas", "mindMap", "geoMap"].includes(note.type));
@@ -188,7 +188,7 @@ export default class FindWidget extends NoteContextAwareWidget {
             return;
         }
 
-        if (!["text", "code", "render"].includes(this.note?.type ?? "")) {
+        if (!["text", "code", "render", "mindMap"].includes(this.note?.type ?? "")) {
             return;
         }
 
@@ -250,6 +250,8 @@ export default class FindWidget extends NoteContextAwareWidget {
             case "text":
                 const readOnly = await this.noteContext?.isReadOnly();
                 return readOnly ? this.htmlHandler : this.textHandler;
+            case "mindMap":
+                return this.htmlHandler;
             default:
                 console.warn("FindWidget: Unsupported note type for find widget", this.note?.type);
         }
@@ -352,7 +354,7 @@
     }
 
     isEnabled() {
-        return super.isEnabled() && ["text", "code", "render"].includes(this.note?.type ?? "");
+        return super.isEnabled() && ["text", "code", "render", "mindMap"].includes(this.note?.type ?? "");
     }
 
     async entitiesReloadedEvent({ loadResults }: EventData<"entitiesReloaded">) {
@@ -85,7 +85,7 @@ export default class FindInHtml {
         if (this.$results?.length) {
             const $current = this.$results.eq(this.currentIndex);
             this.$results.removeClass(FIND_RESULT_SELECTED_CSS_CLASSNAME);
-            $current[0].scrollIntoView();
+            $current[0].scrollIntoView({ block: 'center', inline: 'center'});
             $current.addClass(FIND_RESULT_SELECTED_CSS_CLASSNAME);
         }
     }
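`{ block: 'center', inline: 'center' }` is the standard `Element.scrollIntoView()` options form; it centers the highlighted match in the scroll container instead of snapping it to the nearest edge. A minimal sketch with a hypothetical selector:

```ts
// Minimal sketch: center the currently selected match in the viewport.
// ".find-result.selected" is a hypothetical selector used only for illustration.
const selected = document.querySelector<HTMLElement>(".find-result.selected");

selected?.scrollIntoView({
    block: "center",  // vertical alignment inside the scroll container
    inline: "center"  // horizontal alignment inside the scroll container
});
```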
@@ -378,16 +378,45 @@ export default class TabRowWidget extends BasicWidget {
     }
 
     scrollTabContainer(direction: number, behavior: ScrollBehavior = "smooth") {
-        const currentScrollLeft = this.$tabScrollingContainer[0]?.scrollLeft;
-        this.$tabScrollingContainer[0].scrollTo({
-            left: currentScrollLeft + direction,
+        this.$tabScrollingContainer[0].scrollBy({
+            left: direction,
             behavior
         });
     };
 
     setupScrollEvents() {
-        this.$tabScrollingContainer[0].addEventListener('wheel', (event) => {
-            this.scrollTabContainer(event.deltaY * 1.5);
+        let deltaX = 0;
+        let isScrolling = false;
+        const stepScroll = () => {
+            if (Math.abs(deltaX) > 5) {
+                const step = Math.round(deltaX * 0.2);
+                deltaX -= step;
+                this.scrollTabContainer(step, "instant");
+                requestAnimationFrame(stepScroll);
+            } else {
+                this.scrollTabContainer(deltaX, "instant");
+                deltaX = 0;
+                isScrolling = false;
+            }
+        };
+        this.$tabScrollingContainer[0].addEventListener('wheel', async (event) => {
+            if (!event.shiftKey && event.deltaX === 0) {
+                event.preventDefault();
+                // Clamp deltaX between TAB_CONTAINER_MIN_WIDTH and TAB_CONTAINER_MIN_WIDTH * 3
+                deltaX += Math.sign(event.deltaY) * Math.max(Math.min(Math.abs(event.deltaY), TAB_CONTAINER_MIN_WIDTH * 3), TAB_CONTAINER_MIN_WIDTH);
+                if (!isScrolling) {
+                    isScrolling = true;
+                    stepScroll();
+                }
+            } else if (event.shiftKey) {
+                event.preventDefault();
+                if (event.deltaY > 0) {
+                    await appContext.tabManager.activateNextTabCommand();
+                } else {
+                    await appContext.tabManager.activatePreviousTabCommand();
+                }
+                this.activeTabEl.scrollIntoView();
+            }
         });
 
         this.$scrollButtonLeft[0].addEventListener('click', () => this.scrollTabContainer(-200));
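The added wheel handler accumulates the wheel delta and drains it in `requestAnimationFrame` steps (roughly 20% of the remainder per frame), which gives a smooth, interruptible scroll without relying on the browser's built-in smooth-scroll animation. A stand-alone sketch of the same accumulate-and-drain pattern, assuming a hypothetical scrollable element (not the widget's actual API):

```ts
// Minimal sketch of the accumulate-and-drain scrolling pattern, assuming a
// hypothetical horizontally scrollable element with id "tab-strip".
const container = document.getElementById("tab-strip")!;
let pending = 0;          // wheel distance not yet applied
let animating = false;

function drain() {
    if (Math.abs(pending) > 5) {
        const step = Math.round(pending * 0.2); // apply 20% of the remainder per frame
        pending -= step;
        container.scrollBy({ left: step, behavior: "instant" as ScrollBehavior });
        requestAnimationFrame(drain);
    } else {
        container.scrollBy({ left: pending, behavior: "instant" as ScrollBehavior });
        pending = 0;
        animating = false;
    }
}

container.addEventListener("wheel", (event) => {
    event.preventDefault();
    pending += event.deltaY;  // translate vertical wheel movement into horizontal scroll
    if (!animating) {
        animating = true;
        drain();
    }
}, { passive: false });
```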
@@ -286,4 +286,13 @@ export default class MindMapWidget extends TypeWidget {
         utils.downloadSvgAsPng(this.note.title, svg);
     }
 
+    async executeWithContentElementEvent({ resolve, ntxId }: EventData<"executeWithContentElement">) {
+        if (!this.isNoteContext(ntxId)) {
+            return;
+        }
+
+        await this.initialized;
+
+        resolve(this.$content.find('.main-node-container'));
+    }
 }
@@ -115,7 +115,7 @@ test("Search works when dismissing a tab", async ({ page, context }) => {
 
     await app.getTab(0).click();
     await app.openAndClickNoteActionMenu("Search in note");
-    await expect(app.findAndReplaceWidget).toBeVisible();
+    await expect(app.findAndReplaceWidget.first()).toBeVisible();
 });
 
 test("New tab displays workspaces", async ({ page, context }) => {
@@ -1,13 +0,0 @@
-CREATE TABLE IF NOT EXISTS "blobs" (
-    `blobId` TEXT NOT NULL,
-    `content` TEXT NULL DEFAULT NULL,
-    `dateModified` TEXT NOT NULL,
-    `utcDateModified` TEXT NOT NULL,
-    PRIMARY KEY(`blobId`)
-);
-
-ALTER TABLE notes ADD blobId TEXT DEFAULT NULL;
-ALTER TABLE note_revisions ADD blobId TEXT DEFAULT NULL;
-
-CREATE INDEX IF NOT EXISTS IDX_notes_blobId on notes (blobId);
-CREATE INDEX IF NOT EXISTS IDX_note_revisions_blobId on note_revisions (blobId);
@@ -1,4 +0,0 @@
-DROP TABLE note_contents;
-DROP TABLE note_revision_contents;
-
-DELETE FROM entity_changes WHERE entityName IN ('note_contents', 'note_revision_contents');
@@ -1,26 +0,0 @@
-CREATE TABLE IF NOT EXISTS "revisions" (`revisionId` TEXT NOT NULL PRIMARY KEY,
-    `noteId` TEXT NOT NULL,
-    type TEXT DEFAULT '' NOT NULL,
-    mime TEXT DEFAULT '' NOT NULL,
-    `title` TEXT NOT NULL,
-    `isProtected` INT NOT NULL DEFAULT 0,
-    blobId TEXT DEFAULT NULL,
-    `utcDateLastEdited` TEXT NOT NULL,
-    `utcDateCreated` TEXT NOT NULL,
-    `utcDateModified` TEXT NOT NULL,
-    `dateLastEdited` TEXT NOT NULL,
-    `dateCreated` TEXT NOT NULL);
-
-INSERT INTO revisions (revisionId, noteId, type, mime, title, isProtected, utcDateLastEdited, utcDateCreated, utcDateModified, dateLastEdited, dateCreated, blobId)
-SELECT noteRevisionId, noteId, type, mime, title, isProtected, utcDateLastEdited, utcDateCreated, utcDateModified, dateLastEdited, dateCreated, blobId FROM note_revisions;
-
-DROP TABLE note_revisions;
-
-CREATE INDEX `IDX_revisions_noteId` ON `revisions` (`noteId`);
-CREATE INDEX `IDX_revisions_utcDateCreated` ON `revisions` (`utcDateCreated`);
-CREATE INDEX `IDX_revisions_utcDateLastEdited` ON `revisions` (`utcDateLastEdited`);
-CREATE INDEX `IDX_revisions_dateCreated` ON `revisions` (`dateCreated`);
-CREATE INDEX `IDX_revisions_dateLastEdited` ON `revisions` (`dateLastEdited`);
-CREATE INDEX IF NOT EXISTS IDX_revisions_blobId on revisions (blobId);
-
-UPDATE entity_changes SET entityName = 'revisions' WHERE entityName = 'note_revisions';
@@ -1,23 +0,0 @@
-CREATE TABLE IF NOT EXISTS "attachments"
-(
-    attachmentId TEXT not null primary key,
-    ownerId TEXT not null,
-    role TEXT not null,
-    mime TEXT not null,
-    title TEXT not null,
-    isProtected INT not null DEFAULT 0,
-    position INT default 0 not null,
-    blobId TEXT DEFAULT null,
-    dateModified TEXT NOT NULL,
-    utcDateModified TEXT not null,
-    utcDateScheduledForErasureSince TEXT DEFAULT NULL,
-    isDeleted INT not null,
-    deleteId TEXT DEFAULT NULL);
-
-CREATE INDEX IDX_attachments_ownerId_role
-    on attachments (ownerId, role);
-
-CREATE INDEX IDX_attachments_utcDateScheduledForErasureSince
-    on attachments (utcDateScheduledForErasureSince);
-
-CREATE INDEX IF NOT EXISTS IDX_attachments_blobId on attachments (blobId);
@@ -1,2 +0,0 @@
-DELETE FROM options WHERE name = 'hideIncludedImages_main';
-DELETE FROM entity_changes WHERE entityName = 'options' AND entityId = 'hideIncludedImages_main';
@@ -1,2 +0,0 @@
-UPDATE options SET name = 'openNoteContexts' WHERE name = 'openTabs';
-UPDATE entity_changes SET entityId = 'openNoteContexts' WHERE entityName = 'options' AND entityId = 'openTabs';
@@ -1 +0,0 @@
-SELECT 1;
@@ -1,14 +0,0 @@
-UPDATE blobs SET blobId = REPLACE(blobId, '+', 'X');
-UPDATE blobs SET blobId = REPLACE(blobId, '/', 'Y');
-
-UPDATE notes SET blobId = REPLACE(blobId, '+', 'X');
-UPDATE notes SET blobId = REPLACE(blobId, '/', 'Y');
-
-UPDATE attachments SET blobId = REPLACE(blobId, '+', 'X');
-UPDATE attachments SET blobId = REPLACE(blobId, '/', 'Y');
-
-UPDATE revisions SET blobId = REPLACE(blobId, '+', 'X');
-UPDATE revisions SET blobId = REPLACE(blobId, '/', 'Y');
-
-UPDATE entity_changes SET entityId = REPLACE(entityId, '+', 'X') WHERE entityName = 'blobs';
-UPDATE entity_changes SET entityId = REPLACE(entityId, '/', 'Y') WHERE entityName = 'blobs';
@@ -1,3 +0,0 @@
-CREATE INDEX IF NOT EXISTS IDX_notes_blobId on notes (blobId);
-CREATE INDEX IF NOT EXISTS IDX_revisions_blobId on revisions (blobId);
-CREATE INDEX IF NOT EXISTS IDX_attachments_blobId on attachments (blobId);
@@ -1 +0,0 @@
-UPDATE attributes SET value = 'contentAndAttachmentsAndRevisionsSize' WHERE name = 'orderBy' AND value = 'noteSize';
@@ -1,2 +0,0 @@
--- emergency disabling of image compression since it appears to make problems in migration to 0.61
-UPDATE options SET value = 'false' WHERE name = 'compressImages';
@@ -1,17 +0,0 @@
--- + is normally replaced by X and / by Y, but this can temporarily cause UNIQUE key exception
--- this might create blob duplicates, but cleanup will eventually take care of it
-
-UPDATE blobs SET blobId = REPLACE(blobId, '+', 'A');
-UPDATE blobs SET blobId = REPLACE(blobId, '/', 'B');
-
-UPDATE notes SET blobId = REPLACE(blobId, '+', 'A');
-UPDATE notes SET blobId = REPLACE(blobId, '/', 'B');
-
-UPDATE attachments SET blobId = REPLACE(blobId, '+', 'A');
-UPDATE attachments SET blobId = REPLACE(blobId, '/', 'B');
-
-UPDATE revisions SET blobId = REPLACE(blobId, '+', 'A');
-UPDATE revisions SET blobId = REPLACE(blobId, '/', 'B');
-
-UPDATE entity_changes SET entityId = REPLACE(entityId, '+', 'A') WHERE entityName = 'blobs';
-UPDATE entity_changes SET entityId = REPLACE(entityId, '/', 'B') WHERE entityName = 'blobs';
@@ -1,14 +0,0 @@
--- Add the oauth user data table
-CREATE TABLE IF NOT EXISTS "user_data"
-(
-    tmpID INT,
-    username TEXT,
-    email TEXT,
-    userIDEncryptedDataKey TEXT,
-    userIDVerificationHash TEXT,
-    salt TEXT,
-    derivedKey TEXT,
-    isSetup TEXT DEFAULT "false",
-    UNIQUE (tmpID),
-    PRIMARY KEY (tmpID)
-);
@@ -1,46 +0,0 @@
--- Add tables for vector embeddings storage and management
--- This migration adds embedding support to the main document.db database
-
--- Store embeddings for notes
-CREATE TABLE IF NOT EXISTS "note_embeddings" (
-    "embedId" TEXT NOT NULL PRIMARY KEY,
-    "noteId" TEXT NOT NULL,
-    "providerId" TEXT NOT NULL,
-    "modelId" TEXT NOT NULL,
-    "dimension" INTEGER NOT NULL,
-    "embedding" BLOB NOT NULL,
-    "version" INTEGER NOT NULL DEFAULT 1,
-    "dateCreated" TEXT NOT NULL,
-    "utcDateCreated" TEXT NOT NULL,
-    "dateModified" TEXT NOT NULL,
-    "utcDateModified" TEXT NOT NULL
-);
-
-CREATE INDEX "IDX_note_embeddings_noteId" ON "note_embeddings" ("noteId");
-CREATE INDEX "IDX_note_embeddings_providerId_modelId" ON "note_embeddings" ("providerId", "modelId");
-
--- Table to track which notes need embedding updates
-CREATE TABLE IF NOT EXISTS "embedding_queue" (
-    "noteId" TEXT NOT NULL PRIMARY KEY,
-    "operation" TEXT NOT NULL, -- CREATE, UPDATE, DELETE
-    "dateQueued" TEXT NOT NULL,
-    "utcDateQueued" TEXT NOT NULL,
-    "priority" INTEGER NOT NULL DEFAULT 0,
-    "attempts" INTEGER NOT NULL DEFAULT 0,
-    "lastAttempt" TEXT NULL,
-    "error" TEXT NULL,
-    "failed" INTEGER NOT NULL DEFAULT 0,
-    "isProcessing" INTEGER NOT NULL DEFAULT 0
-);
-
--- Table to store embedding provider configurations
-CREATE TABLE IF NOT EXISTS "embedding_providers" (
-    "providerId" TEXT NOT NULL PRIMARY KEY,
-    "name" TEXT NOT NULL,
-    "priority" INTEGER NOT NULL DEFAULT 0,
-    "config" TEXT NOT NULL, -- JSON config object
-    "dateCreated" TEXT NOT NULL,
-    "utcDateCreated" TEXT NOT NULL,
-    "dateModified" TEXT NOT NULL,
-    "utcDateModified" TEXT NOT NULL
-);
@@ -1,5 +0,0 @@
-CREATE TABLE IF NOT EXISTS sessions (
-    id TEXT PRIMARY KEY,
-    data TEXT,
-    expires INTEGER
-);
@@ -1,5 +1,5 @@
-import sql from "../../../services/sql.js";
-import utils from "../../../services/utils.js";
+import sql from "../services/sql.js";
+import utils from "../services/utils.js";
 
 interface NoteContentsRow {
     noteId: string;
@@ -1,8 +1,8 @@
-import becca from "../../../becca/becca.js";
-import becca_loader from "../../../becca/becca_loader.js";
-import cls from "../../../services/cls.js";
-import log from "../../../services/log.js";
-import sql from "../../../services/sql.js";
+import becca from "../becca/becca.js";
+import becca_loader from "../becca/becca_loader.js";
+import cls from "../services/cls.js";
+import log from "../services/log.js";
+import sql from "../services/sql.js";
 
 export default () => {
     cls.init(() => {
apps/server/src/migrations/migrations.ts (new file, 295 lines)
@@ -0,0 +1,295 @@
+/**
+ * @module
+ *
+ * Contains all the migrations that are run on the database.
+ */
+
+// Migrations should be kept in descending order, so the latest migration is first.
+const MIGRATIONS: (SqlMigration | JsMigration)[] = [
+    // Session store
+    {
+        version: 231,
+        sql: /*sql*/`\
+            CREATE TABLE IF NOT EXISTS sessions (
+                id TEXT PRIMARY KEY,
+                data TEXT,
+                expires INTEGER
+            );
+        `
+    },
+    // Add tables for vector embeddings storage and management
+    // This migration adds embedding support to the main document.db database
+    {
+        version: 230,
+        sql: /*sql*/`\
+            -- Store embeddings for notes
+            CREATE TABLE IF NOT EXISTS "note_embeddings" (
+                "embedId" TEXT NOT NULL PRIMARY KEY,
+                "noteId" TEXT NOT NULL,
+                "providerId" TEXT NOT NULL,
+                "modelId" TEXT NOT NULL,
+                "dimension" INTEGER NOT NULL,
+                "embedding" BLOB NOT NULL,
+                "version" INTEGER NOT NULL DEFAULT 1,
+                "dateCreated" TEXT NOT NULL,
+                "utcDateCreated" TEXT NOT NULL,
+                "dateModified" TEXT NOT NULL,
+                "utcDateModified" TEXT NOT NULL
+            );
+
+            CREATE INDEX "IDX_note_embeddings_noteId" ON "note_embeddings" ("noteId");
+            CREATE INDEX "IDX_note_embeddings_providerId_modelId" ON "note_embeddings" ("providerId", "modelId");
+
+            -- Table to track which notes need embedding updates
+            CREATE TABLE IF NOT EXISTS "embedding_queue" (
+                "noteId" TEXT NOT NULL PRIMARY KEY,
+                "operation" TEXT NOT NULL, -- CREATE, UPDATE, DELETE
+                "dateQueued" TEXT NOT NULL,
+                "utcDateQueued" TEXT NOT NULL,
+                "priority" INTEGER NOT NULL DEFAULT 0,
+                "attempts" INTEGER NOT NULL DEFAULT 0,
+                "lastAttempt" TEXT NULL,
+                "error" TEXT NULL,
+                "failed" INTEGER NOT NULL DEFAULT 0,
+                "isProcessing" INTEGER NOT NULL DEFAULT 0
+            );
+
+            -- Table to store embedding provider configurations
+            CREATE TABLE IF NOT EXISTS "embedding_providers" (
+                "providerId" TEXT NOT NULL PRIMARY KEY,
+                "name" TEXT NOT NULL,
+                "priority" INTEGER NOT NULL DEFAULT 0,
+                "config" TEXT NOT NULL, -- JSON config object
+                "dateCreated" TEXT NOT NULL,
+                "utcDateCreated" TEXT NOT NULL,
+                "dateModified" TEXT NOT NULL,
+                "utcDateModified" TEXT NOT NULL
+            );
+        `
+    },
+
+    // add the oauth user data table
+    {
+        version: 229,
+        sql: /*sql*/`\
+            CREATE TABLE IF NOT EXISTS "user_data"
+            (
+                tmpID INT,
+                username TEXT,
+                email TEXT,
+                userIDEncryptedDataKey TEXT,
+                userIDVerificationHash TEXT,
+                salt TEXT,
+                derivedKey TEXT,
+                isSetup TEXT DEFAULT "false",
+                UNIQUE (tmpID),
+                PRIMARY KEY (tmpID)
+            );
+        `
+    },
+    // fix blob IDs
+    {
+        version: 228,
+        sql: /*sql*/`\
+            -- + is normally replaced by X and / by Y, but this can temporarily cause UNIQUE key exception
+            -- this might create blob duplicates, but cleanup will eventually take care of it
+
+            UPDATE blobs SET blobId = REPLACE(blobId, '+', 'A');
+            UPDATE blobs SET blobId = REPLACE(blobId, '/', 'B');
+
+            UPDATE notes SET blobId = REPLACE(blobId, '+', 'A');
+            UPDATE notes SET blobId = REPLACE(blobId, '/', 'B');
+
+            UPDATE attachments SET blobId = REPLACE(blobId, '+', 'A');
+            UPDATE attachments SET blobId = REPLACE(blobId, '/', 'B');
+
+            UPDATE revisions SET blobId = REPLACE(blobId, '+', 'A');
+            UPDATE revisions SET blobId = REPLACE(blobId, '/', 'B');
+
+            UPDATE entity_changes SET entityId = REPLACE(entityId, '+', 'A') WHERE entityName = 'blobs';
+            UPDATE entity_changes SET entityId = REPLACE(entityId, '/', 'B') WHERE entityName = 'blobs';
+        `
+    },
+    // disable image compression
+    {
+        version: 227,
+        sql: /*sql*/`\
+            -- emergency disabling of image compression since it appears to make problems in migration to 0.61
+            UPDATE options SET value = 'false' WHERE name = 'compressImages';
+        `
+    },
+    // rename note size label
+    {
+        version: 226,
+        sql: /*sql*/`\
+            UPDATE attributes SET value = 'contentAndAttachmentsAndRevisionsSize' WHERE name = 'orderBy' AND value = 'noteSize';
+        `
+    },
+    // create blob ID indices
+    {
+        version: 225,
+        sql: /*sql*/`\
+            CREATE INDEX IF NOT EXISTS IDX_notes_blobId on notes (blobId);
+            CREATE INDEX IF NOT EXISTS IDX_revisions_blobId on revisions (blobId);
+            CREATE INDEX IF NOT EXISTS IDX_attachments_blobId on attachments (blobId);
+        `
+    },
+    // fix blob IDs
+    {
+        version: 224,
+        sql: /*sql*/`\
+            UPDATE blobs SET blobId = REPLACE(blobId, '+', 'X');
+            UPDATE blobs SET blobId = REPLACE(blobId, '/', 'Y');
+
+            UPDATE notes SET blobId = REPLACE(blobId, '+', 'X');
+            UPDATE notes SET blobId = REPLACE(blobId, '/', 'Y');
+
+            UPDATE attachments SET blobId = REPLACE(blobId, '+', 'X');
+            UPDATE attachments SET blobId = REPLACE(blobId, '/', 'Y');
+
+            UPDATE revisions SET blobId = REPLACE(blobId, '+', 'X');
+            UPDATE revisions SET blobId = REPLACE(blobId, '/', 'Y');
+
+            UPDATE entity_changes SET entityId = REPLACE(entityId, '+', 'X') WHERE entityName = 'blobs';
+            UPDATE entity_changes SET entityId = REPLACE(entityId, '/', 'Y') WHERE entityName = 'blobs';
+        `
+    },
+    // no operation
+    {
+        version: 223,
+        sql: /*sql*/`\
+            SELECT 1;
+        `
+    },
+    // rename open tabs to open note contexts
+    {
+        version: 222,
+        sql: /*sql*/`\
+            UPDATE options SET name = 'openNoteContexts' WHERE name = 'openTabs';
+            UPDATE entity_changes SET entityId = 'openNoteContexts' WHERE entityName = 'options' AND entityId = 'openTabs';
+        `
+    },
+    // remove hide included images option
+    {
+        version: 221,
+        sql: /*sql*/`\
+            DELETE FROM options WHERE name = 'hideIncludedImages_main';
+            DELETE FROM entity_changes WHERE entityName = 'options' AND entityId = 'hideIncludedImages_main';
+        `
+    },
+    // migrate images to attachments
+    {
+        version: 220,
+        module: () => import("./0220__migrate_images_to_attachments.js")
+    },
+    // attachments
+    {
+        version: 219,
+        sql: /*sql*/`\
+            CREATE TABLE IF NOT EXISTS "attachments"
+            (
+                attachmentId TEXT not null primary key,
+                ownerId TEXT not null,
+                role TEXT not null,
+                mime TEXT not null,
+                title TEXT not null,
+                isProtected INT not null DEFAULT 0,
+                position INT default 0 not null,
+                blobId TEXT DEFAULT null,
+                dateModified TEXT NOT NULL,
+                utcDateModified TEXT not null,
+                utcDateScheduledForErasureSince TEXT DEFAULT NULL,
+                isDeleted INT not null,
+                deleteId TEXT DEFAULT NULL);
+
+            CREATE INDEX IDX_attachments_ownerId_role
+                on attachments (ownerId, role);
+
+            CREATE INDEX IDX_attachments_utcDateScheduledForErasureSince
+                on attachments (utcDateScheduledForErasureSince);
+
+            CREATE INDEX IF NOT EXISTS IDX_attachments_blobId on attachments (blobId);
+        `
+    },
+    // rename note revision to revision
+    {
+        version: 218,
+        sql: /*sql*/`\
+            CREATE TABLE IF NOT EXISTS "revisions" (
+                revisionId TEXT NOT NULL PRIMARY KEY,
+                noteId TEXT NOT NULL,
+                type TEXT DEFAULT '' NOT NULL,
+                mime TEXT DEFAULT '' NOT NULL,
+                title TEXT NOT NULL,
+                isProtected INT NOT NULL DEFAULT 0,
+                blobId TEXT DEFAULT NULL,
+                utcDateLastEdited TEXT NOT NULL,
+                utcDateCreated TEXT NOT NULL,
+                utcDateModified TEXT NOT NULL,
+                dateLastEdited TEXT NOT NULL,
+                dateCreated TEXT NOT NULL
+            );
+
+            INSERT INTO revisions (revisionId, noteId, type, mime, title, isProtected, utcDateLastEdited, utcDateCreated, utcDateModified, dateLastEdited, dateCreated, blobId)
+                SELECT noteRevisionId, noteId, type, mime, title, isProtected, utcDateLastEdited, utcDateCreated, utcDateModified, dateLastEdited, dateCreated, blobId FROM note_revisions;
+
+            DROP TABLE note_revisions;
+
+            CREATE INDEX IDX_revisions_noteId ON revisions (noteId);
+            CREATE INDEX IDX_revisions_utcDateCreated ON revisions (utcDateCreated);
+            CREATE INDEX IDX_revisions_utcDateLastEdited ON revisions (utcDateLastEdited);
+            CREATE INDEX IDX_revisions_dateCreated ON revisions (dateCreated);
+            CREATE INDEX IDX_revisions_dateLastEdited ON revisions (dateLastEdited);
+            CREATE INDEX IF NOT EXISTS IDX_revisions_blobId on revisions (blobId);
+
+            UPDATE entity_changes SET entityName = 'revisions' WHERE entityName = 'note_revisions';
+        `
+    },
+    // drop content tables
+    {
+        version: 217,
+        sql: /*sql*/`\
+            DROP TABLE note_contents;
+            DROP TABLE note_revision_contents;
+
+            DELETE FROM entity_changes WHERE entityName IN ('note_contents', 'note_revision_contents');
+        `
+    },
+    {
+        version: 216,
+        module: async () => import("./0216__move_content_into_blobs.js")
+    },
+    // content structure
+    {
+        version: 215,
+        sql: /*sql*/`\
+            CREATE TABLE IF NOT EXISTS "blobs" (
+                blobId TEXT NOT NULL,
+                content TEXT NULL DEFAULT NULL,
+                dateModified TEXT NOT NULL,
+                utcDateModified TEXT NOT NULL,
+                PRIMARY KEY (blobId)
+            );
+
+            ALTER TABLE notes ADD blobId TEXT DEFAULT NULL;
+            ALTER TABLE note_revisions ADD blobId TEXT DEFAULT NULL;
+
+            CREATE INDEX IF NOT EXISTS IDX_notes_blobId on notes (blobId);
+            CREATE INDEX IF NOT EXISTS IDX_note_revisions_blobId on note_revisions (blobId);
+        `
+    }
+];
+
+export default MIGRATIONS;
+
+interface Migration {
+    version: number;
+}
+
+interface SqlMigration extends Migration {
+    sql: string;
+}
+
+interface JsMigration extends Migration {
+    module: () => Promise<{ default: () => void }>;
+}
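The new `migrations.ts` keeps every migration in a typed array rather than as loose `.sql`/`.ts` files on disk. A minimal sketch of how a caller might pick and run the pending entries (illustrative only; the actual consumer is the `migration.ts` service changed further below, and `executeSql` is a hypothetical callback):

```ts
// Illustrative sketch, not the actual service code: run every migration newer
// than the current DB version, oldest first. Import path assumes the caller
// sits next to migrations.ts.
import MIGRATIONS from "./migrations.js";

async function runPendingMigrations(currentDbVersion: number, executeSql: (script: string) => void) {
    const pending = MIGRATIONS
        .filter((m) => m.version > currentDbVersion)
        .sort((a, b) => a.version - b.version);

    for (const migration of pending) {
        if ("sql" in migration) {
            executeSql(migration.sql);               // SQL migrations are plain scripts
        } else {
            (await migration.module()).default();    // JS migrations are preloaded ES modules
        }
    }
}
```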
@@ -72,7 +72,7 @@ export default function buildLaunchBarConfig() {
             id: "_lbLlmChat",
             title: t("hidden-subtree.llm-chat-title"),
             type: "launcher",
-            command: "createAiChat",
+            builtinWidget: "aiChatLauncher",
             icon: "bx bx-bot",
             attributes: [
                 { type: "label", name: "desktopOnly" }
@@ -1,25 +1,19 @@
 import backupService from "./backup.js";
 import sql from "./sql.js";
-import fs from "fs";
 import log from "./log.js";
 import { crash } from "./utils.js";
-import resourceDir from "./resource_dir.js";
 import appInfo from "./app_info.js";
 import cls from "./cls.js";
 import { t } from "i18next";
-import { join } from "path";
+import MIGRATIONS from "../migrations/migrations.js";
 
 interface MigrationInfo {
     dbVersion: number;
-    name: string;
-    file: string;
-    type: "sql" | "js" | "ts" | string;
     /**
-     * Contains the JavaScript/TypeScript migration as a callback method that must be called to trigger the migration.
-     * The method cannot be async since it runs in an SQL transaction.
-     * For SQL migrations, this value is falsy.
+     * If string, then the migration is an SQL script that will be executed.
+     * If a function, then the migration is a JavaScript/TypeScript module that will be executed.
      */
-    module?: () => void;
+    migration: string | (() => void);
 }
 
 async function migrate() {
@@ -37,7 +31,6 @@ async function migrate() {
     );
 
     const migrations = await prepareMigrations(currentDbVersion);
-    migrations.sort((a, b) => a.dbVersion - b.dbVersion);
 
     // all migrations are executed in one transaction - upgrade either succeeds, or the user can stay at the old version
     // otherwise if half of the migrations succeed, user can't use any version - DB is too "new" for the old app,
@@ -76,53 +69,37 @@ async function migrate() {
 }
 
 async function prepareMigrations(currentDbVersion: number): Promise<MigrationInfo[]> {
-    const migrationFiles = fs.readdirSync(resourceDir.MIGRATIONS_DIR) ?? [];
+    MIGRATIONS.sort((a, b) => a.version - b.version);
     const migrations: MigrationInfo[] = [];
-    for (const file of migrationFiles) {
-        const match = file.match(/^([0-9]{4})__([a-zA-Z0-9_ ]+)\.(sql|js|ts)$/);
-        if (!match) {
-            continue;
-        }
-
-        const dbVersion = parseInt(match[1]);
+    for (const migration of MIGRATIONS) {
+        const dbVersion = migration.version;
         if (dbVersion > currentDbVersion) {
-            const name = match[2];
-            const type = match[3];
-
-            const migration: MigrationInfo = {
-                dbVersion: dbVersion,
-                name: name,
-                file: file,
-                type: type
-            };
-
-            if (type === "js" || type === "ts") {
+            if ("sql" in migration) {
+                migrations.push({
+                    dbVersion,
+                    migration: migration.sql
+                });
+            } else {
                 // Due to ESM imports, the migration file needs to be imported asynchronously and thus cannot be loaded at migration time (since migration is not asynchronous).
                 // As such we have to preload the ESM.
-                // Going back to the original approach but making it webpack-compatible
-                const importPath = join(resourceDir.MIGRATIONS_DIR, file);
-                migration.module = (await import(importPath)).default;
+                migrations.push({
+                    dbVersion,
+                    migration: (await migration.module()).default
+                });
             }
-
-            migrations.push(migration);
         }
     }
     return migrations;
 }
 
-function executeMigration(mig: MigrationInfo) {
-    if (mig.module) {
-        console.log("Migration with JS module");
-        mig.module();
-    } else if (mig.type === "sql") {
-        const migrationSql = fs.readFileSync(`${resourceDir.MIGRATIONS_DIR}/${mig.file}`).toString("utf8");
-
-        console.log(`Migration with SQL script: ${migrationSql}`);
-
-        sql.executeScript(migrationSql);
+function executeMigration({ migration }: MigrationInfo) {
+    if (typeof migration === "string") {
+        console.log(`Migration with SQL script: ${migration}`);
+        sql.executeScript(migration);
     } else {
-        throw new Error(`Unknown migration type '${mig.type}'`);
-    }
+        console.log("Migration with JS module");
+        migration();
+    };
 }
 
 function getDbVersion() {
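The rewritten `prepareMigrations` tells SQL and JS migrations apart purely by which property exists on the union type (`"sql" in migration`), so the old `name`/`file`/`type` bookkeeping is no longer needed. A stand-alone sketch of that narrowing pattern, using hypothetical types that only mirror `SqlMigration`/`JsMigration`:

```ts
// Minimal sketch of narrowing a union by property presence (the "in" operator).
// SqlStep/JsStep are illustrative stand-ins, not the project's actual interfaces.
interface SqlStep { version: number; sql: string; }
interface JsStep { version: number; run: () => void; }

function describe(step: SqlStep | JsStep): string {
    if ("sql" in step) {
        // TypeScript narrows `step` to SqlStep here
        return `v${step.version}: SQL script (${step.sql.length} chars)`;
    }
    // ...and to JsStep here
    return `v${step.version}: JS module`;
}

console.log(describe({ version: 231, sql: "SELECT 1;" }));
console.log(describe({ version: 220, run: () => {} }));
```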
@@ -14,16 +14,8 @@ if (!fs.existsSync(DB_INIT_DIR)) {
     process.exit(1);
 }
 
-const MIGRATIONS_DIR = path.resolve(DB_INIT_DIR, "migrations");
-
-if (!fs.existsSync(MIGRATIONS_DIR)) {
-    log.error(`Could not find migration directory: ${MIGRATIONS_DIR}`);
-    process.exit(1);
-}
-
 export default {
     RESOURCE_DIR,
-    MIGRATIONS_DIR,
     DB_INIT_DIR,
     ELECTRON_APP_ROOT_DIR
 };
@@ -59,6 +59,7 @@
 * [Text notes: add a way to move up and down text lines via a keyboard shortcut](https://github.com/TriliumNext/Notes/issues/1002) by @dogfuntom
 * [improve tab scroll UX by switching from instant to smooth behavior](https://github.com/TriliumNext/Notes/pull/2030) by @SiriusXT
 * Calendar view: display calendar view if `#viewType=calendar` is set.
+* [Mind map: add search support](https://github.com/TriliumNext/Notes/pull/2055) by @SiriusXT
 
 ## 📖 Documentation
 