fix(migration): JavaScript-based migrations not working
parent 01ff3d396d
commit a80c3e32f1
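The fix follows one pattern across the migration modules shown below: each synchronous CommonJS `module.exports = () => { ... }` becomes an async ES-module default export, its `require()` calls become awaited dynamic imports, and the migration runner awaits the exported function. A minimal sketch of the new module shape (illustrative body only; the real changes are in the hunks that follow):

// Old shape (the one the commit message says stopped working):
//     module.exports = () => {
//         const sql = require("../../src/services/sql");
//         ...
//     };

// New shape: an async default export that the runner imports dynamically and awaits.
export default async () => {
    const sql = (await import("../../src/services/sql")).default;
    // ... perform the data changes through the sql service ...
};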
@@ -1,11 +1,24 @@
-module.exports = () => {
-    const sql = require("../../src/services/sql");
-    const utils = require("../../src/services/utils");
+interface NoteContentsRow {
+    noteId: string;
+    content: string | Buffer;
+    dateModified: string;
+    utcDateModified: string;
+}
+
+interface NoteRevisionContents {
+    noteRevisionId: string;
+    content: string | Buffer;
+    utcDateModified: string;
+}
+
+export default async () => {
+    const sql = (await import("../../src/services/sql")).default;
+    const utils = (await import("../../src/services/utils")).default;
 
     const existingBlobIds = new Set();
 
-    for (const noteId of sql.getColumn(`SELECT noteId FROM note_contents`)) {
-        const row = sql.getRow(`SELECT noteId, content, dateModified, utcDateModified FROM note_contents WHERE noteId = ?`, [noteId]);
+    for (const noteId of sql.getColumn<string>(`SELECT noteId FROM note_contents`)) {
+        const row = sql.getRow<NoteContentsRow>(`SELECT noteId, content, dateModified, utcDateModified FROM note_contents WHERE noteId = ?`, [noteId]);
         const blobId = utils.hashedBlobId(row.content);
 
         if (!existingBlobIds.has(blobId)) {
@@ -28,7 +41,7 @@ module.exports = () => {
     }
 
     for (const noteRevisionId of sql.getColumn(`SELECT noteRevisionId FROM note_revision_contents`)) {
-        const row = sql.getRow(`SELECT noteRevisionId, content, utcDateModified FROM note_revision_contents WHERE noteRevisionId = ?`, [noteRevisionId]);
+        const row = sql.getRow<NoteRevisionContents>(`SELECT noteRevisionId, content, utcDateModified FROM note_revision_contents WHERE noteRevisionId = ?`, [noteRevisionId]);
         const blobId = utils.hashedBlobId(row.content);
 
         if (!existingBlobIds.has(blobId)) {
@@ -44,7 +57,7 @@ module.exports = () => {
             sql.execute("UPDATE entity_changes SET entityName = 'blobs', entityId = ? WHERE entityName = 'note_revision_contents' AND entityId = ?", [blobId, row.noteRevisionId]);
         } else {
             // duplicates
-            sql.execute("DELETE FROM entity_changes WHERE entityName = 'note_revision_contents' AND entityId = ?", [row.noteId]);
+            sql.execute("DELETE FROM entity_changes WHERE entityName = 'note_revision_contents' AND entityId = ?", [row.noteRevisionId]);
         }
 
         sql.execute("UPDATE note_revisions SET blobId = ? WHERE noteRevisionId = ?", [blobId, row.noteRevisionId]);
@@ -1,9 +1,9 @@
-module.exports = () => {
-    const beccaLoader = require("../../src/becca/becca_loader");
-    const becca = require("../../src/becca/becca");
-    const cls = require("../../src/services/cls");
-    const log = require("../../src/services/log");
-    const sql = require("../../src/services/sql");
+export default async () => {
+    const beccaLoader = (await import("../../src/becca/becca_loader")).default;
+    const becca = (await import("../../src/becca/becca")).default;
+    const cls = (await import("../../src/services/cls")).default;
+    const log = (await import("../../src/services/log")).default;
+    const sql = (await import("../../src/services/sql")).default;
 
     cls.init(() => {
         // emergency disabling of image compression since it appears to make problems in migration to 0.61
@@ -0,0 +1,17 @@
+import { describe, it } from "vitest";
+import becca from "../becca/becca.js";
+import sql from "./sql.js";
+import migration from "./migration.js";
+import cls from "./cls.js";
+
+describe("Migration", () => {
+    it("migrates from v214", async () => {
+        return new Promise<void>((resolve) => {
+            cls.init(async () => {
+                sql.rebuildIntegrationTestDatabase("test/db/document_v214.db");
+                await migration.migrateIfNecessary();
+                resolve();
+            });
+        });
+    });
+});
@@ -106,7 +106,7 @@ async function executeMigration(mig: MigrationInfo) {
         console.log("Migration with JS module");
 
         const migrationModule = await import(`${resourceDir.MIGRATIONS_DIR}/${mig.file}`);
-        migrationModule();
+        await migrationModule.default();
     } else {
         throw new Error(`Unknown migration type '${mig.type}'`);
     }
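The one-line change above is the heart of the fix for the runner: a dynamic `import()` resolves to a module namespace object, not to the exported function itself, so calling `migrationModule()` fails with a TypeError. The callable lives on the namespace's `default` property, and since the migrations are now async it also has to be awaited. A minimal sketch:

// import() yields a module namespace object, roughly { default: [AsyncFunction] },
// so the namespace itself is not callable.
const migrationModule = await import(`${resourceDir.MIGRATIONS_DIR}/${mig.file}`);

// migrationModule();            // TypeError: migrationModule is not a function
await migrationModule.default(); // runs the async migration and waits for it to finish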
@@ -25,14 +25,18 @@ function buildDatabase() {
     return new Database(dataDir.DOCUMENT_PATH);
 }
 
-function buildIntegrationTestDatabase() {
-    const dbBuffer = fs.readFileSync(dataDir.DOCUMENT_PATH);
+function buildIntegrationTestDatabase(dbPath?: string) {
+    const dbBuffer = fs.readFileSync(dbPath ?? dataDir.DOCUMENT_PATH);
     return new Database(dbBuffer);
 }
 
-function rebuildIntegrationTestDatabase() {
+function rebuildIntegrationTestDatabase(dbPath: string) {
+    if (dbConnection) {
+        dbConnection.close();
+    }
+
     // This allows a database that is read normally but is kept in memory and discards all modifications.
-    dbConnection = buildIntegrationTestDatabase();
+    dbConnection = buildIntegrationTestDatabase(dbPath);
     statementCache = {};
 }
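Passing a `dbPath` lets the integration-test helpers load an arbitrary database snapshot instead of the configured document, and `rebuildIntegrationTestDatabase` now closes any open connection before swapping in the in-memory copy. A short usage sketch matching the new spec above (same service names as in the diff):

// Rebuild the in-memory test database from a fixed snapshot on disk,
// then run pending migrations against it; per the comment above, writes stay in memory.
sql.rebuildIntegrationTestDatabase("test/db/document_v214.db");
await migration.migrateIfNecessary();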
Binary file not shown.