Mirror of https://github.com/TriliumNext/Notes.git
feat(import/zip): support root-relative paths
parent 79b3b92ec9
commit 4a40b22c9a
@@ -283,7 +283,7 @@ $$`;

     it("supports wikilink with root-relative path", () => {
         const input = `oh no my banana I bought on [[journal/monday]] has gone off! I’m taking it back to the [[other/shop]] for a refund`;
-        const expected = `<p>oh no my banana I bought on <a class="reference-link" href="journal/monday">journal/monday</a> has gone off! I’m taking it back to the <a class="reference-link" href="other/shop">other/shop</a> for a refund</p>`;
+        const expected = `<p>oh no my banana I bought on <a class="reference-link" href="/journal/monday">journal/monday</a> has gone off! I’m taking it back to the <a class="reference-link" href="/other/shop">other/shop</a> for a refund</p>`;
         expect(markdownService.renderToHtml(input, "Title")).toStrictEqual(expected);
     });

@@ -215,7 +215,7 @@ function restoreFromMap(text: string, map: Map<string, string>): string {
 }

 function processWikiLinks(paragraph: string) {
-    paragraph = paragraph.replaceAll(/\[\[([^\[\]]+)\]\]/g, `<a class="reference-link" href="$1">$1</a>`);
+    paragraph = paragraph.replaceAll(/\[\[([^\[\]]+)\]\]/g, `<a class="reference-link" href="/$1">$1</a>`);
     return paragraph;
 }

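For illustration, a minimal standalone sketch of the substitution above: the regex and the replacement template are the ones from the "+" line of the diff, while the sample input and the console.log call are only here to show the effect of the new leading "/".

// Same regex and replacement template as in the hunk above; the sample input is illustrative only.
function processWikiLinks(paragraph: string) {
    return paragraph.replaceAll(/\[\[([^\[\]]+)\]\]/g, `<a class="reference-link" href="/$1">$1</a>`);
}

console.log(processWikiLinks("my banana from [[journal/monday]] went back to [[other/shop]]"));
// my banana from <a class="reference-link" href="/journal/monday">journal/monday</a> went back to <a class="reference-link" href="/other/shop">other/shop</a>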
@@ -41,6 +41,7 @@ async function importZip(taskContext: TaskContext, fileBuffer: Buffer, importRoo
     const createdPaths: Record<string, string> = { "/": importRootNote.noteId, "\\": importRootNote.noteId };
     let metaFile: MetaFile | null = null;
     let firstNote: BNote | null = null;
+    let topLevelPath = "";
     const createdNoteIds = new Set<string>();

     function getNewNoteId(origNoteId: string) {
@@ -257,11 +258,12 @@ async function importZip(taskContext: TaskContext, fileBuffer: Buffer, importRoo
         saveAttributes(note, noteMeta);

         firstNote = firstNote || note;

         return noteId;
     }

     function getEntityIdFromRelativeUrl(url: string, filePath: string) {
+        let absUrl;
+        if (!url.startsWith("/")) {
         while (url.startsWith("./")) {
             url = url.substr(2);
         }
@@ -279,6 +281,9 @@ async function importZip(taskContext: TaskContext, fileBuffer: Buffer, importRoo
         }

         absUrl += `${absUrl.length > 0 ? "/" : ""}${url}`;
+        } else {
+            absUrl = topLevelPath + url;
+        }

         const { noteMeta, attachmentMeta } = getMeta(absUrl);

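Taken together, the two hunks above change getEntityIdFromRelativeUrl so that a URL starting with "/" is anchored at the archive's top-level path instead of being treated as relative. A hedged sketch of just that branching: resolveImportUrl and dirOfReferencingFile are illustrative names, and the first branch only stands in for the existing relative-path resolution that this diff does not show.

// Sketch only: dirOfReferencingFile stands in for the existing relative-path
// resolution inside getEntityIdFromRelativeUrl, which this diff does not show.
function resolveImportUrl(url: string, dirOfReferencingFile: string, topLevelPath: string): string {
    let absUrl: string;

    if (!url.startsWith("/")) {
        // existing behaviour: strip leading "./" and resolve against the referencing file's directory
        while (url.startsWith("./")) {
            url = url.substring(2);
        }
        absUrl = dirOfReferencingFile;
        absUrl += `${absUrl.length > 0 ? "/" : ""}${url}`;
    } else {
        // new behaviour: root-relative URLs are anchored at the archive's top-level path
        absUrl = topLevelPath + url;
    }

    return absUrl;
}

// resolveImportUrl("/journal/monday.html", "notes/other", "notes") -> "notes/journal/monday.html"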
@@ -527,20 +532,28 @@ async function importZip(taskContext: TaskContext, fileBuffer: Buffer, importRoo
         }
     }

-    // we're running two passes to make sure that the meta file is loaded before the rest of the files is processed.
+    // we're running two passes in order to obtain critical information first (meta file and root)
+    const topLevelItems = new Set<string>();
     await readZipFile(fileBuffer, async (zipfile: yauzl.ZipFile, entry: yauzl.Entry) => {
         const filePath = normalizeFilePath(entry.fileName);

+        // make sure that the meta file is loaded before the rest of the files is processed.
         if (filePath === "!!!meta.json") {
             const content = await readContent(zipfile, entry);

             metaFile = JSON.parse(content.toString("utf-8"));
         }

+        // determine the root of the .zip (i.e. if it has only one top-level folder then the root is that folder, or the root of the archive if there are multiple top-level folders).
+        const firstSlash = filePath.indexOf("/");
+        const topLevelPath = (firstSlash !== -1 ? filePath.substring(0, firstSlash) : filePath);
+        topLevelItems.add(topLevelPath);

         zipfile.readEntry();
     });

+    topLevelPath = (topLevelItems.size > 1 ? "" : topLevelItems.values().next().value ?? "");

     await readZipFile(fileBuffer, async (zipfile: yauzl.ZipFile, entry: yauzl.Entry) => {
         const filePath = normalizeFilePath(entry.fileName);

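The first pass above does two things: it loads !!!meta.json and it collects the top-level folder of every entry so the archive root can be decided before the second pass runs. The root detection can be shown in isolation; below is a small sketch of the same Set-based logic, where detectTopLevelPath and entryPaths are illustrative names rather than code from the repository.

// Mirrors the first-pass root detection above: a single shared top-level
// folder becomes the root; multiple top-level items mean the root is "".
function detectTopLevelPath(entryPaths: string[]): string {
    const topLevelItems = new Set<string>();

    for (const filePath of entryPaths) {
        const firstSlash = filePath.indexOf("/");
        topLevelItems.add(firstSlash !== -1 ? filePath.substring(0, firstSlash) : filePath);
    }

    return topLevelItems.size > 1 ? "" : topLevelItems.values().next().value ?? "";
}

// detectTopLevelPath(["notes/!!!meta.json", "notes/a/b.html"]) -> "notes"
// detectTopLevelPath(["a.html", "b/c.html"])                   -> ""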