Skip to content
Open
202 changes: 135 additions & 67 deletions apps/obsidian/src/utils/importNodes.ts
Original file line number Diff line number Diff line change
Expand Up @@ -296,9 +296,7 @@ export const fetchNodeContentWithMetadata = async ({

return {
content: data.text,
createdAt: data.created
? new Date(data.created + "Z").valueOf()
: 0,
createdAt: data.created ? new Date(data.created + "Z").valueOf() : 0,
modifiedAt: data.last_modified
? new Date(data.last_modified + "Z").valueOf()
: 0,
Expand Down Expand Up @@ -487,11 +485,7 @@ const updateMarkdownAssetLinks = ({
app: App;
originalNodePath?: string;
}): string => {
if (oldPathToNewPath.size === 0) {
return content;
}

// Create a set of all new paths for quick lookup (used by findImportedAssetFile)
// Create a set of all new paths for quick lookup (used by findImportedAssetFile when pathMapping has entries)
const newPaths = new Set(oldPathToNewPath.values());

let updatedContent = content;
Expand All @@ -500,6 +494,13 @@ const updateMarkdownAssetLinks = ({
? targetFile.path.replace(/\/[^/]*$/, "")
: "";

// When the note is under import/{spaceName}/, only treat wiki links as resolved if the target is in this folder (not some other vault file).
const pathParts = targetFile.path.split("/");
const importFolder =
pathParts[0] === "import" && pathParts.length >= 2
? pathParts.slice(0, 2).join("/")
: null;

/** Path of targetFile relative to the current note, for use in links. Obsidian resolves relative links from the note's directory. */
const getRelativeLinkPath = (assetPath: string): string => {
const noteParts = noteDir ? noteDir.split("/").filter(Boolean) : [];
Expand Down Expand Up @@ -603,48 +604,88 @@ const updateMarkdownAssetLinks = ({
return null;
};

const processLink = (linkPath: string): string => {
// Skip external URLs
if (linkPath.startsWith("http://") || linkPath.startsWith("https://")) {
return linkPath;
}

// First, try to find if this link resolves to one of our imported assets
const importedAssetFile = findImportedAssetFile(linkPath);
if (importedAssetFile) {
return getRelativeLinkPath(importedAssetFile.path);
}

// Direct lookup from pathMapping (record built when we downloaded each asset)
const newPath = getNewPathForLink(linkPath);
if (newPath) {
const newFile = app.metadataCache.getFirstLinkpathDest(
newPath,
targetFile.path,
);
if (newFile) {
return getRelativeLinkPath(newFile.path);
}
}

// Only resolve to files under import/{spaceName}/ so we don't point at the wrong vault's files
const resolvedFile = app.metadataCache.getFirstLinkpathDest(
linkPath,
targetFile.path,
);
const isInImportFolder =
importFolder &&
resolvedFile &&
resolvedFile.path.startsWith(importFolder + "/");
if (isInImportFolder && resolvedFile) {
return getRelativeLinkPath(resolvedFile.path);
}

// Unresolved (dead) link from another vault: rewrite so that when the user creates the file from this link, it is created under import/{vaultName}/ in the same relative position as in the source vault
if (importFolder && originalNodePath && !resolvedFile) {
// Vault-relative link (e.g. "Discourse Nodes/EVD - no relation testing") -> use as-is. Path-from-current-file (e.g. "EVD - no relation testing") -> resolve relative to source note dir
const canonicalSourcePath =
linkPath.includes("/") &&
!linkPath.startsWith(".") &&
!linkPath.startsWith("/")
? normalizePathForLookup(linkPath)
: (getCanonicalFromOriginalNote(linkPath) ??
normalizePathForLookup(linkPath));
return `${importFolder}/${canonicalSourcePath}`;
}

return linkPath;
};

// Match wiki links: [[path]] or [[path|alias]]
const wikiLinkRegex = /\[\[([^\]]+)\]\]/g;
updatedContent = updatedContent.replace(
wikiLinkRegex,
(match, linkContent) => {
(match, linkContent: string) => {
// Extract path and optional alias
const [linkPath, alias] = linkContent
.split("|")
.map((s: string) => s.trim());

// Skip external URLs
if (linkPath.startsWith("http://") || linkPath.startsWith("https://")) {
return match;
}

// First, try to find if this link resolves to one of our imported assets
const importedAssetFile = findImportedAssetFile(linkPath);
if (importedAssetFile) {
const linkText = getRelativeLinkPath(importedAssetFile.path);
if (alias) {
return `[[${linkText}|${alias}]]`;
}
return `[[${linkText}]]`;
}

// Direct lookup from pathMapping (record built when we downloaded each asset)
const newPath = getNewPathForLink(linkPath);
if (newPath) {
const newFile = app.metadataCache.getFirstLinkpathDest(
newPath,
targetFile.path,
);
if (newFile) {
const linkText = getRelativeLinkPath(newFile.path);
if (alias) {
return `[[${linkText}|${alias}]]`;
}
return `[[${linkText}]]`;
}
if (!linkPath) return match;
let processedPath = processLink(linkPath);
if (processedPath.endsWith(".md") && !linkPath.endsWith(".md"))
processedPath = processedPath.substring(0, processedPath.length - 3);
if (alias) {
return `[[${processedPath}|${alias}]]`;
}
return `[[${processedPath}|${linkPath}]]`;
},
);

return match;
// Match markdown links (non-image): [text](path) — internal paths resolved like wikilinks, href kept URL-encoded
const markdownLinkRegex = /(?<!!)\[([^\]]*)\]\(([^)]+)\)/g;
updatedContent = updatedContent.replace(
  markdownLinkRegex,
  (match, linkText: string, linkPath: string) => {
    if (!linkPath) return match;
    // decodeURI throws URIError on malformed escapes (e.g. a literal "%" in a
    // link target); fall back to the raw path instead of aborting the whole
    // rewrite — mirrors the try/catch guard in encodePathForMarkdownLink.
    let decodedPath: string;
    try {
      decodedPath = decodeURI(linkPath);
    } catch {
      decodedPath = linkPath;
    }
    const processedPath = encodePathForMarkdownLink(processLink(decodedPath));
    return `[${linkText}](${processedPath})`;
  },
);

Expand Down Expand Up @@ -894,7 +935,6 @@ const importAssetsForNode = async ({

// Track path mapping (raw + normalized key so updateMarkdownAssetLinks can lookup by link text)
setPathMapping(filepath, targetPath);
console.log(`Imported asset: ${filepath} -> ${targetPath}`);
} catch (error) {
const errorMsg = `Error importing asset ${fileRef.filepath}: ${error}`;
errors.push(errorMsg);
Expand All @@ -917,6 +957,15 @@ const sanitizeFileName = (fileName: string): string => {
.trim();
};

/**
 * Sanitize every segment of a vault-relative path for placement under the
 * import folder, preserving the source vault's folder structure. Segments
 * that sanitize to an empty string are dropped entirely.
 */
const sanitizePathForImport = (path: string): string => {
  const cleanSegments: string[] = [];
  for (const segment of path.split("/")) {
    const sanitized = sanitizeFileName(segment);
    if (sanitized) cleanSegments.push(sanitized);
  }
  return cleanSegments.join("/");
};

type ParsedFrontmatter = {
nodeTypeId?: string;
nodeInstanceId?: string;
Expand Down Expand Up @@ -1187,11 +1236,13 @@ export const importSelectedNodes = async ({
content,
createdAt: contentCreatedAt,
modifiedAt: contentModifiedAt,
filePath,
filePath: contentFilePath,
} = nodeContent;
const createdAt = node.createdAt ?? contentCreatedAt;
const modifiedAt = node.modifiedAt ?? contentModifiedAt;
const originalNodePath: string | undefined = node.filePath;
// Use source vault path from Content direct variant metadata for wikilink rewriting and asset placement
const originalNodePath: string | undefined =
contentFilePath ?? node.filePath;

// Sanitize file name
const sanitizedFileName = sanitizeFileName(fileName);
Expand All @@ -1201,14 +1252,20 @@ export const importSelectedNodes = async ({
// Update existing file - use its current path
finalFilePath = existingFile.path;
} else {
// Create new file in the import folder
finalFilePath = `${importFolderPath}/${sanitizedFileName}.md`;

// Check if file path already exists (edge case: same title but different nodeInstanceId)
let counter = 1;
while (await plugin.app.vault.adapter.exists(finalFilePath)) {
finalFilePath = `${importFolderPath}/${sanitizedFileName} (${counter}).md`;
counter++;
// Preserve source vault folder structure under import/{vaultName} when we have filePath from Content
const pathUnderImport =
contentFilePath && contentFilePath.includes("/")
? sanitizePathForImport(contentFilePath)
: `${sanitizedFileName}.md`;
finalFilePath = `${importFolderPath}/${pathUnderImport}`;

// Ensure all parent folders exist (e.g. import/VaultName/Discourse Nodes/SubFolder)
const dirParts = finalFilePath.split("/");
for (let i = 1; i < dirParts.length - 1; i++) {
const folderPath = dirParts.slice(0, i + 1).join("/");
if (!(await plugin.app.vault.adapter.exists(folderPath))) {
await plugin.app.vault.createFolder(folderPath);
}
}
}

Expand All @@ -1220,7 +1277,7 @@ export const importSelectedNodes = async ({
sourceSpaceId: spaceId,
sourceSpaceUri: spaceUri,
rawContent: content,
originalFilePath: filePath,
originalFilePath: contentFilePath,
filePath: finalFilePath,
importedCreatedAt: createdAt,
importedModifiedAt: modifiedAt,
Expand Down Expand Up @@ -1250,21 +1307,18 @@ export const importSelectedNodes = async ({
originalNodePath,
});

// Update markdown content with new asset paths if assets were imported
if (assetImportResult.pathMapping.size > 0) {
const currentContent = await plugin.app.vault.read(processedFile);
const updatedContent = updateMarkdownAssetLinks({
content: currentContent,
oldPathToNewPath: assetImportResult.pathMapping,
targetFile: processedFile,
app: plugin.app,
originalNodePath,
});

// Only update if content changed
if (updatedContent !== currentContent) {
await plugin.app.vault.modify(processedFile, updatedContent);
}
// Update markdown content: rewrite asset paths from pathMapping and normalize all wiki links to relative paths
const currentContent = await plugin.app.vault.read(processedFile);
const updatedContent = updateMarkdownAssetLinks({
content: currentContent,
oldPathToNewPath: assetImportResult.pathMapping,
targetFile: processedFile,
app: plugin.app,
originalNodePath,
});

if (updatedContent !== currentContent) {
await plugin.app.vault.modify(processedFile, updatedContent);
}

// Log asset import errors if any
Expand Down Expand Up @@ -1426,3 +1480,17 @@ export const refreshAllImportedFiles = async (

return { success: successCount, failed: failedCount, errors };
};

/**
 * Percent-encode a path for use as a markdown link href.
 * The path is first fully decoded (so e.g. %2F becomes a real "/" and we split
 * on true segment boundaries), then each segment is re-encoded individually
 * (spaces → %20) while "/" is kept literal so the output never contains %2F.
 * Malformed escape sequences leave the input untouched before encoding.
 */
const encodePathForMarkdownLink = (linkPath: string): string => {
  let plain = linkPath;
  try {
    plain = decodeURIComponent(linkPath);
  } catch {
    // Invalid percent-escape (e.g. a bare "%") — treat the path as plain text.
  }
  return plain
    .split("/")
    .map((segment) => encodeURIComponent(segment))
    .join("/");
};