mirror of
https://github.com/woodchen-ink/obsidian-publish-to-discourse.git
synced 2025-07-17 21:32:05 +08:00
Add a routine that recursively expands all embedded linked notes into the note content before publishing
This commit is contained in:
parent
30434e1fcc
commit
54d1b95660
187
src/expand-embeds.ts
Normal file
187
src/expand-embeds.ts
Normal file
@ -0,0 +1,187 @@
|
||||
/**
|
||||
* expandEmbeds.ts
|
||||
*
|
||||
* This module provides a recursive function to expand Obsidian-style embedded links (`![[file]]` and `![[file#heading]]`)
|
||||
* by inlining their referenced content. It supports:
|
||||
*
|
||||
* - Resolving full-note and section-level embeds (`![[file#subsection]]`)
|
||||
* - Expanding embedded content recursively while preventing infinite loops
|
||||
* - Handling multiple instances of the same embedded section within the same note
|
||||
* - Skipping the expansion of images (`png, jpg, gif, etc.`) and PDFs while preserving their original links
|
||||
*
|
||||
* The function maintains a call stack to track active expansions, ensuring that duplicate references in the same
|
||||
* recursion chain are ignored (to prevent loops), while allowing repeated embeds elsewhere in the note.
|
||||
*/
|
||||
|
||||
import { App, TFile, CachedMetadata } from "obsidian";
|
||||
|
||||
/**
|
||||
* Recursively expands embedded content (including subpath references),
|
||||
* allowing the same (file+subpath) to appear multiple times if it's *not*
|
||||
* in the same immediate recursion stack.
|
||||
*/
|
||||
export async function expandEmbeds(
|
||||
app: App,
|
||||
file: TFile,
|
||||
stack: string[] = [],
|
||||
subpath?: string
|
||||
): Promise<string> {
|
||||
const sp = subpath ?? "<entireFile>";
|
||||
const currentKey = `${file.path}::${sp}`;
|
||||
|
||||
// If it's already on the current expansion stack, we have a cycle => skip
|
||||
if (stack.includes(currentKey)) {
|
||||
return "";
|
||||
}
|
||||
|
||||
// Push it on stack
|
||||
stack.push(currentKey);
|
||||
|
||||
// Now do the usual reading
|
||||
const raw = await app.vault.read(file);
|
||||
const embedRegex = /!\[\[([^\]]+)\]\]/g;
|
||||
|
||||
// We'll do a standard async replacement
|
||||
const expandedWholeFile = await replaceAsync(raw, embedRegex, async (fullMatch, link) => {
|
||||
let [filePart] = link.split("|");
|
||||
|
||||
let sub: string | undefined;
|
||||
const hashIndex = filePart.indexOf("#");
|
||||
if (hashIndex >= 0) {
|
||||
sub = filePart.substring(hashIndex + 1).trim();
|
||||
filePart = filePart.substring(0, hashIndex).trim();
|
||||
}
|
||||
|
||||
const linkedTFile = app.metadataCache.getFirstLinkpathDest(filePart, file.path);
|
||||
if (!linkedTFile) {
|
||||
// The file doesn't exist
|
||||
return "";
|
||||
}
|
||||
|
||||
// If it's an image, keep the link
|
||||
if (isIgnoredFile(linkedTFile)) {
|
||||
return fullMatch;
|
||||
}
|
||||
|
||||
// Recursively expand that subpath
|
||||
return expandEmbeds(app, linkedTFile, stack, sub);
|
||||
});
|
||||
|
||||
// Pop it from stack
|
||||
stack.pop();
|
||||
|
||||
// If subpath was specified, slice out that portion
|
||||
if (subpath) {
|
||||
return sliceSubpathContent(app, file, expandedWholeFile, subpath);
|
||||
}
|
||||
|
||||
return expandedWholeFile;
|
||||
}
|
||||
|
||||
/**
|
||||
* If the user references a heading or a block (e.g. "#Heading" or "#^blockID"),
|
||||
* we slice out just that portion from the fully expanded content.
|
||||
*/
|
||||
function sliceSubpathContent(
|
||||
app: App,
|
||||
tfile: TFile,
|
||||
fileContent: string,
|
||||
subpath: string
|
||||
): string {
|
||||
const fileCache = app.metadataCache.getFileCache(tfile);
|
||||
if (!fileCache) return fileContent;
|
||||
|
||||
// Block reference => if subpath starts with '^'
|
||||
if (subpath.startsWith("^")) {
|
||||
const blockId = subpath.slice(1);
|
||||
const block = fileCache.blocks?.[blockId];
|
||||
if (!block) {
|
||||
return "";
|
||||
}
|
||||
const { start, end } = block.position;
|
||||
if (!end) {
|
||||
// Goes to EOF if no explicit end
|
||||
return fileContent.substring(start.offset);
|
||||
} else {
|
||||
return fileContent.substring(start.offset, end.offset);
|
||||
}
|
||||
}
|
||||
|
||||
// Otherwise treat it as a heading
|
||||
return sliceHeading(fileContent, fileCache, subpath);
|
||||
}
|
||||
|
||||
/**
|
||||
* Finds a heading by case-insensitive match and returns everything until
|
||||
* the next heading of the same or shallower level.
|
||||
*/
|
||||
function sliceHeading(content: string, fileCache: CachedMetadata, headingName: string): string {
|
||||
if (!fileCache.headings) return content;
|
||||
const target = headingName.toLowerCase();
|
||||
|
||||
// Step 1: find the heading
|
||||
let foundHeadingIndex = -1;
|
||||
for (let i = 0; i < fileCache.headings.length; i++) {
|
||||
if (fileCache.headings[i].heading.toLowerCase() === target) {
|
||||
foundHeadingIndex = i;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (foundHeadingIndex === -1) {
|
||||
return ""; // no heading matched
|
||||
}
|
||||
|
||||
// Step 2: find the end offset for that heading's section
|
||||
const heading = fileCache.headings[foundHeadingIndex];
|
||||
const startOffset = heading.position.start.offset;
|
||||
const thisLevel = heading.level;
|
||||
|
||||
// We'll search forward for the next heading of the same or shallower level
|
||||
let endOffset = content.length;
|
||||
for (let j = foundHeadingIndex + 1; j < fileCache.headings.length; j++) {
|
||||
const h = fileCache.headings[j];
|
||||
if (h.level <= thisLevel) {
|
||||
endOffset = h.position.start.offset;
|
||||
break;
|
||||
}
|
||||
}
|
||||
console.log(`"Sliceheading for ${heading}, level ${thisLevel}, offsets ${startOffset} and ${endOffset}."`)
|
||||
|
||||
return content.substring(startOffset, endOffset).trim();
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a TFile is an image or PDF by extension
|
||||
*/
|
||||
function isIgnoredFile(file: TFile): boolean {
|
||||
const imageExtensions = ["png", "jpg", "jpeg", "gif", "bmp", "svg", "webp", "pdf"];
|
||||
return imageExtensions.includes(file.extension.toLowerCase());
|
||||
}
|
||||
|
||||
/**
|
||||
* A helper for asynchronous regex replacements
|
||||
*/
|
||||
async function replaceAsync(
|
||||
str: string,
|
||||
regex: RegExp,
|
||||
asyncFn: (match: string, ...args: any[]) => Promise<string>
|
||||
): Promise<string> {
|
||||
const matches: Array<{ match: string; args: any[]; index: number }> = [];
|
||||
let m: RegExpExecArray | null;
|
||||
while ((m = regex.exec(str)) !== null) {
|
||||
matches.push({ match: m[0], args: m.slice(1), index: m.index });
|
||||
}
|
||||
|
||||
let result = "";
|
||||
let lastIndex = str.length;
|
||||
for (let i = matches.length - 1; i >= 0; i--) {
|
||||
const { match, args, index } = matches[i];
|
||||
const afterMatchIndex = index + match.length;
|
||||
const replacement = await asyncFn(match, ...args);
|
||||
result = str.substring(afterMatchIndex, lastIndex) + result;
|
||||
result = replacement + result;
|
||||
lastIndex = index;
|
||||
}
|
||||
result = str.substring(0, lastIndex) + result;
|
||||
return result;
|
||||
}
|
@ -2,6 +2,7 @@ import { App, Menu, MenuItem, Plugin, Modal, requestUrl, TFile, moment } from 'o
|
||||
import { DEFAULT_SETTINGS, DiscourseSyncSettings, DiscourseSyncSettingsTab } from './config';
|
||||
import * as yaml from 'yaml';
|
||||
import { t, setLocale } from './i18n';
|
||||
import { expandEmbeds } from './expand-embeds';
|
||||
|
||||
export default class DiscourseSyncPlugin extends Plugin {
|
||||
settings: DiscourseSyncSettings;
|
||||
@ -385,7 +386,7 @@ export default class DiscourseSyncPlugin extends Plugin {
|
||||
const syncDiscourse = (item: MenuItem) => {
|
||||
item.setTitle(t('PUBLISH_TO_DISCOURSE'));
|
||||
item.onClick(async () => {
|
||||
const content = await this.app.vault.read(file);
|
||||
const content = await expandEmbeds(this.app, file);
|
||||
const fm = this.getFrontMatter(content);
|
||||
this.activeFile = {
|
||||
name: file.basename,
|
||||
|
Loading…
x
Reference in New Issue
Block a user