refactor(mdx-loader): read metadata from memory (loaded content) instead of fs (#10457)

* mdx loader shouldn't read metadata from file system but from memory

* comments

* refactor: apply lint autofix

* apply same for blog

* apply same for blog

* refactor: apply lint autofix

* apply same for pages
slorber authored Aug 30, 2024
1 parent 2aef92c commit a4329d3
Showing 8 changed files with 182 additions and 80 deletions.
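
In short, the `metadataPath` loader option can now return the already-loaded metadata together with its path, so the MDX loader no longer re-reads from disk the JSON that the plugin just produced. The following is a minimal sketch of the before/after contract, not code taken verbatim from the diff; `dataDir`, `docuHash`, and the in-memory map are hypothetical stand-ins for the plugin's real values:

import path from 'path';

// Hypothetical stand-ins for the plugin's real dataDir, docuHash, and loaded content.
const dataDir = '/site/.docusaurus/docusaurus-plugin-content-docs/default';
const docuHash = (p: string): string => Buffer.from(p).toString('hex').slice(0, 12);
const loadedMetadataBySource = new Map<string, unknown>();

type LoadedMetadata = {
  metadataPath: string; // still registered as a webpack file dependency
  metadataContent: unknown; // already-loaded metadata, inlined without an fs read
};

// Before this commit: the callback could only return a path,
// forcing the loader to read and parse the JSON file again.
function metadataPathBefore(filePath: string): string {
  return path.join(dataDir, `${docuHash(filePath)}.json`);
}

// After this commit: the callback may also hand over the content it holds in memory.
function metadataPathAfter(filePath: string): LoadedMetadata {
  return {
    metadataPath: path.join(dataDir, `${docuHash(filePath)}.json`),
    metadataContent: loadedMetadataBySource.get(filePath),
  };
}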
55 changes: 38 additions & 17 deletions packages/docusaurus-mdx-loader/src/loader.ts
@@ -35,17 +35,26 @@ type Pluggable = any; // TODO fix this asap

export type MDXPlugin = Pluggable;

// This represents the MDX metadata bundle path + its loaded content
export type LoadedMetadata = {
metadataPath: string;
metadataContent: unknown;
};

export type Options = Partial<MDXOptions> & {
markdownConfig: MarkdownConfig;
staticDirs: string[];
siteDir: string;
isMDXPartial?: (filePath: string) => boolean;
isMDXPartialFrontMatterWarningDisabled?: boolean;
removeContentTitle?: boolean;
metadataPath?: string | ((filePath: string) => string);

// TODO Docusaurus v4: rename to just "metadata"?
// We kept backward compatibility in v3 in case plugins/sites use the mdx loader
metadataPath?: string | ((filePath: string) => string | LoadedMetadata);
createAssets?: (metadata: {
frontMatter: {[key: string]: unknown};
metadata: {[key: string]: unknown};
metadata: unknown;
}) => {[key: string]: unknown};
resolveMarkdownLink?: ResolveMarkdownLink;

@@ -103,32 +112,40 @@ ${JSON.stringify(frontMatter, null, 2)}`;
}
}

function getMetadataPath(): string | undefined {
async function loadMetadata(): Promise<LoadedMetadata | undefined> {
if (!isMDXPartial) {
// Read metadata for this MDX and export it.
if (options.metadataPath && typeof options.metadataPath === 'function') {
return options.metadataPath(filePath);
const metadata = options.metadataPath(filePath);
if (!metadata) {
return undefined;
}
if (typeof metadata === 'string') {
return {
metadataPath: metadata,
metadataContent: await readMetadataPath(metadata),
};
}
if (!metadata.metadataPath) {
throw new Error(`Metadata path missing for file ${filePath}`);
}
if (!metadata.metadataContent) {
throw new Error(`Metadata content missing for file ${filePath}`);
}
return metadata;
}
}
return undefined;
}

const metadataPath = getMetadataPath();
if (metadataPath) {
this.addDependency(metadataPath);
const metadata = await loadMetadata();
if (metadata) {
this.addDependency(metadata.metadataPath);
}

const metadataJsonString = metadataPath
? await readMetadataPath(metadataPath)
: undefined;

const metadata = metadataJsonString
? (JSON.parse(metadataJsonString) as {[key: string]: unknown})
: undefined;

const assets =
options.createAssets && metadata
? options.createAssets({frontMatter, metadata})
? options.createAssets({frontMatter, metadata: metadata.metadataContent})
: undefined;

const fileLoaderUtils = getFileLoaderUtils(compilerName === 'server');
@@ -138,7 +155,11 @@ ${JSON.stringify(frontMatter, null, 2)}`;
const exportsCode = `
export const frontMatter = ${stringifyObject(frontMatter)};
export const contentTitle = ${stringifyObject(contentTitle)};
${metadataJsonString ? `export const metadata = ${metadataJsonString};` : ''}
${
metadata
? `export const metadata = ${JSON.stringify(metadata.metadataContent)};`
: ''
}
${
assets
? `export const assets = ${createAssetsExportCode({
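For context (not part of the diff): the loader still emits the same exports for a non-partial MDX file, but the `metadata` literal now comes from `JSON.stringify(metadata.metadataContent)` rather than from splicing in the raw JSON string previously read off disk. An illustrative emitted module, with made-up values:

// Illustrative shape of the module emitted by the MDX loader (values are invented).
export const frontMatter = {title: 'Hello'};
export const contentTitle = 'Hello';
// Previously: the raw metadata JSON string read from the file was inserted verbatim.
// Now: the in-memory metadataContent object is serialized with JSON.stringify.
export const metadata = {permalink: '/blog/hello', source: '@site/blog/hello.mdx'};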
4 changes: 2 additions & 2 deletions packages/docusaurus-mdx-loader/src/utils.ts
@@ -19,9 +19,9 @@ import type {Options} from './loader';
* starting with _). That's why it's important to provide the `isMDXPartial`
* function in config
*/
export async function readMetadataPath(metadataPath: string): Promise<string> {
export async function readMetadataPath(metadataPath: string): Promise<unknown> {
try {
return await fs.readFile(metadataPath, 'utf8');
return await fs.readJSON(metadataPath, 'utf8');
} catch (error) {
throw new Error(
logger.interpolate`MDX loader can't read MDX metadata file path=${metadataPath}. Maybe the isMDXPartial option function was not provided?`,
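The return type moves from `Promise<string>` to `Promise<unknown>` because fs-extra's `readJSON` parses the file, whereas the old `fs.readFile` call returned a raw string that the caller then had to `JSON.parse`. A tiny sketch of the difference, with a placeholder file path:

import fs from 'fs-extra';

// Placeholder path, for illustration only.
const metadataPath = '/tmp/example-metadata.json';

async function demo(): Promise<void> {
  const raw: string = await fs.readFile(metadataPath, 'utf8'); // old: raw JSON string
  const parsed: unknown = await fs.readJSON(metadataPath); // new: already-parsed value
  console.log(typeof raw, typeof parsed);
}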
35 changes: 35 additions & 0 deletions packages/docusaurus-plugin-content-blog/src/contentHelpers.ts
@@ -0,0 +1,35 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/

import type {BlogContent, BlogPost} from '@docusaurus/plugin-content-blog';

function indexBlogPostsBySource(content: BlogContent): Map<string, BlogPost> {
return new Map(
content.blogPosts.map((blogPost) => [blogPost.metadata.source, blogPost]),
);
}

// TODO this is bad, we should have a better way to do this (new lifecycle?)
// The source to blog/permalink is a mutable map passed to the mdx loader
// See https://github.com/facebook/docusaurus/pull/10457
// See https://github.com/facebook/docusaurus/pull/10185
export function createContentHelpers() {
const sourceToBlogPost = new Map<string, BlogPost>();
const sourceToPermalink = new Map<string, string>();

// Mutable map update :/
function updateContent(content: BlogContent): void {
sourceToBlogPost.clear();
sourceToPermalink.clear();
indexBlogPostsBySource(content).forEach((value, key) => {
sourceToBlogPost.set(key, value);
sourceToPermalink.set(key, value.metadata.permalink);
});
}

return {updateContent, sourceToBlogPost, sourceToPermalink};
}
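
A rough usage sketch of these helpers, mirroring how the blog plugin wires them up further down in this diff; the `content` value and the source paths are illustrative:

import {createContentHelpers} from './contentHelpers';
import type {BlogContent} from '@docusaurus/plugin-content-blog';

const contentHelpers = createContentHelpers();

// In contentLoaded(): refresh the mutable maps from the freshly loaded content.
declare const content: BlogContent; // stand-in for the plugin's loaded content
contentHelpers.updateContent(content);

// Later, in the MDX loader's metadataPath callback and in link resolution:
// look up in-memory metadata by aliased source path instead of re-reading JSON.
const post = contentHelpers.sourceToBlogPost.get('@site/blog/2024-08-30-hello.mdx');
const permalink = contentHelpers.sourceToPermalink.get('@site/blog/2024-08-30-hello.mdx');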
46 changes: 14 additions & 32 deletions packages/docusaurus-plugin-content-blog/src/index.ts
@@ -19,7 +19,6 @@ import {
getDataFilePath,
DEFAULT_PLUGIN_ID,
resolveMarkdownLinkPathname,
type SourceToPermalink,
} from '@docusaurus/utils';
import {getTagsFilePathsToWatch} from '@docusaurus/utils-validation';
import {
@@ -40,6 +39,7 @@ import {createBlogFeedFiles, createFeedHtmlHeadTags} from './feed';

import {createAllRoutes} from './routes';
import {checkAuthorsMapPermalinkCollisions, getAuthorsMap} from './authorsMap';
import {createContentHelpers} from './contentHelpers';
import type {BlogContentPaths, BlogMarkdownLoaderOptions} from './types';
import type {LoadContext, Plugin} from '@docusaurus/types';
import type {
@@ -55,33 +55,6 @@ import type {RuleSetRule, RuleSetUseItem} from 'webpack';

const PluginName = 'docusaurus-plugin-content-blog';

// TODO this is bad, we should have a better way to do this (new lifecycle?)
// The source to permalink is currently a mutable map passed to the mdx loader
// for link resolution
// see https://github.com/facebook/docusaurus/pull/10185
function createSourceToPermalinkHelper() {
const sourceToPermalink: SourceToPermalink = new Map();

function computeSourceToPermalink(content: BlogContent): SourceToPermalink {
return new Map(
content.blogPosts.map(({metadata: {source, permalink}}) => [
source,
permalink,
]),
);
}

// Mutable map update :/
function update(content: BlogContent): void {
sourceToPermalink.clear();
computeSourceToPermalink(content).forEach((value, key) => {
sourceToPermalink.set(key, value);
});
}

return {get: () => sourceToPermalink, update};
}

export default async function pluginContentBlog(
context: LoadContext,
options: PluginOptions,
@@ -128,7 +101,7 @@ export default async function pluginContentBlog(
contentPaths,
});

const sourceToPermalinkHelper = createSourceToPermalinkHelper();
const contentHelpers = createContentHelpers();

async function createBlogMDXLoaderRule(): Promise<RuleSetRule> {
const {
@@ -162,7 +135,16 @@
// Note that metadataPath must be the same/in-sync as
// the path from createData for each MDX.
const aliasedPath = aliasedSitePath(mdxPath, siteDir);
return path.join(dataDir, `${docuHash(aliasedPath)}.json`);
const metadataPath = path.join(
dataDir,
`${docuHash(aliasedPath)}.json`,
);
const metadataContent =
contentHelpers.sourceToBlogPost.get(aliasedPath)!.metadata;
return {
metadataPath,
metadataContent,
};
},
// For blog posts a title in markdown is always removed
// Blog post titles are rendered separately
@@ -184,7 +166,7 @@
resolveMarkdownLink: ({linkPathname, sourceFilePath}) => {
const permalink = resolveMarkdownLinkPathname(linkPathname, {
sourceFilePath,
sourceToPermalink: sourceToPermalinkHelper.get(),
sourceToPermalink: contentHelpers.sourceToPermalink,
siteDir,
contentPaths,
});
@@ -352,7 +334,7 @@
},

async contentLoaded({content, actions}) {
sourceToPermalinkHelper.update(content);
contentHelpers.updateContent(content);

await createAllRoutes({
baseUrl,
34 changes: 34 additions & 0 deletions packages/docusaurus-plugin-content-docs/src/contentHelpers.ts
@@ -0,0 +1,34 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/

import type {DocMetadata, LoadedContent} from '@docusaurus/plugin-content-docs';

function indexDocsBySource(content: LoadedContent): Map<string, DocMetadata> {
const allDocs = content.loadedVersions.flatMap((v) => v.docs);
return new Map(allDocs.map((doc) => [doc.source, doc]));
}

// TODO this is bad, we should have a better way to do this (new lifecycle?)
// The source to doc/permalink is a mutable map passed to the mdx loader
// See https://github.com/facebook/docusaurus/pull/10457
// See https://github.com/facebook/docusaurus/pull/10185
export function createContentHelpers() {
const sourceToDoc = new Map<string, DocMetadata>();
const sourceToPermalink = new Map<string, string>();

// Mutable map update :/
function updateContent(content: LoadedContent): void {
sourceToDoc.clear();
sourceToPermalink.clear();
indexDocsBySource(content).forEach((value, key) => {
sourceToDoc.set(key, value);
sourceToPermalink.set(key, value.permalink);
});
}

return {updateContent, sourceToDoc, sourceToPermalink};
}
41 changes: 13 additions & 28 deletions packages/docusaurus-plugin-content-docs/src/index.ts
@@ -19,7 +19,6 @@ import {
createSlugger,
resolveMarkdownLinkPathname,
DEFAULT_PLUGIN_ID,
type SourceToPermalink,
type TagsFile,
} from '@docusaurus/utils';
import {
@@ -54,6 +53,7 @@ import {
import {createAllRoutes} from './routes';
import {createSidebarsUtils} from './sidebars/utils';

import {createContentHelpers} from './contentHelpers';
import type {
PluginOptions,
DocMetadataBase,
@@ -66,29 +66,6 @@ import type {LoadContext, Plugin} from '@docusaurus/types';
import type {DocFile, FullVersion} from './types';
import type {RuleSetRule} from 'webpack';

// TODO this is bad, we should have a better way to do this (new lifecycle?)
// The source to permalink is currently a mutable map passed to the mdx loader
// for link resolution
// see https://github.com/facebook/docusaurus/pull/10185
function createSourceToPermalinkHelper() {
const sourceToPermalink: SourceToPermalink = new Map();

function computeSourceToPermalink(content: LoadedContent): SourceToPermalink {
const allDocs = content.loadedVersions.flatMap((v) => v.docs);
return new Map(allDocs.map(({source, permalink}) => [source, permalink]));
}

// Mutable map update :/
function update(content: LoadedContent): void {
sourceToPermalink.clear();
computeSourceToPermalink(content).forEach((value, key) => {
sourceToPermalink.set(key, value);
});
}

return {get: () => sourceToPermalink, update};
}

export default async function pluginContentDocs(
context: LoadContext,
options: PluginOptions,
@@ -115,7 +92,7 @@ export default async function pluginContentDocs(
// TODO env should be injected into all plugins
const env = process.env.NODE_ENV as DocEnv;

const sourceToPermalinkHelper = createSourceToPermalinkHelper();
const contentHelpers = createContentHelpers();

async function createDocsMDXLoaderRule(): Promise<RuleSetRule> {
const {
@@ -146,7 +123,15 @@
// Note that metadataPath must be the same/in-sync as
// the path from createData for each MDX.
const aliasedPath = aliasedSitePath(mdxPath, siteDir);
return path.join(dataDir, `${docuHash(aliasedPath)}.json`);
const metadataPath = path.join(
dataDir,
`${docuHash(aliasedPath)}.json`,
);
const metadataContent = contentHelpers.sourceToDoc.get(aliasedPath);
return {
metadataPath,
metadataContent,
};
},
// Assets allow converting some relative image paths to
// require(...) calls
@@ -161,7 +146,7 @@
);
const permalink = resolveMarkdownLinkPathname(linkPathname, {
sourceFilePath,
sourceToPermalink: sourceToPermalinkHelper.get(),
sourceToPermalink: contentHelpers.sourceToPermalink,
siteDir,
contentPaths: version,
});
@@ -335,7 +320,7 @@
},

async contentLoaded({content, actions}) {
sourceToPermalinkHelper.update(content);
contentHelpers.updateContent(content);

const versions: FullVersion[] = content.loadedVersions.map(toFullVersion);

33 changes: 33 additions & 0 deletions packages/docusaurus-plugin-content-pages/src/contentHelpers.ts
@@ -0,0 +1,33 @@
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/

import type {LoadedContent, Metadata} from '@docusaurus/plugin-content-pages';

function indexPagesBySource(content: LoadedContent): Map<string, Metadata> {
return new Map(content.map((page) => [page.source, page]));
}

// TODO this is bad, we should have a better way to do this (new lifecycle?)
// The source to page/permalink is a mutable map passed to the mdx loader
// See https://github.com/facebook/docusaurus/pull/10457
// See https://github.com/facebook/docusaurus/pull/10185
export function createContentHelpers() {
const sourceToPage = new Map<string, Metadata>();
// const sourceToPermalink = new Map<string, string>();

// Mutable map update :/
function updateContent(content: LoadedContent): void {
sourceToPage.clear();
// sourceToPermalink.clear();
indexPagesBySource(content).forEach((value, key) => {
sourceToPage.set(key, value);
// sourceToPermalink.set(key, value.metadata.permalink);
});
}

return {updateContent, sourceToPage};
}