diff --git a/package-lock.json b/package-lock.json index bd4633ad..46c488a0 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@node-core/doc-kit", - "version": "1.0.1", + "version": "1.1.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@node-core/doc-kit", - "version": "1.0.1", + "version": "1.1.0", "dependencies": { "@actions/core": "^3.0.0", "@heroicons/react": "^2.2.0", @@ -23,6 +23,7 @@ "estree-util-to-js": "^2.0.0", "estree-util-visit": "^2.0.0", "github-slugger": "^2.0.0", + "glob-parent": "^6.0.2", "globals": "^17.3.0", "hast-util-to-string": "^3.0.1", "hastscript": "^9.0.1", @@ -51,7 +52,6 @@ "unist-util-remove": "^4.0.0", "unist-util-select": "^5.1.0", "unist-util-visit": "^5.1.0", - "vfile": "^6.0.3", "yaml": "^2.8.2" }, "bin": { @@ -1046,6 +1046,7 @@ "resolved": "https://registry.npmjs.org/@orama/core/-/core-1.2.19.tgz", "integrity": "sha512-AVEI0eG/a1RUQK+tBloRMppQf46Ky4kIYKEVjo0V0VfIGZHdLOE2PJR4v949kFwiTnfSJCUaxgwM74FCA1uHUA==", "license": "AGPL-3.0", + "peer": true, "dependencies": { "@orama/cuid2": "2.2.3", "@orama/oramacore-events-parser": "0.0.5" @@ -3159,6 +3160,7 @@ "resolved": "https://registry.npmjs.org/@types/react/-/react-19.2.14.tgz", "integrity": "sha512-ilcTH/UniCkMdtexkoCN0bI7pMcJDvmQFPvuPvmEaYA/NSfFTAgdUSLAoVjaRJm7+6PvcM+q1zYOwS4wTYMF9w==", "license": "MIT", + "peer": true, "dependencies": { "csstype": "^3.2.2" } @@ -3332,6 +3334,7 @@ "integrity": "sha512-9Cnda8GS57AQakvRyG0PTejJNlA2xhvyNtEVIMlDWOOeEyBkYWhGPnfrIAnqxLMTSTo6q8g12XVjjev5l1NvMA==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "@eslint-community/eslint-utils": "^4.9.1", "@typescript-eslint/scope-manager": "8.54.0", @@ -3721,6 +3724,7 @@ "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.16.0.tgz", "integrity": "sha512-UVJyE9MttOsBQIDKw1skb9nAwQuR5wuGD3+82K6JgJlm/Y+KI92oNsMNGZCYdDsVtRHSak0pcV5Dno5+4jh9sw==", "license": "MIT", + "peer": true, "bin": { "acorn": "bin/acorn" }, @@ -4509,6 +4513,7 @@ 
"integrity": "sha512-LEyamqS7W5HB3ujJyvi0HQK/dtVINZvd5mAAp9eT5S/ujByGjiZLCzPcHVzuXbpJDJF/cxwHlfceVUDZ2lnSTw==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "@eslint-community/eslint-utils": "^4.8.0", "@eslint-community/regexpp": "^4.12.1", @@ -5148,7 +5153,6 @@ "version": "6.0.2", "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", - "dev": true, "license": "ISC", "dependencies": { "is-glob": "^4.0.3" @@ -7367,6 +7371,7 @@ } ], "license": "MIT", + "peer": true, "dependencies": { "nanoid": "^3.3.11", "picocolors": "^1.1.1", @@ -7509,6 +7514,7 @@ "resolved": "https://registry.npmjs.org/preact/-/preact-10.28.4.tgz", "integrity": "sha512-uKFfOHWuSNpRFVTnljsCluEFq57OKT+0QdOiQo8XWnQ/pSvg7OpX5eNOejELXJMWy+BwM2nobz0FkvzmnpCNsQ==", "license": "MIT", + "peer": true, "funding": { "type": "opencollective", "url": "https://opencollective.com/preact" @@ -7592,6 +7598,7 @@ "resolved": "https://registry.npmjs.org/react/-/react-19.2.4.tgz", "integrity": "sha512-9nfp2hYpCwOjAN+8TZFGhtWEwgvWHXqESH8qT89AT/lWklpLON22Lc8pEtnpsZz7VmawabSU0gCjnj8aC0euHQ==", "license": "MIT", + "peer": true, "engines": { "node": ">=0.10.0" } @@ -8039,8 +8046,7 @@ "version": "0.26.0", "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.26.0.tgz", "integrity": "sha512-NlHwttCI/l5gCPR3D1nNXtWABUmBwvZpEQiD4IXSbIDq8BzLIK/7Ir5gTFSGZDUu37K5cMNp0hFtzO38sC7gWA==", - "license": "MIT", - "peer": true + "license": "MIT" }, "node_modules/semver": { "version": "7.7.4", @@ -8516,6 +8522,7 @@ "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", "license": "MIT", + "peer": true, "engines": { "node": ">=12" }, @@ -8679,6 +8686,7 @@ "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", "integrity": 
"sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", "license": "Apache-2.0", + "peer": true, "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" @@ -8877,6 +8885,7 @@ "integrity": "sha512-VUyWiTNQD7itdiMuJy+EuLEErLj3uwX/EpHQF8EOf33Dq3Ju6VW1GXm+swk6+1h7a49uv9fKZ+dft9jU7esdLA==", "dev": true, "hasInstallScript": true, + "peer": true, "dependencies": { "napi-postinstall": "^0.2.4" }, diff --git a/package.json b/package.json index 16c3aacb..118808a5 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "@node-core/doc-kit", "type": "module", - "version": "1.0.1", + "version": "1.1.0", "repository": { "type": "git", "url": "git+https://github.com/nodejs/doc-kit.git" @@ -57,6 +57,7 @@ "estree-util-to-js": "^2.0.0", "estree-util-visit": "^2.0.0", "github-slugger": "^2.0.0", + "glob-parent": "^6.0.2", "globals": "^17.3.0", "hast-util-to-string": "^3.0.1", "hastscript": "^9.0.1", @@ -85,7 +86,6 @@ "unist-util-remove": "^4.0.0", "unist-util-select": "^5.1.0", "unist-util-visit": "^5.1.0", - "vfile": "^6.0.3", "yaml": "^2.8.2" } } diff --git a/src/generators/addon-verify/generate.mjs b/src/generators/addon-verify/generate.mjs index 10f45c95..91f9b463 100644 --- a/src/generators/addon-verify/generate.mjs +++ b/src/generators/addon-verify/generate.mjs @@ -1,6 +1,6 @@ 'use strict'; -import { mkdir, writeFile } from 'node:fs/promises'; +import { mkdir } from 'node:fs/promises'; import { join } from 'node:path'; import { visit } from 'unist-util-visit'; @@ -13,6 +13,7 @@ import { normalizeSectionName, } from './utils/section.mjs'; import getConfig from '../../utils/configuration/index.mjs'; +import { writeFile } from '../../utils/file.mjs'; /** * Generates a file list from code blocks. 
diff --git a/src/generators/api-links/generate.mjs b/src/generators/api-links/generate.mjs index fca10a01..a5a00a2d 100644 --- a/src/generators/api-links/generate.mjs +++ b/src/generators/api-links/generate.mjs @@ -1,6 +1,5 @@ 'use strict'; -import { writeFile } from 'node:fs/promises'; import { basename, join } from 'node:path'; import { checkIndirectReferences } from './utils/checkIndirectReferences.mjs'; @@ -11,6 +10,7 @@ import { GITHUB_BLOB_URL, populate, } from '../../utils/configuration/templates.mjs'; +import { writeFile } from '../../utils/file.mjs'; /** * Generates the `apilinks.json` file. diff --git a/src/generators/ast/generate.mjs b/src/generators/ast/generate.mjs index 65bcdb66..eed01d42 100644 --- a/src/generators/ast/generate.mjs +++ b/src/generators/ast/generate.mjs @@ -1,13 +1,14 @@ 'use strict'; import { readFile } from 'node:fs/promises'; -import { extname } from 'node:path'; +import { relative, sep } from 'node:path/posix'; +import globParent from 'glob-parent'; import { globSync } from 'tinyglobby'; -import { VFile } from 'vfile'; import { STABILITY_INDEX_URL } from './constants.mjs'; import getConfig from '../../utils/configuration/index.mjs'; +import { withExt } from '../../utils/file.mjs'; import { QUERIES } from '../../utils/queries/index.mjs'; import { getRemark } from '../../utils/remark.mjs'; @@ -24,19 +25,19 @@ export async function processChunk(inputSlice, itemIndices) { const results = []; - for (const path of filePaths) { + for (const [path, parent] of filePaths) { const content = await readFile(path, 'utf-8'); - const vfile = new VFile({ - path, - value: content.replace( - QUERIES.stabilityIndexPrefix, - match => `[${match}](${STABILITY_INDEX_URL})` - ), - }); + const value = content.replace( + QUERIES.stabilityIndexPrefix, + match => `[${match}](${STABILITY_INDEX_URL})` + ); + + const relativePath = sep + withExt(relative(parent, path)); results.push({ - tree: remarkProcessor.parse(vfile), - file: { stem: vfile.stem, basename: 
vfile.basename }, + tree: remarkProcessor.parse(value), + // The path is the relative path minus the extension + path: relativePath, }); } @@ -51,9 +52,14 @@ export async function processChunk(inputSlice, itemIndices) { export async function* generate(_, worker) { const { ast: config } = getConfig(); - const files = globSync(config.input, { ignore: config.ignore }).filter( - p => extname(p) === '.md' - ); + const files = config.input.flatMap(input => { + const parent = globParent(input); + + return globSync(input, { ignore: config.ignore }).map(child => [ + child, + parent, + ]); + }); // Parse markdown files in parallel using worker threads for await (const chunkResult of worker.stream(files)) { diff --git a/src/generators/json-simple/generate.mjs b/src/generators/json-simple/generate.mjs index 7fefda7b..82d06a99 100644 --- a/src/generators/json-simple/generate.mjs +++ b/src/generators/json-simple/generate.mjs @@ -1,11 +1,11 @@ 'use strict'; -import { writeFile } from 'node:fs/promises'; import { join } from 'node:path'; import { remove } from 'unist-util-remove'; import getConfig from '../../utils/configuration/index.mjs'; +import { writeFile } from '../../utils/file.mjs'; import { UNIST } from '../../utils/queries/index.mjs'; /** diff --git a/src/generators/jsx-ast/generate.mjs b/src/generators/jsx-ast/generate.mjs index 9a5fae33..394ce0fe 100644 --- a/src/generators/jsx-ast/generate.mjs +++ b/src/generators/jsx-ast/generate.mjs @@ -1,7 +1,7 @@ import { buildSideBarProps } from './utils/buildBarProps.mjs'; import buildContent from './utils/buildContent.mjs'; import { getSortedHeadNodes } from './utils/getSortedHeadNodes.mjs'; -import getConfig from '../../utils/configuration/index.mjs'; +import { href } from '../../utils/file.mjs'; import { groupNodesByModule } from '../../utils/generators.mjs'; import { getRemarkRecma } from '../../utils/remark.mjs'; @@ -22,7 +22,15 @@ export async function processChunk(slicedInput, itemIndices, docPages) { for (const idx of 
itemIndices) { const { head, entries } = slicedInput[idx]; - const sideBarProps = buildSideBarProps(head, docPages); + const sideBarProps = buildSideBarProps( + head, + docPages.map(([heading, path]) => [ + heading, + head.path === path + ? `${head.basename}.html` + : `${href(path, head.path)}.html`, + ]) + ); const content = await buildContent( entries, @@ -43,17 +51,11 @@ export async function processChunk(slicedInput, itemIndices, docPages) { * @type {import('./types').Generator['generate']} */ export async function* generate(input, worker) { - const config = getConfig('jsx-ast'); - const groupedModules = groupNodesByModule(input); const headNodes = getSortedHeadNodes(input); - // Pre-compute docPages once in main thread - // TODO(@avivkeller): Load the index file here instead of during configuration - const docPages = config.index - ? config.index.map(({ section, api }) => [section, `${api}.html`]) - : headNodes.map(node => [node.heading.data.name, `${node.api}.html`]); + const docPages = headNodes.map(node => [node.heading.data.name, node.path]); // Create sliced input: each item contains head + its module's entries // This avoids sending all 4700+ entries to every worker diff --git a/src/generators/jsx-ast/utils/__tests__/buildBarProps.test.mjs b/src/generators/jsx-ast/utils/__tests__/buildBarProps.test.mjs index 1953ef24..fe9da9d1 100644 --- a/src/generators/jsx-ast/utils/__tests__/buildBarProps.test.mjs +++ b/src/generators/jsx-ast/utils/__tests__/buildBarProps.test.mjs @@ -69,7 +69,8 @@ describe('extractTextContent', () => { describe('buildMetaBarProps', () => { it('creates meta bar properties from entries', () => { const head = { - api: 'fs', + basename: 'fs', + path: '/fs', added: 'v1.0.0', }; @@ -151,7 +152,8 @@ describe('formatVersionOptions', () => { describe('buildSideBarProps', () => { it('creates sidebar properties with versions and navigation', () => { const entry = { - api: 'http', + path: 'http', + basename: 'http', introduced_in: 'v0.10.0', }; 
diff --git a/src/generators/jsx-ast/utils/buildBarProps.mjs b/src/generators/jsx-ast/utils/buildBarProps.mjs index 1c3a184b..0144c3a4 100644 --- a/src/generators/jsx-ast/utils/buildBarProps.mjs +++ b/src/generators/jsx-ast/utils/buildBarProps.mjs @@ -90,10 +90,10 @@ export const buildMetaBarProps = (head, entries) => { addedIn: head.added || head.introduced_in || '', readingTime: readingTime(extractTextContent(entries)).text, viewAs: [ - ['JSON', `${head.api}.json`], - ['MD', `${head.api}.md`], + ['JSON', `${head.basename}.json`], + ['MD', `${head.basename}.md`], ], - editThisPage: `${populate(GITHUB_EDIT_URL, config)}${head.api}.md`, + editThisPage: `${populate(GITHUB_EDIT_URL, config)}${head.path}.md`, }; }; @@ -101,14 +101,14 @@ export const buildMetaBarProps = (head, entries) => { * Converts a compatible version entry into a version label and link. * * @param {Array} compatibleVersions - Compatible versions - * @param {string} api - API identifier (used in link) + * @param {string} path - path for the version URL */ -export const formatVersionOptions = (compatibleVersions, api) => { +export const formatVersionOptions = (compatibleVersions, path) => { const config = getConfig('jsx-ast'); return compatibleVersions.map(({ version, isLts, isCurrent }) => { const parsed = getVersionFromSemVer(version); - const value = getVersionURL(parsed, api, config.baseURL); + const value = getVersionURL(parsed, path, config.baseURL); let label = `v${parsed}`; @@ -143,9 +143,9 @@ export const buildSideBarProps = (entry, docPages) => { ); return { - versions: formatVersionOptions(compatibleVersions, entry.api), + versions: formatVersionOptions(compatibleVersions, entry.path), currentVersion: `v${config.version.version}`, - pathname: `${entry.api}.html`, + pathname: `${entry.basename}.html`, docPages, }; }; diff --git a/src/generators/legacy-html-all/generate.mjs b/src/generators/legacy-html-all/generate.mjs index 7849e235..ad4e2ca1 100644 --- 
a/src/generators/legacy-html-all/generate.mjs +++ b/src/generators/legacy-html-all/generate.mjs @@ -49,6 +49,7 @@ export async function generate(input) { const templateValues = { api: 'all', + path: 'all', added: '', section: 'All', version: `v${config.version.version}`, diff --git a/src/generators/legacy-html/generate.mjs b/src/generators/legacy-html/generate.mjs index df95de46..cff2698d 100644 --- a/src/generators/legacy-html/generate.mjs +++ b/src/generators/legacy-html/generate.mjs @@ -1,13 +1,13 @@ 'use strict'; -import { readFile, writeFile, mkdir } from 'node:fs/promises'; +import { readFile, cp } from 'node:fs/promises'; import { basename, join } from 'node:path'; import buildContent from './utils/buildContent.mjs'; import { replaceTemplateValues } from './utils/replaceTemplateValues.mjs'; -import { safeCopy } from './utils/safeCopy.mjs'; import tableOfContents from './utils/tableOfContents.mjs'; import getConfig from '../../utils/configuration/index.mjs'; +import { writeFile } from '../../utils/file.mjs'; import { groupNodesByModule } from '../../utils/generators.mjs'; import { minifyHTML } from '../../utils/html-minifier.mjs'; import { getRemarkRehypeWithShiki } from '../../utils/remark.mjs'; @@ -56,6 +56,7 @@ export async function processChunk(slicedInput, itemIndices, navigation) { const template = { api: head.api, + path: head.path, added: head.introduced_in ?? 
'', section: head.heading.data.name || apiAsHeading, toc, @@ -106,11 +107,8 @@ export async function* generate(input, worker) { // Define the output folder for API docs assets const assetsFolder = join(config.output, basename(path)); - // Creates the assets folder if it does not exist - await mkdir(assetsFolder, { recursive: true }); - - // Copy all files from assets folder to output, skipping unchanged files - await safeCopy(path, assetsFolder); + // Copy all files from assets folder to output + await cp(path, assetsFolder, { recursive: true }); } } diff --git a/src/generators/legacy-html/utils/__tests__/safeCopy.test.mjs b/src/generators/legacy-html/utils/__tests__/safeCopy.test.mjs deleted file mode 100644 index 186020a8..00000000 --- a/src/generators/legacy-html/utils/__tests__/safeCopy.test.mjs +++ /dev/null @@ -1,122 +0,0 @@ -'use strict'; - -import assert from 'node:assert'; -import { mkdir, readFile, rm, utimes, writeFile } from 'node:fs/promises'; -import { join } from 'node:path'; -import { afterEach, beforeEach, describe, it } from 'node:test'; - -import { safeCopy } from '../safeCopy.mjs'; - -describe('safeCopy', () => { - const testDir = join(import.meta.dirname, 'test-safe-copy'); - const srcDir = join(testDir, 'src'); - const targetDir = join(testDir, 'target'); - - beforeEach(async () => { - // Create test directories - await mkdir(srcDir, { recursive: true }); - await mkdir(targetDir, { recursive: true }); - }); - - afterEach(async () => { - // Clean up test directories - await rm(testDir, { recursive: true, force: true }); - }); - - it('should copy new files that do not exist in target', async () => { - // Create a file in source - await writeFile(join(srcDir, 'file1.txt'), 'content1'); - - await safeCopy(srcDir, targetDir); - - // Verify file was copied - const content = await readFile(join(targetDir, 'file1.txt'), 'utf-8'); - assert.strictEqual(content, 'content1'); - }); - - it('should copy multiple files', async () => { - // Create multiple 
files in source - await writeFile(join(srcDir, 'file1.txt'), 'content1'); - await writeFile(join(srcDir, 'file2.txt'), 'content2'); - await writeFile(join(srcDir, 'file3.txt'), 'content3'); - - await safeCopy(srcDir, targetDir); - - // Verify all files were copied - const content1 = await readFile(join(targetDir, 'file1.txt'), 'utf-8'); - const content2 = await readFile(join(targetDir, 'file2.txt'), 'utf-8'); - const content3 = await readFile(join(targetDir, 'file3.txt'), 'utf-8'); - - assert.strictEqual(content1, 'content1'); - assert.strictEqual(content2, 'content2'); - assert.strictEqual(content3, 'content3'); - }); - - it('should skip files with same size and older modification time', async () => { - // Create file in source with specific size - const content = 'same content'; - await writeFile(join(srcDir, 'file1.txt'), content); - - // Make source file old - const oldTime = new Date(Date.now() - 10000); - await utimes(join(srcDir, 'file1.txt'), oldTime, oldTime); - - // Create target file with same size but different content and newer timestamp - await writeFile(join(targetDir, 'file1.txt'), 'other things'); - - await safeCopy(srcDir, targetDir); - - // Verify file was not overwritten (source is older) - const targetContent = await readFile(join(targetDir, 'file1.txt'), 'utf-8'); - assert.strictEqual(targetContent, 'other things'); - }); - - it('should copy files when source has newer modification time', async () => { - // Create files in both directories - await writeFile(join(srcDir, 'file1.txt'), 'new content'); - await writeFile(join(targetDir, 'file1.txt'), 'old content'); - - // Make target file older - const oldTime = new Date(Date.now() - 10000); - await utimes(join(targetDir, 'file1.txt'), oldTime, oldTime); - - await safeCopy(srcDir, targetDir); - - // Verify file was updated - const content = await readFile(join(targetDir, 'file1.txt'), 'utf-8'); - assert.strictEqual(content, 'new content'); - }); - - it('should copy files when sizes differ', async 
() => { - // Create files with different sizes - await writeFile(join(srcDir, 'file1.txt'), 'short'); - await writeFile(join(targetDir, 'file1.txt'), 'much longer content'); - - await safeCopy(srcDir, targetDir); - - // Verify file was updated - const content = await readFile(join(targetDir, 'file1.txt'), 'utf-8'); - assert.strictEqual(content, 'short'); - }); - - it('should handle empty source directory', async () => { - // Don't create any files in source - await safeCopy(srcDir, targetDir); - }); - - it('should copy files with same size but different content when mtime is newer', async () => { - // Create files with same size but different content - await writeFile(join(srcDir, 'file1.txt'), 'abcde'); - await writeFile(join(targetDir, 'file1.txt'), 'fghij'); - - // Make target older - const oldTime = new Date(Date.now() - 10000); - await utimes(join(targetDir, 'file1.txt'), oldTime, oldTime); - - await safeCopy(srcDir, targetDir); - - // Verify file was updated with source content - const content = await readFile(join(targetDir, 'file1.txt'), 'utf-8'); - assert.strictEqual(content, 'abcde'); - }); -}); diff --git a/src/generators/legacy-html/utils/buildDropdowns.mjs b/src/generators/legacy-html/utils/buildDropdowns.mjs index d47a8fce..197384e1 100644 --- a/src/generators/legacy-html/utils/buildDropdowns.mjs +++ b/src/generators/legacy-html/utils/buildDropdowns.mjs @@ -48,11 +48,11 @@ export const buildNavigation = navigationContents => * Note.: We use plain strings here instead of HAST, since these are just * templates and not actual content that needs to be transformed. 
* - * @param {string} api The current API node name + * @param {string} path The current API node name * @param {string} added The version the API was added * @param {Array} versions All available Node.js releases */ -export const buildVersions = (api, added, versions) => { +export const buildVersions = (path, added, versions) => { const config = getConfig('legacy-html'); const compatibleVersions = getCompatibleVersions(added, versions); @@ -64,7 +64,7 @@ export const buildVersions = (api, added, versions) => { const ltsLabel = isLts ? 'LTS' : ''; - return `
  • ${parsedVersion} ${ltsLabel}
  • `; + return `
  • ${parsedVersion} ${ltsLabel}
  • `; }); return ( diff --git a/src/generators/legacy-html/utils/replaceTemplateValues.mjs b/src/generators/legacy-html/utils/replaceTemplateValues.mjs index 460592d3..0eb78d8d 100644 --- a/src/generators/legacy-html/utils/replaceTemplateValues.mjs +++ b/src/generators/legacy-html/utils/replaceTemplateValues.mjs @@ -22,7 +22,7 @@ import { */ export const replaceTemplateValues = ( apiTemplate, - { api, added, section, toc, nav, content }, + { path, api, added, section, toc, nav, content }, config, { skipGitHub = false, skipGtocPicker = false } = {} ) => { @@ -36,11 +36,11 @@ export const replaceTemplateValues = ( .replace('__CONTENT__', content) .replace(/__TOC_PICKER__/g, buildToC(toc)) .replace(/__GTOC_PICKER__/g, skipGtocPicker ? '' : buildNavigation(nav)) - .replace('__ALTDOCS__', buildVersions(api, added, config.changelog)) + .replace('__ALTDOCS__', buildVersions(path, added, config.changelog)) .replace( '__EDIT_ON_GITHUB__', skipGitHub ? '' - : buildGitHub(`${populate(GITHUB_EDIT_URL, config)}${api}.md`) + : buildGitHub(`${populate(GITHUB_EDIT_URL, config)}${path}.md`) ); }; diff --git a/src/generators/legacy-html/utils/safeCopy.mjs b/src/generators/legacy-html/utils/safeCopy.mjs deleted file mode 100644 index e429912f..00000000 --- a/src/generators/legacy-html/utils/safeCopy.mjs +++ /dev/null @@ -1,35 +0,0 @@ -'use strict'; - -import { statSync, constants } from 'node:fs'; -import { copyFile, readdir } from 'node:fs/promises'; -import { join } from 'node:path'; - -/** - * Copies files from source to target directory, skipping files that haven't changed. - * Uses synchronous stat checks for simplicity and copyFile for atomic operations. 
- * - * @param {string} srcDir - Source directory path - * @param {string} targetDir - Target directory path - */ -export async function safeCopy(srcDir, targetDir) { - const files = await readdir(srcDir); - - const promises = files.map(file => { - const sourcePath = join(srcDir, file); - const targetPath = join(targetDir, file); - - const tStat = statSync(targetPath, { throwIfNoEntry: false }); - - if (tStat === undefined) { - return copyFile(sourcePath, targetPath, constants.COPYFILE_FICLONE); - } - - const sStat = statSync(sourcePath); - - if (sStat.size !== tStat.size || sStat.mtimeMs > tStat.mtimeMs) { - return copyFile(sourcePath, targetPath, constants.COPYFILE_FICLONE); - } - }); - - await Promise.all(promises); -} diff --git a/src/generators/llms-txt/generate.mjs b/src/generators/llms-txt/generate.mjs index 01f4a506..a4aada4f 100644 --- a/src/generators/llms-txt/generate.mjs +++ b/src/generators/llms-txt/generate.mjs @@ -1,10 +1,11 @@ 'use strict'; -import { readFile, writeFile } from 'node:fs/promises'; +import { readFile } from 'node:fs/promises'; import { join } from 'node:path'; import { buildApiDocLink } from './utils/buildApiDocLink.mjs'; import getConfig from '../../utils/configuration/index.mjs'; +import { writeFile } from '../../utils/file.mjs'; /** * Generates a llms.txt file diff --git a/src/generators/llms-txt/utils/__tests__/buildApiDocLink.test.mjs b/src/generators/llms-txt/utils/__tests__/buildApiDocLink.test.mjs index cb75ee3a..6569b7da 100644 --- a/src/generators/llms-txt/utils/__tests__/buildApiDocLink.test.mjs +++ b/src/generators/llms-txt/utils/__tests__/buildApiDocLink.test.mjs @@ -65,7 +65,7 @@ describe('buildApiDocLink', () => { it('builds markdown link with description', () => { const entry = { heading: { data: { name: 'Test API' } }, - api: 'test', + path: '/test', llm_description: 'Test description', }; @@ -79,7 +79,7 @@ describe('buildApiDocLink', () => { it('handles doc path replacement', () => { const entry = { heading: { data: 
{ name: 'API Method' } }, - api: 'path', + path: '/path', content: { children: [] }, }; diff --git a/src/generators/man-page/generate.mjs b/src/generators/man-page/generate.mjs index 65619966..6de8862c 100644 --- a/src/generators/man-page/generate.mjs +++ b/src/generators/man-page/generate.mjs @@ -1,6 +1,6 @@ 'use strict'; -import { readFile, writeFile } from 'node:fs/promises'; +import { readFile } from 'node:fs/promises'; import { join } from 'node:path'; import { @@ -8,6 +8,7 @@ import { convertEnvVarToMandoc, } from './utils/converter.mjs'; import getConfig from '../../utils/configuration/index.mjs'; +import { writeFile } from '../../utils/file.mjs'; /** * @param {Array} components diff --git a/src/generators/metadata/types.d.ts b/src/generators/metadata/types.d.ts index 6da44dac..503abb9d 100644 --- a/src/generators/metadata/types.d.ts +++ b/src/generators/metadata/types.d.ts @@ -133,8 +133,10 @@ export type StabilityNode = NodeWithData; * for documentation generation, search indexing, and validation. 
*/ export interface MetadataEntry extends YAMLProperties { - /** API identifier/name */ + /** Path + API identification */ api: string; + path: string; // Note: this is extensionless + basename: string; /** Processed heading with metadata */ heading: HeadingNode; /** Stability classification information */ diff --git a/src/generators/metadata/utils/__tests__/parse.test.mjs b/src/generators/metadata/utils/__tests__/parse.test.mjs index 52fc5efe..ea653099 100644 --- a/src/generators/metadata/utils/__tests__/parse.test.mjs +++ b/src/generators/metadata/utils/__tests__/parse.test.mjs @@ -7,7 +7,7 @@ import { u } from 'unist-builder'; import { parseApiDoc } from '../parse.mjs'; -const file = { stem: 'fs', basename: 'fs.md' }; +const path = 'fs'; const typeMap = {}; const h = (text, depth = 1) => u('heading', { depth }, [u('text', text)]); @@ -26,14 +26,14 @@ describe('parseApiDoc', () => { h('fs'), u('paragraph', [u('text', 'Content.')]), ]); - const results = parseApiDoc({ file, tree }, typeMap); + const results = parseApiDoc({ path, tree }, typeMap); assert.strictEqual(results.length, 1); }); it('populates heading data with text and depth', () => { const tree = u('root', [h('File System')]); - const [entry] = parseApiDoc({ file, tree }, typeMap); + const [entry] = parseApiDoc({ path, tree }, typeMap); assert.strictEqual(entry.heading.data.text, 'File System'); assert.strictEqual(entry.heading.depth, 1); @@ -50,7 +50,7 @@ describe('parseApiDoc', () => { h('foo.bar()', 2), u('paragraph', [u('text', 'Method docs.')]), ]); - const results = parseApiDoc({ file, tree }, typeMap); + const results = parseApiDoc({ path, tree }, typeMap); assert.strictEqual(results.length, 3); }); @@ -62,7 +62,7 @@ describe('parseApiDoc', () => { h('Second'), u('paragraph', [u('text', 'Content B.')]), ]); - const results = parseApiDoc({ file, tree }, typeMap); + const results = parseApiDoc({ path, tree }, typeMap); assert.strictEqual(results[0].heading.data.slug, 'first'); 
assert.strictEqual(results[1].heading.data.slug, 'second'); @@ -72,7 +72,7 @@ describe('parseApiDoc', () => { describe('YAML metadata', () => { it('extracts added_in', () => { const tree = u('root', [h('fs'), yaml('added: v0.1.0')]); - const [entry] = parseApiDoc({ file, tree }, typeMap); + const [entry] = parseApiDoc({ path, tree }, typeMap); assert.strictEqual(entry.added, 'v0.1.0'); }); @@ -82,7 +82,7 @@ describe('parseApiDoc', () => { h('oldMethod'), yaml('added: v1.0.0\ndeprecated: v2.0.0'), ]); - const [entry] = parseApiDoc({ file, tree }, typeMap); + const [entry] = parseApiDoc({ path, tree }, typeMap); assert.strictEqual(entry.added, 'v1.0.0'); assert.strictEqual(entry.deprecated, 'v2.0.0'); @@ -90,7 +90,7 @@ describe('parseApiDoc', () => { it('extracts removed_in', () => { const tree = u('root', [h('removedMethod'), yaml('removed: v3.0.0')]); - const [entry] = parseApiDoc({ file, tree }, typeMap); + const [entry] = parseApiDoc({ path, tree }, typeMap); assert.strictEqual(entry.removed, 'v3.0.0'); }); @@ -106,7 +106,7 @@ describe('parseApiDoc', () => { ' description: The callback is no longer optional.' 
), ]); - const [entry] = parseApiDoc({ file, tree }, typeMap); + const [entry] = parseApiDoc({ path, tree }, typeMap); assert.strictEqual(entry.changes.length, 1); assert.strictEqual(entry.changes[0].version, 'v7.0.0'); @@ -118,7 +118,7 @@ describe('parseApiDoc', () => { it('extracts tags from a plain comment', () => { const tree = u('root', [h('method'), u('html', '')]); - const [entry] = parseApiDoc({ file, tree }, typeMap); + const [entry] = parseApiDoc({ path, tree }, typeMap); assert.deepStrictEqual(entry.tags, ['legacy']); }); @@ -127,7 +127,7 @@ describe('parseApiDoc', () => { describe('stability index', () => { it('captures stability index and description', () => { const tree = u('root', [h('fs'), stability('Stability: 2 - Stable')]); - const [entry] = parseApiDoc({ file, tree }, typeMap); + const [entry] = parseApiDoc({ path, tree }, typeMap); assert.strictEqual(entry.stability.data.index, '2'); assert.strictEqual(entry.stability.data.description, 'Stable'); @@ -138,7 +138,7 @@ describe('parseApiDoc', () => { h('crypto'), stability('Stability: 1 - Experimental: This API is experimental.'), ]); - const [entry] = parseApiDoc({ file, tree }, typeMap); + const [entry] = parseApiDoc({ path, tree }, typeMap); assert.strictEqual( entry.stability.data.description, @@ -151,17 +151,14 @@ describe('parseApiDoc', () => { h('Stability Index'), stability('Stability: 2 - Stable'), ]); - const [entry] = parseApiDoc( - { file: { stem: 'documentation', basename: 'documentation.md' }, tree }, - typeMap - ); + const [entry] = parseApiDoc({ path: '/documentation', tree }, typeMap); assert.ok(!('stability' in entry)); }); it('has empty stability when no blockquote is present', () => { const tree = u('root', [h('fs')]); - const [entry] = parseApiDoc({ file, tree }, typeMap); + const [entry] = parseApiDoc({ path, tree }, typeMap); assert.ok(!('stability' in entry)); }); @@ -178,7 +175,7 @@ describe('parseApiDoc', () => { ]), u('definition', { identifier: 'ref', url: 
'https://example.com' }), ]); - const [entry] = parseApiDoc({ file, tree }, typeMap); + const [entry] = parseApiDoc({ path, tree }, typeMap); assert.strictEqual(findLink(entry)?.url, 'https://example.com'); }); @@ -190,7 +187,7 @@ describe('parseApiDoc', () => { h('fs'), u('paragraph', [u('text', '{string}')]), ]); - const [entry] = parseApiDoc({ file, tree }, typeMap); + const [entry] = parseApiDoc({ path, tree }, typeMap); assert.ok( findLink(entry) !== undefined, @@ -207,7 +204,7 @@ describe('parseApiDoc', () => { u('link', { url: 'events.md' }, [u('text', 'events')]), ]), ]); - const [entry] = parseApiDoc({ file, tree }, typeMap); + const [entry] = parseApiDoc({ path, tree }, typeMap); assert.strictEqual(findLink(entry)?.url, 'events.html'); }); @@ -219,7 +216,7 @@ describe('parseApiDoc', () => { u('link', { url: 'events.md#some-section' }, [u('text', 'events')]), ]), ]); - const [entry] = parseApiDoc({ file, tree }, typeMap); + const [entry] = parseApiDoc({ path, tree }, typeMap); assert.strictEqual(findLink(entry)?.url, 'events.html#some-section'); }); @@ -230,14 +227,14 @@ describe('parseApiDoc', () => { const tree = u('root', [ u('paragraph', [u('text', 'Just some text without any headings.')]), ]); - const results = parseApiDoc({ file, tree }, typeMap); + const results = parseApiDoc({ path, tree }, typeMap); assert.strictEqual(results.length, 1); }); it('returns an empty array for an empty document', () => { const tree = u('root', []); - const results = parseApiDoc({ file, tree }, typeMap); + const results = parseApiDoc({ path, tree }, typeMap); assert.strictEqual(results.length, 0); }); diff --git a/src/generators/metadata/utils/parse.mjs b/src/generators/metadata/utils/parse.mjs index fb4908c3..63574584 100644 --- a/src/generators/metadata/utils/parse.mjs +++ b/src/generators/metadata/utils/parse.mjs @@ -1,5 +1,8 @@ 'use strict'; +import { basename, sep } from 'node:path/posix'; + +import { slug } from 'github-slugger'; import { u as createTree } from 
'unist-builder'; import { findAfter } from 'unist-util-find-after'; import { remove } from 'unist-util-remove'; @@ -16,6 +19,7 @@ import { visitTextWithUnixManualNode, visitYAML, } from './visitors.mjs'; +import { href } from '../../../utils/file.mjs'; import { UNIST } from '../../../utils/queries/index.mjs'; import { getRemark } from '../../../utils/remark.mjs'; import { IGNORE_STABILITY_STEMS } from '../constants.mjs'; @@ -26,11 +30,11 @@ const remarkProcessor = getRemark(); /** * This generator generates a flattened list of metadata entries from a API doc * - * @param {ParserOutput} input + * @param {{ tree: import('mdast.Root') } & import('../types').MetadataEntry} input * @param {Record} typeMap * @returns {Promise>} */ -export const parseApiDoc = ({ file, tree }, typeMap) => { +export const parseApiDoc = ({ path, tree }, typeMap) => { /** * Collection of metadata entries for the file * @type {Array} @@ -40,6 +44,9 @@ export const parseApiDoc = ({ file, tree }, typeMap) => { // Creates a new Slugger instance for the current API doc file const nodeSlugger = createNodeSlugger(); + // Slug the API (We use a non-class slugger, since we are fairly certain that `path` is unique) + const api = slug(path.slice(1).replace(sep, '-')); + // Get all Markdown Footnote definitions from the tree const markdownDefinitions = selectAll('definition', tree); @@ -54,6 +61,11 @@ export const parseApiDoc = ({ file, tree }, typeMap) => { // Removes all the original definitions from the tree as they are not needed anymore remove(tree, markdownDefinitions); + // Make all the typeMap links relative to us + const relativeTypeMap = Object.fromEntries( + Object.entries(typeMap).map(([type, url]) => [type, href(url, path)]) + ); + // Handles the normalisation URLs that reference to API doc files with .md extension visit(tree, UNIST.isMarkdownUrl, node => visitMarkdownLink(node)); @@ -64,16 +76,17 @@ export const parseApiDoc = ({ file, tree }, typeMap) => { // On "About this Documentation", 
we define the stability indices, and thus // we don't need to check it for stability references - const ignoreStability = IGNORE_STABILITY_STEMS.includes(file.stem); + const ignoreStability = IGNORE_STABILITY_STEMS.includes(api); // Process each heading and create metadata entries visit(tree, UNIST.isHeading, (headingNode, index) => { // Initialize heading headingNode.data = transformNodeToHeading(headingNode); - // Initialize the metadata const metadata = /** @type {import('../types').MetadataEntry} */ ({ - api: file.stem, + api, + path, + basename: basename(path), heading: headingNode, }); @@ -106,7 +119,7 @@ export const parseApiDoc = ({ file, tree }, typeMap) => { // Process type references visit(subTree, UNIST.isTextWithType, (node, _, parent) => - visitTextWithTypeNode(node, parent, typeMap) + visitTextWithTypeNode(node, parent, relativeTypeMap) ); // Process Unix manual references diff --git a/src/generators/orama-db/generate.mjs b/src/generators/orama-db/generate.mjs index c1d963a6..39134009 100644 --- a/src/generators/orama-db/generate.mjs +++ b/src/generators/orama-db/generate.mjs @@ -1,12 +1,13 @@ 'use strict'; -import { writeFile } from 'node:fs/promises'; +import { join } from 'node:path'; import { create, save, insertMultiple } from '@orama/orama'; import { SCHEMA } from './constants.mjs'; import { buildHierarchicalTitle } from './utils/title.mjs'; import getConfig from '../../utils/configuration/index.mjs'; +import { writeFile } from '../../utils/file.mjs'; import { groupNodesByModule } from '../../utils/generators.mjs'; import { transformNodeToString } from '../../utils/unist.mjs'; @@ -36,7 +37,7 @@ export async function generate(input) { description: paragraph ? 
transformNodeToString(paragraph, true) : undefined, - href: `${entry.api}.html#${entry.heading.data.slug}`, + href: `${entry.path.slice(1)}.html#${entry.heading.data.slug}`, siteSection: headings[0].heading.data.name, }; }) @@ -50,7 +51,7 @@ export async function generate(input) { // Persist if (config.output) { await writeFile( - `${config.output}/orama-db.json`, + join(config.output, 'orama-db.json'), config.minify ? JSON.stringify(result) : JSON.stringify(result, null, 2) ); } diff --git a/src/generators/sitemap/generate.mjs b/src/generators/sitemap/generate.mjs index 20e01e62..fc91cf1f 100644 --- a/src/generators/sitemap/generate.mjs +++ b/src/generators/sitemap/generate.mjs @@ -1,10 +1,11 @@ 'use strict'; -import { readFile, writeFile } from 'node:fs/promises'; +import { readFile } from 'node:fs/promises'; import { join } from 'node:path'; import { createPageSitemapEntry } from './utils/createPageSitemapEntry.mjs'; import getConfig from '../../utils/configuration/index.mjs'; +import { writeFile } from '../../utils/file.mjs'; /** * Generates a sitemap.xml file diff --git a/src/generators/web/generate.mjs b/src/generators/web/generate.mjs index 1679a5a8..57ca2a75 100644 --- a/src/generators/web/generate.mjs +++ b/src/generators/web/generate.mjs @@ -1,12 +1,13 @@ 'use strict'; -import { readFile, writeFile } from 'node:fs/promises'; +import { readFile } from 'node:fs/promises'; import { createRequire } from 'node:module'; import { join } from 'node:path'; import createASTBuilder from './utils/generate.mjs'; import { processJSXEntries } from './utils/processing.mjs'; import getConfig from '../../utils/configuration/index.mjs'; +import { writeFile } from '../../utils/file.mjs'; /** * Main generation function that processes JSX AST entries into web bundles. 
@@ -36,8 +37,8 @@ export async function generate(input) { // Process all entries together (required for code-split bundles) if (config.output) { // Write HTML files - for (const { html, api } of results) { - await writeFile(join(config.output, `${api}.html`), html, 'utf-8'); + for (const { html, path } of results) { + await writeFile(join(config.output, `${path}.html`), html, 'utf-8'); } // Write code-split JavaScript chunks diff --git a/src/generators/web/template.html b/src/generators/web/template.html index ae85915d..71d0de0e 100644 --- a/src/generators/web/template.html +++ b/src/generators/web/template.html @@ -7,7 +7,7 @@ {{title}} - + @@ -26,6 +26,6 @@
    {{dehydrated}}
    - + diff --git a/src/generators/web/utils/processing.mjs b/src/generators/web/utils/processing.mjs index 14931700..81a71813 100644 --- a/src/generators/web/utils/processing.mjs +++ b/src/generators/web/utils/processing.mjs @@ -1,4 +1,5 @@ import { randomUUID } from 'node:crypto'; +import { sep } from 'node:path/posix'; import { jsx, toJs } from 'estree-util-to-js'; import { transform } from 'lightningcss-wasm'; @@ -110,20 +111,24 @@ export async function processJSXEntries( // Step 3: Create final HTML (could be parallelized in workers) const results = await Promise.all( - entries.map(async ({ data: { api, heading } }) => { - const fileName = `${api}.js`; + entries.map(async ({ data: { api, path, heading } }) => { const title = `${heading.data.name} | ${titleSuffix}`; + // The number of occurrences is 1 less than the length of the split + // We also remove 1 `/`, as the path begins with a `/` + const root = '../'.repeat(path.split(sep).length - 2) || './'; + // Replace template placeholders with actual content const renderedHtml = template .replace('{{title}}', title) - .replace('{{dehydrated}}', serverBundle.pages.get(fileName) ?? '') + .replace('{{dehydrated}}', serverBundle.pages.get(`${api}.js`) ?? '') .replace('{{importMap}}', clientBundle.importMap ?? 
'') - .replace('{{entrypoint}}', `./${fileName}?${randomUUID()}`) + .replace('{{entrypoint}}', `${api}.js?${randomUUID()}`) .replace('{{speculationRules}}', SPECULATION_RULES) - .replace('{{ogTitle}}', title); + .replace('{{ogTitle}}', title) + .replaceAll('{{root}}', root); - return { html: await minifyHTML(renderedHtml), api }; + return { html: await minifyHTML(renderedHtml), path }; }) ); diff --git a/src/threading/__tests__/parallel.test.mjs b/src/threading/__tests__/parallel.test.mjs index 324b0960..766e3d03 100644 --- a/src/threading/__tests__/parallel.test.mjs +++ b/src/threading/__tests__/parallel.test.mjs @@ -72,26 +72,24 @@ describe('createParallelWorker', () => { const mockInput = [ { - file: { stem: 'test1', basename: 'test1.md' }, + path: 'test1', tree: { type: 'root', children: [] }, }, { - file: { stem: 'test2', basename: 'test2.md' }, + path: 'test2', tree: { type: 'root', children: [] }, }, { - file: { stem: 'test3', basename: 'test3.md' }, + path: 'test3', tree: { type: 'root', children: [] }, }, { - file: { stem: 'test4', basename: 'test4.md' }, + path: 'test4', tree: { type: 'root', children: [] }, }, ]; - const chunks = await collectChunks( - worker.stream(mockInput, { typeMap: {} }) - ); + const chunks = await collectChunks(worker.stream(mockInput, {})); strictEqual(chunks.length, 4); @@ -111,18 +109,16 @@ describe('createParallelWorker', () => { const mockInput = [ { - file: { stem: 'test1', basename: 'test1.md' }, + path: 'test1', tree: { type: 'root', children: [] }, }, { - file: { stem: 'test2', basename: 'test2.md' }, + path: 'test2', tree: { type: 'root', children: [] }, }, ]; - const chunks = await collectChunks( - worker.stream(mockInput, { typeMap: {} }) - ); + const chunks = await collectChunks(worker.stream(mockInput, {})); strictEqual(chunks.length, 2); @@ -138,14 +134,12 @@ describe('createParallelWorker', () => { const mockInput = [ { - file: { stem: 'test1', basename: 'test1.md' }, + path: 'test1', tree: { type: 'root', 
children: [] }, }, ]; - const chunks = await collectChunks( - worker.stream(mockInput, { typeMap: {} }) - ); + const chunks = await collectChunks(worker.stream(mockInput, {})); strictEqual(chunks.length, 1); ok(Array.isArray(chunks[0])); @@ -162,18 +156,16 @@ describe('createParallelWorker', () => { const mockInput = [ { - file: { stem: 'test1', basename: 'test1.md' }, + path: 'test1', tree: { type: 'root', children: [] }, }, { - file: { stem: 'test2', basename: 'test2.md' }, + path: 'test2', tree: { type: 'root', children: [] }, }, ]; - const chunks = await collectChunks( - worker.stream(mockInput, { typeMap: {} }) - ); + const chunks = await collectChunks(worker.stream(mockInput, {})); strictEqual(chunks.length, 2); diff --git a/src/utils/configuration/templates.mjs b/src/utils/configuration/templates.mjs index 802e227b..93256336 100644 --- a/src/utils/configuration/templates.mjs +++ b/src/utils/configuration/templates.mjs @@ -8,7 +8,7 @@ export const GITHUB_BLOB_URL = 'https://github.com/{repository}/blob/{ref}/'; // This is the API docs base URL for editing a file on GitHub UI // TODO(@avivkeller): specify /doc/api in config export const GITHUB_EDIT_URL = - 'https://github.com/{repository}/edit/{ref}/doc/api/'; + 'https://github.com/{repository}/edit/{ref}/doc/api'; /** * Populate a template string based on a configuration diff --git a/src/utils/file.mjs b/src/utils/file.mjs new file mode 100644 index 00000000..f5453749 --- /dev/null +++ b/src/utils/file.mjs @@ -0,0 +1,42 @@ +import fs from 'node:fs/promises'; +import { dirname } from 'node:path'; + +/** + * Returns the input string with the `ext` extension, replacing any pre-existing extension + * @param {string} str + * @param {string} ext + */ +export const withExt = (str, ext) => + `${str.replace(/\.[0-9a-z]+$/i, '')}${ext ? 
`.${ext}` : ''}`; + +/** + * Writes a file, creating any missing parent directories first + * + * @type {typeof fs.writeFile} + */ +export const writeFile = (file, ...args) => + fs + .mkdir(dirname(file), { recursive: true }) + .then(() => fs.writeFile(file, ...args)); + +/** + * Kind of like `path.posix.relative`, however, this functions more like a URL resolution + * @param {string} to + * @param {string} from + * @returns {string} + */ +export const href = (to, from) => { + if (to.includes('://')) { + return to; + } + + const a = to.split('/').filter(Boolean); + const b = from.split('/').slice(0, -1).filter(Boolean); + + while (a[0] === b[0]) { + a.shift(); + b.shift(); + } + + return [...b.map(() => '..'), ...a].join('/'); +}; diff --git a/src/utils/generators.mjs b/src/utils/generators.mjs index f1842508..61c7a8a9 100644 --- a/src/utils/generators.mjs +++ b/src/utils/generators.mjs @@ -39,11 +39,11 @@ export const getVersionFromSemVer = version => * Gets the documentation URL for an API and version * * @param {string} version The version to be parsed - * @param {string} api The document + * @param {string} path The document * @param {string} baseURL */ -export const getVersionURL = (version, api, baseURL) => - `${baseURL}/latest-v${version}/api/${api}.html`; +export const getVersionURL = (version, path, baseURL) => + `${baseURL}/latest-v${version}/api${path}.html`; /** * @TODO: This should not be necessary, and indicates errors within the API docs @@ -155,7 +155,7 @@ export const legacyToJSON = ( * @returns {URL} */ export const buildApiDocURL = (entry, baseURL, useHtml = false) => { - const path = `/docs/latest/api/${entry.api}.${useHtml ? 'html' : 'md'}`; + const path = `/docs/latest/api${entry.path}.${useHtml ? 'html' : 'md'}`; return URL.parse(path, baseURL); };