= {}
try {
- return await transformPageToMarkdown(body, mdxAbsPath, options)
- } catch {
- const sanitizedBody = stripRuntimeMdxSyntax(body)
-
- try {
- return await transformPageToMarkdown(sanitizedBody, mdxAbsPath, options)
- } catch {
- return buildFallbackMarkdownBody(sanitizedBody)
+ chainCache = await getServerSideChainMetadata(CHAINS)
+ } catch (e) {
+ console.error("Failed to load chain metadata:", e)
+ }
+
+ // --------------------------------------------------
+ // STRIP HIDDEN DIRECTIVE (prevents JSX in markdown)
+ // --------------------------------------------------
+ body = body.replace(/[\s\S]*?<\/div>/g, "")
+
+ // --------------------------------------------------
+ // FEEDS
+ // --------------------------------------------------
+ if (body.includes(" = {
+ default: "Price feeds",
+ smartdata: "SmartData feeds",
+ rates: "Rates feeds",
+ tokenizedEquity: "Tokenized equity feeds",
+ usGovernmentMacroeconomicData: "U.S. Government Macroeconomic Data feeds",
}
- }
-}
-function buildFallbackMarkdownBody(body: string): string {
- return stripRuntimeMdxSyntax(body)
- .replace(/<([A-Z][A-Za-z0-9]*)\b[^>]*\/>/g, "")
- .replace(/<([A-Z][A-Za-z0-9]*)\b[^>]*>/g, "")
- .replace(/<\/[A-Z][A-Za-z0-9]*>/g, "")
- .trim()
-}
+ const feedLabel = labelMap[feedType] || feedType
-function stripRuntimeMdxSyntax(body: string): string {
- const lines = body.split("\n")
- const output: string[] = []
+ const replacement = `
+## Feed Contract Addresses
- let skippingExportBlock = false
- let skippingImportBlock = false
- let braceDepth = 0
+For programmatic access:
+${BASE_URL}/data-feeds/feed-addresses/${feedType}/ethereum-mainnet.json
- for (const line of lines) {
- const trimmed = line.trim()
+First retrieve available networks:
+${BASE_URL}/data-feeds/feed-addresses/${feedType}.json
- if (skippingImportBlock) {
- if (trimmed.includes(" from ") || trimmed.endsWith('"') || trimmed.endsWith("'")) {
- skippingImportBlock = false
- }
- continue
- }
+Use this dataset to determine the correct network queryString before retrieving a specific network dataset.
- if (skippingExportBlock) {
- braceDepth += countChar(line, "{")
- braceDepth -= countChar(line, "}")
+The interactive address table on this page is loaded dynamically and is not included in this markdown export.
- if (braceDepth <= 0) {
- skippingExportBlock = false
- braceDepth = 0
- }
- continue
- }
+- ${feedLabel}${feedType === "default" ? " (default dataset)" : ""}:
+/data-feeds/feed-addresses/${feedType}.json
- if (/^import\s+/.test(trimmed)) {
- if (!trimmed.includes(" from ")) skippingImportBlock = true
- continue
- }
+- Per-network datasets:
+/data-feeds/feed-addresses/${feedType}/{network}.json
+`
- if (/^export\s+(async\s+)?function\s+/.test(trimmed)) {
- skippingExportBlock = true
- braceDepth = countChar(line, "{") - countChar(line, "}")
- continue
- }
+ body = body.replace(//g, replacement)
+ }
+ // --------------------------------------------------
+ // STREAMS
+ // --------------------------------------------------
+ if (body.includes(" = {
+ crypto: "Crypto streams",
+ rwa: "RWA streams",
+ exchangeRate: "Exchange rate streams",
+ smartdata: "SmartData streams",
+ tokenizedAsset: "Tokenized asset streams",
}
- output.push(line)
+ const streamLabel = streamLabelMap[rawType] || rawType
+
+ const replacement = `
+## Stream IDs
+
+For programmatic access:
+${BASE_URL}/data-streams/stream-ids/${rawType}.json
+
+The interactive stream table on this page is loaded dynamically and is not included in this markdown export.
+
+- ${streamLabel}:
+/data-streams/stream-ids/${rawType}.json
+`
+
+ body = body.replace(//g, replacement)
}
- return output.join("\n")
+ try {
+ return await transformPageToMarkdown(body, mdxAbsPath, options)
+ } catch {
+ return body
+ }
}
-function countChar(value: string, char: string): number {
- return value.split(char).length - 1
-}
+// -----------------------
+// UTILITIES
+// -----------------------
function normalizeMarkdownPath(pathParam: string | undefined): string | null {
if (!pathParam) return null
@@ -294,37 +218,3 @@ async function findContentFile(cleanPath: string): Promise {
return null
}
-
-function buildMarkdownMovedResponse(sourcePath: string, targetPath: string): Response {
- const sourceUrl = `${SITE_BASE}/${sourcePath}`
- const targetUrl = `/${targetPath}.md`
-
- return new Response(
- [
- `# Redirect`,
- `Source: ${sourceUrl}`,
- "",
- LLMS_DIRECTIVE,
- "",
- "This page has moved.",
- "",
- `Use the current documentation: [${targetPath}](${targetUrl}).`,
- "",
- ].join("\n"),
- { status: 200, headers: markdownHeaders }
- )
-}
-
-function buildCreSelectorMarkdown(canonicalPath: string, resolution: any): string {
- const canonicalUrl = `${SITE_BASE}/${canonicalPath}`
- return [
- `# ${canonicalPath}`,
- `Source: ${canonicalUrl}`,
- "",
- LLMS_DIRECTIVE,
- "",
- `- Go: /${resolution.goPath}.md`,
- `- TypeScript: /${resolution.tsPath}.md`,
- "",
- ].join("\n")
-}
diff --git a/src/pages/data-feeds/feed-addresses/[type].json.ts b/src/pages/data-feeds/feed-addresses/[type].json.ts
new file mode 100644
index 00000000000..2140a45953e
--- /dev/null
+++ b/src/pages/data-feeds/feed-addresses/[type].json.ts
@@ -0,0 +1,26 @@
+import type { APIRoute } from "astro"
+
+import { getServerSideChainMetadata } from "~/features/data/api/backend.ts"
+import { CHAINS } from "~/features/data/chains.ts"
+import { collectFeedEntries, VALID_FEED_TYPES } from "~/features/feeds/utils/feedOutput.ts"
+
// Astro route option: opt out of prerendering so this JSON is served on demand.
export const prerender = false
+
+export const GET: APIRoute = async ({ params }) => {
+ const type = params.type as string
+
+ if (!VALID_FEED_TYPES.includes(type as any)) {
+ return new Response(`Invalid type "${type}"`, { status: 400 })
+ }
+
+ const chainCache = await getServerSideChainMetadata(CHAINS)
+
+ const feeds = collectFeedEntries(type as any, null, chainCache)
+
+ return new Response(JSON.stringify(feeds, null, 2), {
+ headers: {
+ "Content-Type": "application/json",
+ "Cache-Control": "public, max-age=300",
+ },
+ })
+}
diff --git a/src/pages/data-feeds/feed-addresses/[type].txt.ts b/src/pages/data-feeds/feed-addresses/[type].txt.ts
new file mode 100644
index 00000000000..af3d09ab2dd
--- /dev/null
+++ b/src/pages/data-feeds/feed-addresses/[type].txt.ts
@@ -0,0 +1,98 @@
+import type { APIRoute } from "astro"
+import { getServerSideChainMetadata } from "~/features/data/api/backend.ts"
+import { CHAINS } from "~/features/data/chains.ts"
+import { VALID_FEED_TYPES } from "~/features/feeds/utils/feedOutput.ts"
+import type { DataFeedType } from "~/features/feeds/components/FeedList.tsx"
+
+export function getStaticPaths() {
+ return VALID_FEED_TYPES.map((type) => ({
+ params: { type },
+ }))
+}
+
+export const GET: APIRoute = async ({ params }) => {
+ const type = params.type as DataFeedType
+
+ if (!VALID_FEED_TYPES.includes(type)) {
+ return new Response(`Invalid type "${type}"`, { status: 400 })
+ }
+
+ const chainCache = await getServerSideChainMetadata(CHAINS)
+
+ const networks: {
+ queryString: string
+ networkName: string
+ chain: string
+ networkType: string
+ }[] = []
+
+ const seen = new Set()
+
+ for (const chain of Object.values(chainCache)) {
+ // ✅ FIX: rename to avoid shadowing
+ const chainNetworks = (chain as { networks?: any[] }).networks ?? []
+
+ for (const network of chainNetworks) {
+ const queryString = network.queryString
+ if (!queryString) continue
+
+ if (seen.has(queryString)) continue
+ seen.add(queryString)
+
+ networks.push({
+ queryString,
+ networkName: network.name,
+ chain: typeof network.chain === "string" ? network.chain : "",
+ networkType: network.networkType || "mainnet",
+ })
+ }
+ }
+
+ // Sort: mainnet first, then testnet
+ networks.sort((a, b) => {
+ if (a.networkType !== b.networkType) {
+ return a.networkType === "mainnet" ? -1 : 1
+ }
+ return a.queryString.localeCompare(b.queryString)
+ })
+
+ const lines: string[] = []
+
+ lines.push(`# Chainlink Feed Addresses Index (${type})`)
+ lines.push("")
+ lines.push("This document lists all available networks for this feed type.")
+ lines.push("")
+ lines.push("Each network has a dedicated dataset.")
+ lines.push("")
+ lines.push("Do not load multiple networks unless required.")
+ lines.push("Each file contains the complete dataset for one network.")
+ lines.push("")
+
+ lines.push("## Networks")
+ lines.push("")
+
+ for (const net of networks) {
+ lines.push(`- ${net.queryString} → /data-feeds/feed-addresses/${type}/${net.queryString}.txt`)
+ lines.push(` name: ${net.networkName}`)
+ lines.push(` type: ${net.networkType}`)
+ if (net.chain) {
+ lines.push(` chain: ${net.chain}`)
+ }
+ lines.push("")
+ }
+
+ lines.push("## Usage pattern")
+ lines.push("")
+ lines.push("1. Select a network from the list above")
+ lines.push("2. Fetch the corresponding dataset")
+ lines.push("3. Filter by feed name as needed")
+ lines.push("")
+
+ return new Response(lines.join("\n"), {
+ status: 200,
+ headers: {
+ "Content-Type": "text/plain; charset=utf-8",
+ "Cache-Control": "public, max-age=0, s-maxage=86400, stale-while-revalidate=604800",
+ },
+ })
+}
diff --git a/src/pages/data-feeds/feed-addresses/[type]/[network].json.ts b/src/pages/data-feeds/feed-addresses/[type]/[network].json.ts
new file mode 100644
index 00000000000..821afded784
--- /dev/null
+++ b/src/pages/data-feeds/feed-addresses/[type]/[network].json.ts
@@ -0,0 +1,31 @@
+import type { APIRoute } from "astro"
+
+import { getServerSideChainMetadata } from "~/features/data/api/backend.ts"
+import { CHAINS } from "~/features/data/chains.ts"
+import { collectFeedEntries, VALID_FEED_TYPES } from "~/features/feeds/utils/feedOutput.ts"
+
// Astro route option: serve this per-network JSON on demand, not prerendered.
export const prerender = false
+
+export const GET: APIRoute = async ({ params }) => {
+ const type = params.type as string
+ const network = params.network ?? null
+
+ if (!VALID_FEED_TYPES.includes(type as any)) {
+ return new Response(`Invalid type "${type}"`, {
+ status: 400,
+ headers: { "Content-Type": "text/plain; charset=utf-8" },
+ })
+ }
+
+ const chainCache = await getServerSideChainMetadata(CHAINS)
+
+ const feeds = collectFeedEntries(type as any, network, chainCache)
+
+ return new Response(JSON.stringify(feeds, null, 2), {
+ status: 200,
+ headers: {
+ "Content-Type": "application/json",
+ "Cache-Control": "public, max-age=300",
+ },
+ })
+}
diff --git a/src/pages/data-feeds/feed-addresses/[type]/[network].txt.ts b/src/pages/data-feeds/feed-addresses/[type]/[network].txt.ts
new file mode 100644
index 00000000000..d000e440473
--- /dev/null
+++ b/src/pages/data-feeds/feed-addresses/[type]/[network].txt.ts
@@ -0,0 +1,78 @@
+/**
+ * Static snapshots of feed addresses per network, pre-rendered at build time.
+ * Served at /data-feeds/feed-addresses/{type}/{network}.txt
+ *
+ * For always-live data use the dynamic endpoint: /api/feeds/addresses
+ */
+
+import type { APIRoute } from "astro"
+import { getServerSideChainMetadata } from "~/features/data/api/backend.ts"
+import { CHAINS } from "~/features/data/chains.ts"
+import { buildFeedAddressMarkdown, VALID_FEED_TYPES } from "~/features/feeds/utils/feedOutput.ts"
+import { STREAM_CATEGORY_MAP } from "~/features/feeds/utils/streamMetadata.ts"
+import type { DataFeedType } from "~/features/feeds/components/FeedList.tsx"
+
+// Reverse map: internal → public
+const INTERNAL_TO_PUBLIC: Record = Object.fromEntries(
+ Object.entries(STREAM_CATEGORY_MAP).map(([pub, internal]) => [internal, pub])
+)
+
+// ✅ Build-time route generation
+export async function getStaticPaths() {
+ const chainCache = await getServerSideChainMetadata(CHAINS)
+
+ const paths: { params: { type: string; network: string } }[] = []
+ const seen = new Set()
+
+ for (const type of VALID_FEED_TYPES) {
+ for (const chain of Object.values(chainCache)) {
+ const networks = (chain as any).networks ?? []
+
+ for (const network of networks) {
+ const queryString = network.queryString
+ if (!queryString) continue
+
+ const key = `${type}:${queryString}`
+ if (seen.has(key)) continue
+ seen.add(key)
+
+ paths.push({
+ params: {
+ type,
+ network: queryString,
+ },
+ })
+ }
+ }
+ }
+
+ return paths
+}
+
+// ❌ IMPORTANT: no `prerender = false` here
+// this must run at build time
+
+export const GET: APIRoute = async ({ params }) => {
+ const type = params.type as DataFeedType
+ const network = typeof params.network === "string" ? params.network : null
+
+ if (!VALID_FEED_TYPES.includes(type)) {
+ return new Response(`Invalid type "${type}"`, { status: 400 })
+ }
+
+ const publicType = INTERNAL_TO_PUBLIC[type]
+
+ // ✅ Safe at build time (filesystem allowed)
+ const chainCache = await getServerSideChainMetadata(CHAINS)
+
+ const markdown = buildFeedAddressMarkdown(type, network, chainCache, "https://docs.chain.link", { publicType })
+
+ return new Response(markdown, {
+ status: 200,
+ headers: {
+ "Content-Type": "text/plain; charset=utf-8",
+ // Long CDN cache — content only changes on redeploy
+ "Cache-Control": "public, max-age=0, s-maxage=86400, stale-while-revalidate=604800",
+ },
+ })
+}
diff --git a/src/pages/data-streams/networks.txt.ts b/src/pages/data-streams/networks.txt.ts
new file mode 100644
index 00000000000..f889453d4e7
--- /dev/null
+++ b/src/pages/data-streams/networks.txt.ts
@@ -0,0 +1,105 @@
+import type { APIRoute } from "astro"
+import { StreamsNetworksData } from "~/features/feeds/data/StreamsNetworksData.ts"
+import { textPlainHeaders } from "@lib/api/cacheHeaders.js"
+
// Astro route option: render this networks listing on demand (no prerender).
export const prerender = false
+
/** One row of the networks listing: a network plus its verifier proxy. */
interface StreamNetworkEntry {
  /** Network identifier, rendered in the first table column. */
  network: string
  /** Human-readable label for the specific environment (mainnet/testnet). */
  label: string
  /** Verifier proxy contract address for this network environment. */
  verifierProxy: string
}
+
+function getNetworks(): {
+ mainnet: StreamNetworkEntry[]
+ testnet: StreamNetworkEntry[]
+} {
+ const mainnet: StreamNetworkEntry[] = []
+ const testnet: StreamNetworkEntry[] = []
+
+ for (const entry of StreamsNetworksData) {
+ if (entry.isCanton) continue
+
+ if (entry.mainnet?.verifierProxy) {
+ mainnet.push({
+ network: entry.network,
+ label: entry.mainnet.label,
+ verifierProxy: entry.mainnet.verifierProxy,
+ })
+ }
+
+ if (entry.testnet?.verifierProxy) {
+ testnet.push({
+ network: entry.network,
+ label: entry.testnet.label,
+ verifierProxy: entry.testnet.verifierProxy,
+ })
+ }
+ }
+
+ return { mainnet, testnet }
+}
+
+function buildMarkdown(): string {
+ const { mainnet, testnet } = getNetworks()
+
+ const lines: string[] = [
+ "# Chainlink Data Streams Networks",
+ "",
+ "Supported networks and verifier proxy addresses.",
+ "",
+ "Verifier proxies are shared across all Data Streams categories and schema versions.",
+ "",
+ "Use the verifier proxy for the network where your application is deployed.",
+ "",
+ "---",
+ "",
+ "## Mainnet Networks",
+ "",
+ "| Network | Label | Verifier Proxy |",
+ "|---------|-------|----------------|",
+ ]
+
+ for (const n of mainnet) {
+ lines.push(`| ${n.network} | ${n.label} | \`${n.verifierProxy}\` |`)
+ }
+
+ if (testnet.length > 0) {
+ lines.push(
+ "",
+ "## Testnet Networks",
+ "",
+ "| Network | Label | Verifier Proxy |",
+ "|---------|-------|----------------|"
+ )
+
+ for (const n of testnet) {
+ lines.push(`| ${n.network} | ${n.label} | \`${n.verifierProxy}\` |`)
+ }
+ }
+
+ lines.push(
+ "",
+ "---",
+ "",
+ "Use this file together with stream ID datasets:",
+ "",
+ "- /data-streams/stream-ids/{type}.txt",
+ "",
+ "Stream IDs are universal. Networks provide verifier proxy addresses."
+ )
+
+ return lines.join("\n")
+}
+
+export const GET: APIRoute = async () => {
+ const markdown = buildMarkdown()
+
+ return new Response(markdown, {
+ status: 200,
+ headers: {
+ ...textPlainHeaders,
+ "Cache-Control": "public, max-age=3600, s-maxage=86400",
+ },
+ })
+}
diff --git a/src/pages/data-streams/stream-ids/[type].json.ts b/src/pages/data-streams/stream-ids/[type].json.ts
new file mode 100644
index 00000000000..8981340892e
--- /dev/null
+++ b/src/pages/data-streams/stream-ids/[type].json.ts
@@ -0,0 +1,32 @@
+import type { APIRoute } from "astro"
+
+import { getServerSideChainMetadata } from "~/features/data/api/backend.ts"
+import { CHAINS } from "~/features/data/chains.ts"
+import { collectStreamEntries } from "~/features/feeds/utils/feedOutput.ts"
+import { STREAM_CATEGORY_MAP } from "~/features/feeds/utils/streamMetadata.ts"
+
// Astro route option: serve stream-ID JSON on demand instead of prerendering.
export const prerender = false
+
+export const GET: APIRoute = async ({ params }) => {
+ const rawType = params.type as string
+ const internalType = STREAM_CATEGORY_MAP[rawType]
+
+ if (!internalType) {
+ return new Response(`Invalid type "${rawType}"`, {
+ status: 400,
+ headers: { "Content-Type": "text/plain; charset=utf-8" },
+ })
+ }
+
+ const chainCache = await getServerSideChainMetadata(CHAINS)
+
+ const streams = collectStreamEntries(internalType, chainCache, { publicType: rawType } as any)
+
+ return new Response(JSON.stringify(streams, null, 2), {
+ status: 200,
+ headers: {
+ "Content-Type": "application/json",
+ "Cache-Control": "public, max-age=300",
+ },
+ })
+}
diff --git a/src/pages/data-streams/stream-ids/[type].txt.ts b/src/pages/data-streams/stream-ids/[type].txt.ts
new file mode 100644
index 00000000000..12f5688eff8
--- /dev/null
+++ b/src/pages/data-streams/stream-ids/[type].txt.ts
@@ -0,0 +1,113 @@
+import type { APIRoute } from "astro"
+import { getServerSideChainMetadata } from "~/features/data/api/backend.ts"
+import { CHAINS } from "~/features/data/chains.ts"
+import { buildFeedAddressMarkdown, type FeedMarkdownOptions } from "~/features/feeds/utils/feedOutput.ts"
+import { STREAM_CATEGORY_MAP } from "~/features/feeds/utils/streamMetadata.ts"
+import { textPlainHeaders } from "@lib/api/cacheHeaders.js"
+
+// ✅ Build-time route generation
+export function getStaticPaths() {
+ return Object.keys(STREAM_CATEGORY_MAP).map((type) => ({
+ params: { type },
+ }))
+}
+
+export const GET: APIRoute = async ({ params }) => {
+ const rawType = params.type as string
+
+ const internalType = STREAM_CATEGORY_MAP[rawType]
+ if (!internalType) {
+ const valid = Object.keys(STREAM_CATEGORY_MAP).join(", ")
+ return new Response(`Invalid type "${rawType}". Valid values: ${valid}`, {
+ status: 400,
+ headers: { "Content-Type": "text/plain; charset=utf-8" },
+ })
+ }
+
+ // ✅ Safe at build time
+ const chainCache = await getServerSideChainMetadata(CHAINS)
+
+ const options: FeedMarkdownOptions = {
+ publicType: rawType,
+ }
+
+ let markdown = buildFeedAddressMarkdown(internalType, null, chainCache, "https://docs.chain.link", options)
+
+ // --------------------------------------------------
+ // CLEANUP: remove API + cross-product references
+ // --------------------------------------------------
+
+ markdown = markdown
+ .replace(/Machine-readable endpoint:[^\n]*\n?/g, "")
+ .replace(/Supported networks and verifier proxy addresses:[^\n]*\n?/g, "")
+ .replace(/Static snapshot:[^\n]*\n?/g, "")
+
+ // --------------------------------------------------
+ // SCHEMA DETECTION
+ // --------------------------------------------------
+
+ const schemaMatches = [...markdown.matchAll(/\|\s*[^|]+\s*\|\s*`?0x[a-fA-F0-9]+`?\s*\|\s*[^|]*\|\s*(v\d+)/g)]
+
+ const schemas = Array.from(new Set(schemaMatches.map((m) => m[1])))
+
+ // --------------------------------------------------
+ // SCHEMA → DOCS MAP
+ // --------------------------------------------------
+
+ const SCHEMA_DOCS: Record = {
+ v3: "https://docs.chain.link/data-streams/reference/report-schema-v3",
+ v7: "https://docs.chain.link/data-streams/reference/report-schema-v7",
+ v8: "https://docs.chain.link/data-streams/reference/report-schema-v8",
+ v9: "https://docs.chain.link/data-streams/reference/report-schema-v9",
+ v10: "https://docs.chain.link/data-streams/reference/report-schema-v10",
+ v11: "https://docs.chain.link/data-streams/reference/report-schema-v11",
+ }
+
+ // --------------------------------------------------
+ // BUILD INTRO BLOCK
+ // --------------------------------------------------
+
+ const introLines = [
+ `> Stream IDs for Chainlink Data Streams – ${capitalize(rawType)}.`,
+ `> These IDs are universal and valid across all supported networks.`,
+ `> To use a stream ID, retrieve the verifier proxy for the target network from /data-streams/networks.txt.`,
+ `> Datasets may contain multiple schema versions. Filter by schema if needed.`,
+ ]
+
+ if (schemas.length > 0) {
+ introLines.push(`> Schemas present in this dataset:`)
+
+ for (const s of schemas) {
+ const url = SCHEMA_DOCS[s]
+ if (url) {
+ introLines.push(`> - \`${s}\` → ${url}`)
+ } else {
+ introLines.push(`> - ${s}`)
+ }
+ }
+ }
+
+ const introBlock = introLines.join("\n") + "\n"
+
+ // --------------------------------------------------
+ // REPLACE ORIGINAL INTRO
+ // --------------------------------------------------
+
+ markdown = markdown.replace(/> Stream IDs[\s\S]*?\n/, introBlock)
+
+ return new Response(markdown.trim(), {
+ status: 200,
+ headers: {
+ ...textPlainHeaders,
+ "Cache-Control": "public, max-age=3600, s-maxage=86400",
+ },
+ })
+}
+
+// -----------------------
+// Utility
+// -----------------------
+
+function capitalize(value: string): string {
+ return value.charAt(0).toUpperCase() + value.slice(1)
+}